分类: 系统运维
2009-08-19 17:33:25
关于HTTP协议的内容,记述如下:
RFC2616中主要描述了HTTP 1.1协议。下面的描述没有实现其各个方面的内容,只提出了一种能够完成所有HTTP网页抓取的最小实现(不能够抓取HTTPS)。
1、首先提交一个URL地址,分为普通的GET网页获取,POST的数据提交两种基本模式。
建立HttpWebRequest实例,其中uri是网页的URL的地址:
HttpWebRequest webrequest = (HttpWebRequest) WebRequest.Create(uri);
KeepAlive表示HTTP的连接是长连接:
webrequest.KeepAlive = true;
如果需要,添加引用地址,主要用于防止其他网站的连接引用,比如登陆时,经常需要验证:
if(referer!=null)
{
webrequest.Referer=referer;
}
选择数据的提交方式,有GET、POST两种方式,HEAD不常用:
switch(RequestMethod)
{
case 1:
webrequest.Method="GET";
break;
case 2:
webrequest.Method="POST";
break;
case 3:
webrequest.Method="HEAD";
break;
default:
webrequest.Method="GET";
break;
}
设置User-Agent。经常遇到某些网站做了限制:如果User-Agent为空,则不能访问:
webrequest.UserAgent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; .NET CLR 1.1.4322; .NET CLR 2.0.50215; fqSpider)";
添加其他的HTTP的Header信息,collHeader是一个NameValue的Collection:
if(collHeader!=null&&collHeader.Count>0)
{
int iCount = collHeader.Count;
string key;
string keyvalue;
for (int i=0; i < iCount; i++)
{
key = collHeader.Keys[i];
keyvalue = collHeader[i];
webrequest.Headers.Add(key, keyvalue);
}
}
设置Content-Type的内容,如果为POST,设置成application/x-www-form-urlencoded,如果是Get设置成text/html:
if(webrequest.Method=="POST")
{
webrequest.ContentType="application/x-www-form-urlencoded";
}
else
{
webrequest.ContentType = "text/html";
}
设置代理服务器地址和端口:
if ((ProxyServer!=null) &&(ProxyServer.Length > 0))
{
webrequest.Proxy = new
WebProxy(ProxyServer,ProxyPort);
}
设置是否允许自动重定向(AllowAutoRedirect):
webrequest.AllowAutoRedirect = true;
设置基本的登陆认证 :
if (NwCred)
{
CredentialCache wrCache =
new CredentialCache();
wrCache.Add(new Uri(uri),"Basic",
new NetworkCredential(UserName,UserPwd));
webrequest.Credentials = wrCache;
}
设置Request的Cookie容器:
webrequest.CookieContainer=Cookies;
设置POST数据:
byte[] bytes = Encoding.ASCII.GetBytes(RequestData);
webrequest.ContentLength=bytes.Length;
Stream oStreamOut = webrequest.GetRequestStream();
oStreamOut.Write(bytes,0,bytes.Length);
oStreamOut.Close();
具体代码如下:
1.
using System.IO;
using System.Net;
using System.Text.RegularExpressions;
namespace datagrid_study
{
public class WebForm3 : System.Web.UI.Page
{
public WebClient web=new WebClient();
private void Page_Load(object sender, System.EventArgs e)
{
// 在此处放置用户代码以初始化页面
Stream str;
str=web.OpenRead();
StreamReader read=new StreamReader(str,System.Text.Encoding.GetEncoding("GB2312"));
string html=read.ReadToEnd();
string aaa=html.ToString();
string bbb=Regex.Split(aaa,"
2.
// Fetches the raw HTML source of the page at the given URL.
/// <summary>
/// Downloads the HTML source of the page at <paramref name="Url"/>,
/// decoded as GB2312.
/// </summary>
/// <param name="Url">Absolute URL of the page to fetch.</param>
/// <returns>The page HTML, or an empty string on any failure (best-effort contract).</returns>
private string GetWebContent1(string Url)
{
    try
    {
        // Build the HTTP request with a 30-second timeout and caching disabled.
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(Url);
        request.Timeout = 30000;
        request.Headers.Set("Pragma", "no-cache");

        // using-blocks guarantee the response, stream and reader are closed
        // even if ReadToEnd throws — the original version leaked all three.
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        using (Stream streamReceive = response.GetResponseStream())
        using (StreamReader streamReader = new StreamReader(
                   streamReceive, System.Text.Encoding.GetEncoding("GB2312")))
        {
            return streamReader.ReadToEnd();
        }
    }
    catch
    {
        // Deliberate best-effort behavior: any failure (bad URI, timeout,
        // HTTP error) yields an empty string instead of propagating.
        return "";
    }
}
例3.
public class WebForm1 : System.Web.UI.Page
{
protected System.Web.UI.WebControls.DataGrid dgData;
/// <summary>
/// Page lifecycle handler: scrapes the remote news listing via getInfo()
/// and binds the resulting DataTable to the DataGrid. There is no
/// IsPostBack check, so the scrape re-runs on every postback as well.
/// </summary>
private void Page_Load(object sender, System.EventArgs e)
{
// Place user code here to initialize the page.
dgData.DataSource=getInfo();
dgData.DataBind();
}
// Fetches the raw HTML source of the page at the given URL.
/// <summary>
/// Downloads the HTML source of the page at <paramref name="Url"/>,
/// decoded as GB2312.
/// </summary>
/// <param name="Url">Absolute URL of the page to fetch.</param>
/// <returns>The page HTML, or an empty string on any failure (best-effort contract).</returns>
private string GetWebContent1(string Url)
{
    try
    {
        // Build the HTTP request with a 30-second timeout and caching disabled.
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(Url);
        request.Timeout = 30000;
        request.Headers.Set("Pragma", "no-cache");

        // using-blocks guarantee the response, stream and reader are closed
        // even if ReadToEnd throws — the original version leaked all three.
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        using (Stream streamReceive = response.GetResponseStream())
        using (StreamReader streamReader = new StreamReader(
                   streamReceive, System.Text.Encoding.GetEncoding("GB2312")))
        {
            return streamReader.ReadToEnd();
        }
    }
    catch
    {
        // Deliberate best-effort behavior: any failure (bad URI, timeout,
        // HTTP error) yields an empty string instead of propagating.
        return "";
    }
}
// Scrapes hyperlinks, article titles, contents and dates from the listing page.
//
// Builds a DataTable named "newsthief" with columns title/URL/content/newsdate,
// populated by regex-matching the HTML returned by GetWebContent1.
//
// NOTE(review): the regex patterns and Regex.Split delimiter string literals
// below were corrupted when this article was published (embedded HTML tags
// were stripped), so several statements are syntactically incomplete and
// this method cannot compile as shown. The original pattern literals must
// be recovered from the source article before this code is usable.
// TODO: restore the pattern/delimiter literals; code is kept verbatim here.
private DataTable getInfo()
{
// Create the result DataTable with the four scraped columns.
DataTable dt = new DataTable();
dt.Columns.Add("title",typeof(string));
dt.Columns.Add("URL",typeof(string));
dt.Columns.Add("content",typeof(string));
dt.Columns.Add("newsdate",typeof(string));
dt.TableName="newsthief";
// NOTE(review): the listing URL argument was lost in extraction — an empty
// URL makes GetWebContent1 return "" and the scrape yields no rows.
string html=GetWebContent1("");
string width=84% >";
// Count the number of pattern matches in the page (pattern literal garbled).
int Count=0;
MatchCollection Matches=Regex.Matches(html,strPattern0,RegexOptions.IgnoreCase|RegexOptions.Compiled);
foreach(Match NextMatch in Matches)
{
Count++;
}
string sHtml=html;
string strPattern=@"a[\s]+href=(?[^\s>]+)[^>]*>(?
for(int j=0;j
string sTemp=Regex.Split(sHtml,"",RegexOptions.IgnoreCase)[j+1]; ",RegexOptions.IgnoreCase)[0];
string sHref=Regex.Split(sTemp,"
string sDateTemp=Regex.Split(sTemp,"",RegexOptions.IgnoreCase)[1];
string sDate=Regex.Split(sDateTemp,"",RegexOptions.IgnoreCase)[1];
Matches=Regex.Matches(sHref,strPattern,RegexOptions.IgnoreCase|RegexOptions.Compiled);
foreach(Match NextMatch in Matches)
{
string URL="();
string title=NextMatch.Groups["Text"].Value.ToString().Trim();
string htmlContent=GetWebContent1(URL);
string sContentTemp=Regex.Split(htmlContent,"",RegexOptions.IgnoreCase)[1];
",RegexOptions.IgnoreCase)[0];
string sContent=Regex.Split(sContentTemp,"
sContentTemp=Regex.Split(sContent,"",RegexOptions.IgnoreCase)[2];
sContent=Regex.Split(sContentTemp,"",RegexOptions.IgnoreCase)[0];
// Strip residual markup from the content (original comment garbled).
string sContent1=Regex.Replace(sContent,@""," ",RegexOptions.IgnoreCase|RegexOptions.Compiled);
DataRow dr = dt.NewRow();
dr["title"]=title;
dr["URL"]=URL;
dr["content"]=sContent1;
dr["newsdate"]=sDate;
dt.Rows.Add(dr);
}
}
return dt;
}