ASP.NET train ticket source information capturing system
I. System functions:
1. Captures the latest ticket source information from the network at set intervals;
2. Supports filtering of ticket source information by keyword;
3. Supports setting the capture interval;
4. Supports linking to the ticket source URL;
II. Runtime environment:
1. .NET Framework 2.0 and above;
2. IE 6.0 and later;
III. Implementation ideas:
1. Set the capture addresses and parsing method
public static List<Site> GetDefaultSites()
{
    List<Site> sites = new List<Site>();
    sites.Add(new Site()
    {
        Name = "Firefox",
        Url = "http://www.huochepiao.com/city/search.asp?leixing=%d7%aa%c8%c3&chufa=&daoda=",
        RegexPattern = @"·<a href=""(.*?)"" mce_href=""(.*?)"" target=_blank>(.*?)</a>",
        Encoding = Encoding.Default,
        Keys = new string[] { "lie" }
    });
    sites.Add(new Site()
    {
        Name = "People's Network",
        Url = "http://beijing.baixing.com/huochepiao?%e5%8f%91%e8%bd%a6%e6%97%a5%e6%9c%9f=&%e8%bd%a6%e6%ac%a1=&%e5%87%ba%e5%8f%91%e5%9f%8e%e5%b8%82=%e5%8c%97%e4%ba%ac&%e5%88%b0%e8%be%be%e5%9f%8e%e5%b8%82=&wanted=1",
        RegexPattern = @"><a href=""/(.*?)"">(.*?)</a></td>",
        Encoding = Encoding.UTF8,
        Domain = "http://beijing.baixing.com/",
        Keys = new string[] { "lie" }
    });
    sites.Add(new Site()
    {
        Name = "Ganji",
        Url = "http://bj.ganji.com/piao",
        RegexPattern = @"<dt><a href=""/(.*?)"" target=""_blank"">(.*?)</a></dt>",
        Encoding = Encoding.UTF8,
        Domain = "http://bj.ganji.com/",
        Keys = new string[] { "lie" }
    });
    sites.Add(new Site()
    {
        Name = "Youku",
        Url = "http://huoche.kuxun.cn/zhuanrang-beijing-wuhan.html",
        RegexPattern = @"<div class=""col_11 left"">(.*?)<br/><div style=""padding: 8px 0 0 0px;"" mce_style=""padding: 8px 0 0 0px;""><a target='_blank' href=""(.*?)"" mce_href=""(.*?)"">",
        Encoding = Encoding.UTF8,
        Domain = "",
        IsChange = "yes"
    });
    return sites;
}
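The Site class used in the initializers above is not shown in the original listing. A minimal sketch, with property names inferred from the initializers and types assumed, could look like this:

using System.Text;

// Sketch of the Site entity; property names come from the initializers above,
// the types are assumptions.
public class Site
{
    public string Name { get; set; }          // Display name of the source site
    public string Url { get; set; }           // Page downloaded on each capture cycle
    public string RegexPattern { get; set; }  // Pattern that extracts the link and content
    public Encoding Encoding { get; set; }    // Page encoding (Default/GBK or UTF-8)
    public string Domain { get; set; }        // Prefix for relative links
    public string[] Keys { get; set; }        // Keywords used to filter results
    public string IsChange { get; set; }      // Non-empty when the regex groups are swapped
}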
2. Capture web page information
public string GetNetString(string url, Encoding codeType)
{
    string str = "";
    try
    {
        // Download the raw page bytes and decode them with the site's encoding
        WebClient client = new WebClient();
        byte[] pageData = client.DownloadData(url);
        str = codeType.GetString(pageData);
    }
    catch
    {
        // Ignore network failures and return an empty string
    }
    return str;
}
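A quick usage sketch (SiteConfig and ClsNetInfoCatchServer are assumed class names; the original article does not name the classes hosting GetDefaultSites and GetNetString):

// Download and decode the first configured site's page
Site first = SiteConfig.GetDefaultSites()[0];
string html = new ClsNetInfoCatchServer().GetNetString(first.Url, first.Encoding);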
3. Parse the ticket source information
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

public class ClsNetInfoParseServer
{
    // Results captured so far; used to filter out URLs that were already seen
    private static IList<GetResult> lsList = new List<GetResult>();

    public void ClearLs()
    {
        lsList = new List<GetResult>();
    }

    private bool IsHas(string url)
    {
        foreach (var item in lsList)
        {
            if (item.Url == url)
            {
                return true;
            }
        }
        return false;
    }

    public IList<GetResult> DoNetInfoParse(string strNetInfo, Site site, string[] keys)
    {
        IList<GetResult> list = new List<GetResult>();
        MatchCollection mc = Regex.Matches(strNetInfo, site.RegexPattern);
        foreach (Match m in mc)
        {
            if (m.Success)
            {
                GetResult r = new GetResult();
                // For sites with IsChange set, the capture groups are swapped:
                // group 1 holds the content and group 2 holds the link
                if (!string.IsNullOrEmpty(site.IsChange))
                {
                    r.Content = site.Domain + m.Groups[1].Value.Trim();
                    r.Url = m.Groups[2].Value.Trim();
                }
                else
                {
                    r.Url = site.Domain + m.Groups[1].Value.Trim();
                    r.Content = m.Groups[2].Value.Trim();
                }
                if (!IsHas(r.Url))
                {
                    // Keyword filtering: keep the result only if it contains one of the keys
                    bool isContainKey = false;
                    if (keys != null && keys.Length > 0)
                    {
                        foreach (string key in keys)
                        {
                            if (r.Content.Contains(key))
                            {
                                isContainKey = true;
                                break;
                            }
                        }
                    }
                    else
                    {
                        isContainKey = true;
                    }
                    if (!isContainKey)
                        continue;
                    r.GetDateTime = DateTime.Now.ToString();
                    r.Name = site.Name;
                    lsList.Add(r);
                    list.Add(r);
                }
            }
        }
        return list;
    }
}
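Two pieces are still missing from the listing: the GetResult entity and the code that drives the capture at the configured interval. Minimal sketches of both follow; the property types, the CaptureScheduler class, and the SiteConfig/ClsNetInfoCatchServer hosting classes are assumptions, not taken from the original.

using System.Timers;

// Sketch of the GetResult entity; property names come from DoNetInfoParse above,
// the types are assumptions.
public class GetResult
{
    public string Name { get; set; }         // Source site name
    public string Url { get; set; }          // Link to the ticket source page
    public string Content { get; set; }      // Matched ticket source text
    public string GetDateTime { get; set; }  // Time the entry was captured
}

// Hypothetical interval-driven capture loop tying the three steps together;
// SiteConfig and ClsNetInfoCatchServer are assumed hosting classes for
// GetDefaultSites and GetNetString.
public class CaptureScheduler
{
    private readonly Timer timer = new Timer();
    private readonly ClsNetInfoCatchServer catcher = new ClsNetInfoCatchServer();
    private readonly ClsNetInfoParseServer parser = new ClsNetInfoParseServer();

    // intervalMilliseconds is the user-configurable capture interval
    public void Start(double intervalMilliseconds)
    {
        timer.Interval = intervalMilliseconds;
        timer.Elapsed += (sender, args) => CaptureOnce();
        timer.Start();
    }

    private void CaptureOnce()
    {
        foreach (Site site in SiteConfig.GetDefaultSites())
        {
            string html = catcher.GetNetString(site.Url, site.Encoding);
            var newResults = parser.DoNetInfoParse(html, site, site.Keys);
            // Bind newResults to the UI (grid, list, etc.) and link each Url
        }
    }
}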