C# remote fetch/read web content

Source: Internet
Author: User

Reprinted from: http://blog.csdn.net/gisfarmer/article/details/2836904

  1. Using System;
  2. Using System.Collections.Generic;
  3. Using System.Text;
  4. Using System.Net;
  5. Using System.IO;
  6. Namespace thief
  7. {
  8. Class Program
  9. {
  10. static void Main (string[] args)
  11. {
  12. try {
  13. WebClient mywebclient = new WebClient ();
  14. Mywebclient.credentials = CredentialCache.DefaultCredentials; //Gets or sets the network credentials that are used to authenticate requests to Internet resources.
  15. byte[] pagedata = Mywebclient.downloaddata (http://www.163.com);//download data from the specified Web site
  16. string pagehtml = Encoding.Default.GetString (pagedata); //If you are using GB2312 to obtain a website page, use this sentence
  17. //string pagehtml = Encoding.UTF8.GetString (pagedata);//If the Get Site page uses UTF-8, use this sentence
  18. Console.WriteLine (pagehtml); //Enter what you get in the console
  19. using (StreamWriter SW = new StreamWriter ("c://test//ouput.html"))//writes the acquired content to the text
  20. {
  21. Sw. Write (pagehtml);
  22. }
  23. Console.ReadLine (); //Let the console pause, or flash past
  24. }
  25. catch (WebException webEx) {
  26. Console.WriteLine (WebEx.Message.ToString ());
  27. }
  28. }
  29. }
  30. }

Improved version: the same fetch logic, driven by a timer

      1. Using System;
      2. Using System.Text;
      3. Using System.Timers;
      4. Using System.Net;
      5. Using System.IO;
      6. <summary>
      7. Fetches the contents of the specified webpage every 5 seconds and saves it as a file in the C:/test directory
      8. </summary>
      9. Namespace Timertest
      10. {
      11. Class Program
      12. {
      13. public static string outfilename = ""; //generated file name
      14. public static string myurl = "http://bxg.cfchina.cn"; The page to crawl
      15. static void Main (string[] args)
      16. {
      17. Timer MyTimer = new timer ();
      18. MyTimer. Elapsed + =new Elapsedeventhandler (GETURL); Events that specify Timers
      19. MyTimer. Interval = 5000; //Catch once every 5 seconds
      20. MyTimer. Start ();
      21. MyTimer. Enabled = true;
      22. while (Console.read ()! = ' q ') //until you press the lowercase letter Q to exit, otherwise crawl down
      23. {
      24. }
      25. }
      26. //Timer event content
      27. static void GetUrl (object source, Elapsedeventargs e)
      28. {
      29. Try
      30. {
      31. WebClient mywebclient = new WebClient ();
      32. Mywebclient.credentials = CredentialCache.DefaultCredentials; //Gets or sets the network credentials that are used to authenticate requests to Internet resources.
      33. byte[] Pagedata = Mywebclient.downloaddata (Myurl); //Download data from a specified website
      34. string pagehtml = Encoding.Default.GetString (pagedata); //If you are using GB2312 to obtain a website page, use this sentence
      35. //string pagehtml = Encoding.UTF8.GetString (pagedata);//If the Get Site page uses UTF-8, use this sentence
      36. //console.writeline (pagehtml);//Enter what you get in the console
      37. Outfilename = "c://test//" + DateTime.Now.ToString (). Replace (" ", ""). Replace (":", ""). Replace ("-", "") + ". html";
      38. using (StreamWriter SW = new StreamWriter (outfilename))//write the acquired content to the text
      39. {
      40. Sw. Write (pagehtml);
      41. }
      42. Console.WriteLine (Outfilename); //Output the file name after saving
      43. }
      44. catch (WebException webEx)
      45. {
      46. Console.WriteLine (WebEx.Message.ToString ());
      47. }
      48. }
      49. }
      50. }

Contact Us

The content on this page is sourced from the Internet and does not represent Alibaba Cloud's opinion; the products and services mentioned on this page have no relationship with Alibaba Cloud. If the content of the page is confusing, please write us an email; we will handle the problem within 5 days of receiving it.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.