This is a basic web-search program. You enter the search criteria on the command line (the starting URL, the maximum number of URLs to process, and the string to search for),
and it visits URLs on the internet one by one, finding and printing the pages that match the search criteria. The prototype of this program comes from *The Art of Java*.
To make it easier to analyze, the webmaster removed the GUI part and slightly modified the code to work with JDK 1.5. Based on this program, you can write "crawlers" that
search the internet for images or e-mail addresses, download web pages, and so on.
Let's look at the process of running the program:
Packagecom.utils;ImportJava.io.DataOutputStream;ImportJava.io.File;ImportJava.io.FileOutputStream;Importjava.io.IOException;ImportOrg.apache.commons.httpclient.DefaultHttpMethodRetryHandler;Importorg.apache.commons.httpclient.HttpClient;Importorg.apache.commons.httpclient.HttpException;ImportOrg.apache.commons.httpclient.HttpStatus;ImportOrg.apache.commons.httpclient.methods.GetMethod;ImportOrg.apache.commons.httpclient.params.HttpMethodParams; Public classUtilio {/*Download the page that the URL points to*/ Public Static voidDownloadFile (FinalString URL,FinalString name,FinalString type,FinalString Path) { /*1. Generate the Httpclinet object and set the parameters*/HttpClient HttpClient=NewHttpClient (); //Set Http connection Timeout 5sHttpclient.gethttpconnectionmanager (). Getparams (). Setconnectiontimeout (5000); /*2. Generate the GetMethod object and set the parameters*/GetMethod GetMethod=Newgetmethod (URL); //set a GET request timeout of 5sGetmethod.getparams (). Setparameter (Httpmethodparams.so_timeout, 5000); //set Request retry processingGetmethod.getparams (). Setparameter (Httpmethodparams.retry_handler,NewDefaulthttpmethodretryhandler ()); /*3. Executing an HTTP GET request*/ Try { intStatusCode =Httpclient.executemethod (GetMethod); //Determine the status code of the access if(StatusCode! =HTTPSTATUS.SC_OK) {System.err.println ("Method failed:" +getmethod.getstatusline ()); } /*4. 
Handling HTTP Response Content*/ byte[] Responsebody = Getmethod.getresponsebody ();//read as byte array//generate file name when saving based on Web page URLSavetolocalnewfile (Responsebody, path,name+type); } Catch(HttpException e) {//A fatal exception may be the protocol is wrong or the content returned is problematicSystem.out.println ("Please check your provided HTTP address!"); E.printstacktrace (); } Catch(IOException e) {//Network exception occurredE.printstacktrace (); } finally { //Release Connectiongetmethod.releaseconnection (); } } Private Static voidSavetolocalnewfile (byte[] data, String filedir,string fileName) { Try{String FilePath= filedir+ "/" +FileName; System.out.println (FilePath); File FileNew=NewFile (FilePath);//new A file construction parameter is a stringSystem.out.println (); File RootFile=filenew.getparentfile ();//Get parent folder if( !filenew.exists ()) {Rootfile.mkdirs (); Filenew.createnewfile (); } DataOutputStream out=NewDataOutputStream (NewFileOutputStream (filenew)); for(inti = 0; i < data.length; i++) Out.write (Data[i]); Out.flush (); Out.close (); } Catch(IOException e) {e.printstacktrace (); } } }
Source code of a native Java crawler mechanism.