/**
 * Saves the entire HTML content of a web page to a specified file.
 * Capturing a web page this way works, but the URL must be entered
 * manually in the source before the HTML can be saved to the file.
 *
 * @author chenguoyong
 */
public class ScrubSelectedWeb {

    /** Platform line separator appended after each line read from the page. */
    private static final String CRLF = System.getProperty("line.separator");

    /**
     * Downloads the page at a hard-coded URL and writes its HTML to a
     * hard-coded output file, echoing the content to standard output.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        try {
            URL url = new URL("http://www.google.cn/");
            // try-with-resources guarantees both streams are closed even when
            // an exception is thrown mid-copy (the original leaked on failure).
            // UTF-8 is assumed for the page encoding; ideally this would be
            // taken from the response's Content-Type header.
            try (BufferedReader in = new BufferedReader(
                         new InputStreamReader(url.openStream(), "UTF-8"));
                 // NOTE(review): FileWriter uses the platform default charset
                 // for the output file — confirm that is acceptable.
                 BufferedWriter out = new BufferedWriter(
                         new FileWriter("D:/outPut.txt"))) {
                StringBuilder sb = new StringBuilder();
                String line;
                while ((line = in.readLine()) != null) {
                    sb.append(line).append(CRLF);
                }
                System.out.println(sb);
                out.write(sb.toString());
            }
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}