<?php
// Fetch several URLs concurrently with curl_multi and append each
// response body to a single output file.
$urls = array(
    'http://www.sina.com.cn/',   // note: leading spaces removed — a space inside the
    'http://www.sohu.com/',      // URL literal would make curl reject the request
    'http://www.163.com/',
);
$save_to = '/test.txt';

// Open the output file in append mode; fail fast if it cannot be opened.
$st = fopen($save_to, 'a');
if ($st === false) {
    die("Unable to open $save_to for appending\n");
}

$mh = curl_multi_init();
$conn = array();
foreach ($urls as $i => $url) {
    $conn[$i] = curl_init($url);
    curl_setopt($conn[$i], CURLOPT_USERAGENT, 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)');
    curl_setopt($conn[$i], CURLOPT_HEADER, 0);           // body only, no response headers
    curl_setopt($conn[$i], CURLOPT_CONNECTTIMEOUT, 60);
    // Return the transfer as a string instead of writing it to the browser.
    curl_setopt($conn[$i], CURLOPT_RETURNTRANSFER, true);
    curl_multi_add_handle($mh, $conn[$i]);
}

// Drive all transfers to completion. curl_multi_select() blocks until
// there is activity, avoiding the CPU-spinning busy loop of calling
// curl_multi_exec() back-to-back.
$active = null;
do {
    $status = curl_multi_exec($mh, $active);
    if ($active) {
        curl_multi_select($mh);
    }
} while ($active && $status == CURLM_OK);

// Collect each finished transfer's body and append it to the file
// (could just as easily be stored in a database instead).
foreach ($urls as $i => $url) {
    $data = curl_multi_getcontent($conn[$i]);
    fwrite($st, $data);
}

// Release every easy handle, then the multi handle itself.
foreach ($urls as $i => $url) {
    curl_multi_remove_handle($mh, $conn[$i]);
    curl_close($conn[$i]);
}
curl_multi_close($mh);
fclose($st);
?>