The idea is to use curl to fetch the web page source, and then search the source for a keyword to pick out the image URL.
Example:
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <curl/curl.h>

void get_key_from_str(char *origin, char *str1, char *str2, char *key);

int main(int argc, char **argv)
{
    CURL *curl;
    FILE *fp = NULL, *fp_read = NULL;
    int file_size = 0;
    char *tmp = NULL;
    int flag = 0;
    char key[1024] = {0};
    char str1[32] = {0};
    char str2[32] = {0};

    if ((fp = fopen("test.txt", "w")) == NULL)
    {
        return 1;
    }

    curl = curl_easy_init();
    if (curl)
    {
        flag = 1;
        curl_easy_setopt(curl, CURLOPT_URL, argv[1]);   /* set the URL to fetch */
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);  /* save the page source to a file; it seems the output can only go to a file, not into a string */
        curl_easy_perform(curl);                        /* start the crawl */
        curl_easy_cleanup(curl);
        fclose(fp);

        fp_read = fopen("test.txt", "rb");
        fseek(fp_read, 0, SEEK_END);                    /* move the pointer to the end of the file */
        file_size = ftell(fp_read);                     /* work out the size of the file */
        fseek(fp_read, 0, SEEK_SET);                    /* move the pointer back to the front */
        tmp = (char *)malloc((file_size + 1) * sizeof(char));  /* allocate a buffer (one extra byte for the terminator) */
        printf("file_size: %d\n", file_size);
        fread(tmp, file_size, sizeof(char), fp_read);   /* read the whole file */
        tmp[file_size] = '\0';                          /* terminate so strstr() can be used safely */
        fclose(fp_read);

        snprintf(str1, sizeof(str1), "img src=\"");
        snprintf(str2, sizeof(str2), "\"");
        get_key_from_str(tmp, str1, str2, key);         /* get the image URL */

        printf("key: %s\n", key);
        free(tmp);
    }

    if (!flag)
    {
        fclose(fp);
    }

    return 0;
}

/* grab the keyword between str1 and str2 */
void get_key_from_str(char *origin, char *str1, char *str2, char *key)
{
    char *p = strstr(origin, str1);
    if (p == NULL)
        return;
    char *q = strstr(p + strlen(str1), str2);
    if (q == NULL)
        return;
    int len = q - p - strlen(str1);
    snprintf(key, len + 1, "%s", p + strlen(str1));
    key[len] = '\0';
}
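The comment in the code above notes that the page source is written out to a file because the default write target seems to only be a file. libcurl can also hand the data straight to a callback via CURLOPT_WRITEFUNCTION and build the page up in memory, which avoids the test.txt round trip. A minimal sketch of that variant (struct page_buf and write_cb are illustrative names introduced here, not part of the original program that the build command below compiles):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <curl/curl.h>

/* Accumulate the downloaded page into a growing heap buffer. */
struct page_buf {
    char *data;
    size_t size;
};

static size_t write_cb(char *ptr, size_t size, size_t nmemb, void *userdata)
{
    size_t total = size * nmemb;
    struct page_buf *buf = (struct page_buf *)userdata;

    char *grown = realloc(buf->data, buf->size + total + 1);
    if (grown == NULL)
        return 0;                      /* returning 0 tells libcurl to abort the transfer */

    buf->data = grown;
    memcpy(buf->data + buf->size, ptr, total);
    buf->size += total;
    buf->data[buf->size] = '\0';       /* keep the buffer usable with strstr() */
    return total;
}

int main(int argc, char **argv)
{
    if (argc < 2)
        return 1;

    struct page_buf buf = { NULL, 0 };
    CURL *curl = curl_easy_init();
    if (curl) {
        curl_easy_setopt(curl, CURLOPT_URL, argv[1]);
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_cb);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, &buf);
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
    }

    if (buf.data) {
        printf("fetched %zu bytes\n", buf.size);
        /* buf.data could now be passed to get_key_from_str() directly */
        free(buf.data);
    }
    return 0;
}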
gcc -g -Wall main.c -o test -lcurl
./test URL
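The program above only prints the extracted image URL. To actually save the picture, the same easy-handle pattern can be reused with the extracted URL as the target. A hedged sketch (download_image, the output path, and the CURLOPT_FOLLOWLOCATION option are assumptions added here, not from the original post):

#include <stdio.h>
#include <curl/curl.h>

/* Sketch: download the image at img_url into a local file.
 * Returns 0 on success, non-zero on failure. */
int download_image(const char *img_url, const char *out_path)
{
    CURL *curl = curl_easy_init();
    if (curl == NULL)
        return 1;

    FILE *fp = fopen(out_path, "wb");
    if (fp == NULL) {
        curl_easy_cleanup(curl);
        return 1;
    }

    curl_easy_setopt(curl, CURLOPT_URL, img_url);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);       /* default write callback writes to the file */
    curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);  /* follow redirects, if any */

    CURLcode res = curl_easy_perform(curl);

    fclose(fp);
    curl_easy_cleanup(curl);
    return (res == CURLE_OK) ? 0 : 1;
}

Something like download_image(key, "picture.jpg") could then be called right after get_key_from_str() in the original main().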
C language: calling the curl library to crawl web page images (repost)