Efficient URL Indexing in Web Crawlers
Source: http://blog.csdn.net/chinafe/article/details/7816878
A previous version used a fixed-size array for storage, but arrays are limited in size. Here we improve the method by using a vector, which scales to 10 million entries. In testing, the index file for 10 million entries is about 9 MB.
Complete implementation code:
# Include <stdio. h> # include <windows. h ># include <vector> Using STD: vector; void main () {vector <bool> vints (10000000, bool (0 )); // create a vector containing 10000000 boolean data and initialize it to 0: DWORD dw_len = vints. size () * sizeof (bool); printf ("Write vints Len: % d \ n", dw_len); vints. at (10) = 1; // write a piece of data DWORD dwwrite; handle hfilewrite = createfile ("C: \ index.txt", generic_write, file_cmd_read, null, open_always ); // Save the index file if (hfilewrite = I Nvalid_handle_value) {closehandle (hfilewrite); return;} writefile (hfilewrite, (char *) vints. begin (), dw_len, & dwwrite, 0); closehandle (hfilewrite); vints. clear (); vector <bool> vints_read (10000000, bool (0); // create a vector containing 10000000 boolean data and initialize it to 0: handle hfileread = createfile ("C: \ index.txt", generic_read, file_assist_read, null, open_existing,); // read the index file if (hfileread = invalid_handle_value) return; DWORD ret = 0; If (! Readfile (hfileread, (char *) vints_read.begin (), dw_len, & ret, 0) {MessageBox (0, "read", "Read index error", mb_ OK ); return;} closehandle (hfileread); If (Ret! = Dw_len) {MessageBox (0, "read", "Read index error", mb_ OK); return;} printf ("read vints Len: % d \ n", RET ); if (vints_read.at (10) = 1) // test the read result {printf ("sucess \ n");} return ;}