# Author: yeshengbao
# -*- coding: utf-8 -*-
# @Time: 2018/5/24 21:38
# process: like one person having several clones (ideally as many as there are CPU cores) all working at almost the same time.
# thread: like one person multitasking — boiling water while also eating and sweeping. The scheduler switches between tasks at arbitrary points, so the water may boil before the sweeping is finished; this interleaving is where data loss / races can occur.
# coroutine: similar to a thread but even lighter-weight; it yields control cooperatively at programmed points instead of wasting time blocking, and is very cheap to create.
ImportdatetimeImportRequestsImportOSImportHashlib fromMultiprocessingImportProcess fromlxmlImportetree fromThreadingImportThread fromGeventImportMonkeyImportGeventmonkey.patch_socket ()#To Open the Monkey method, you must addBag ='Books'if notos.path.exists (bag): Os.mkdir (bag)classDoutu (object):def __init__(self): Self.url='http://www.23us.so/files/article/html/6/6926/index.html'self.headers= { "user-agent":"mozilla/5.0 (Windows NT 10.0; Win64; x64) applewebkit/537.36 (khtml, like Gecko)" "chrome/64.0.3282.186 safari/537.36", } defMD5 (self, STRs): Stri= Hashlib.md5 (Strs.encode ('Utf-8')) Key=stri.hexdigest ()returnKeydefget_source (self, URL, headers):Try: Response= Requests.get (URL, headers=headers, timeout=10). ContentreturnResponseexceptException:returnself.get_source (URL, headers)defget_detail_content (Self, frction_detail_url):iffrction_detail_url:html= Self.get_source (Frction_detail_url, Self.headers). Decode ('Utf-8') Doc=etree. HTML (HTML) title= Doc.xpath ('.//div[@class = "Bdsub"]/dl/dd[1]/h1/text ()') [0] content="'. Join (Doc.xpath ('.//div[@class = "Bdsub"]/dl/dd[@id = "Contents"]/text ()'). Strip (). Replace ('\ n',"'). Replace ('\ t',"') ifContent:with Open (Bag+'\\'+'Text.txt','A +', encoding='Utf-8') as Fp:fp.write (title+' :'+ content +'\ n') Print('writing {}_{}'. Format (title, content))defAnalysis_index (self, HTML): Doc=etree. 
HTML (HTML) td_list= Doc.xpath ('.//table[@id = "at"]//td[@class = "L"]') Thread_list= [] forTdinchTd_list:xie= Gevent.spawn (Self.get_detail_content, Td.xpath ('./a/@href') [0]) Xie.start () Thread_list.append (Xie)Print(Xie)#While True: #If Len (thread_list) < 100: # can choose how many threads to open #th = Thread (target=self.get_detail_content, args= (Td.xpath ('./a/@href ') [0],)) #Th.start () #thread_list.append (TH) # Break # #Else: #print (thread_list) #Time.sleep (3) #For ths in thread_list: #if not ths.is_alive (): #thread_list.remove (THS) forThinchThread_list:#to ensure the end of thread or co-operationTh.join ()defBegin_spider (self): HTML= Self.get_source (Self.url, Self.headers). Decode ('Utf-8') Self.analysis_index (HTML) start_time= Datetime.datetime.now ()#Program start timeDoutu =Doutu () doutu.begin_spider () Over_time= Datetime.datetime.now ()#Program End TimeTotal_time = (over_time-start_time). Total_seconds ()Print('Program Total%s seconds'%total_time)#Thread 620 page approx. 40s#co-Process 18s#the usage of the process must function in the IF __name__ = = ' __main__ ': Inside#thread_lists = []#for page in range (, Bayi):#While True:#If Len (thread_lists) < 8:## th = Threading. Thread (target=dou.begin_by_page,args= (page,))#th = multiprocessing. Process (Target=dou.begin_by_page, args= (page,))#Th.start ()#thread_lists.append (TH)# Break#Else:#Time.sleep (3)#print (thread_lists)#print (' Process pool is full ')#For ths in thread_lists:#if not ths.is_alive ():#thread_lists.remove (THS)#For ths in thread_lists:#Ths.join ()
Python process/thread/coroutine comparison test