Code One:
#!/usr/bin/python#-*-coding:utf-8-*-#encoding =utf-8 import threadingimport queueimport sysimport Urllib2import Reimport MYSQLDB # # Database variable settings #db_host = ' 127.0.0.1 ' Db_user = "xxxx" DB_PASSWD = "XXXXXXXX" db_name = "XXXX" # # Variable Settings #thread_lim IT = 3jobs = Queue.queue (5) Singlelock = Threading. Lock () info = Queue.queue () def WorkerBee (inputlist): For x in Xrange (thread_limit): print ' Thead {0} started. '. Format (x) T = Spider () t.start () for I in Inputlist:try:jobs.put (i, Block=true, Timeo ut=5) Except:singlelock.acquire () print "The queue is full!" Singlelock.release () # Wait for the threads to finish Singlelock.acquire () # Acquire the lock so we can Prin T print "Waiting for threads to finish." Singlelock.release () # Release the Lock Jobs.join () # This command waits for all threads to finish. # While not Jobs.empty (): # Print Jobs.get () def getTitle (url,time=TEN): Response = Urllib2.urlopen (url,timeout=time) HTML = Response.read () response.close () Reg = R ' <title> (. *?) </title> ' title = Re.compile (reg). FindAll (HTML) # title = Title[0].decode (' gb2312 ', ' replace '). Encode (' utf-8 ') title = Tit Le[0] return title class Spider (Threading. Thread): def run (self): while 1:try:job = Jobs.get (true,1) Singlelock . Acquire () title = GetTitle (Job[1]) info.put ([Job[0],title], block=true, timeout=5) # print ' This {0} is {1} '. Format (job[1],title) singlelock.release () Jobs.task_done () Except:break; if __name__ = = ' __main__ ': con = None urls = [] Try:con = MySQLdb.connect (db_host,db_user,db_passwd,db_na ME) cur = con.cursor () cur.execute (' SELECT id,url from ' table_name ' WHERE ' status ' =0 LIMIT ') rows = Cur.fetchall () for row in rows: # Print row Urls.append ([row[0],row[1]]) WorkerBee (U RLS) while not Info.empty (): Print info.geT () finally:if con:con.close ()
Code Two:
#!/usr/bin/python#-*-coding:utf-8-*-#encoding =utf-8#filename:robot.py Import threading,queue,sys,urllib2,re## Variable Settings #thread_limit = 3 #设置线程数jobs = queue.queue (5) #设置队列长度singlelock = Threading. Lock () #设置一个线程锁 to avoid repeated calls to URLs = [' http://games.sina.com.cn/w/n/2013-04-28/1634703505.shtml ', '/HTTP/ Games.sina.com.cn/w/n/2013-04-28/1246703487.shtml ', ' http://games.sina.com.cn/w/n/2013-04-28/1028703471.shtml ', ' http://games.sina.com.cn/w/n/2013-04-27/1015703426.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/ 1554703373.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1512703346.shtml ', ' http://games.sina.com.cn/w/n/ 2013-04-26/1453703334.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1451703333.shtml ', ' HTTP// Games.sina.com.cn/w/n/2013-04-26/1445703329.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1434703322.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1433703321.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/ 1433703320.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1429703318.sHTML ', ' http://games.sina.com.cn/w/n/2013-04-26/1429703317.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/ 1409703297.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1406703296.shtml ', ' http://games.sina.com.cn/w/n/ 2013-04-26/1402703292.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1353703286.shtml ', ' HTTP// Games.sina.com.cn/w/n/2013-04-26/1348703284.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1327703275.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1239703265.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/ 1238703264.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1231703262.shtml ', ' http://games.sina.com.cn/w/n/ 2013-04-26/1229703261.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1228703260.shtml ', ' HTTP// Games.sina.com.cn/w/n/2013-04-26/1223703259.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1218703258.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1202703254.shtml 
', ' http://games.sina.com.cn/w/n/2013-04-26/ 1159703251.shtml ', ' http://games.sina.com.cn/w/n/2013-04-26/1139703233.shtml '] def workerbee (inputlist): For x in Xrange (thread_limit): print ' Thead {0} started. '. Format (x) T = Spider () t.start () for I in Inputlist:try:jobs.put (i, block=true, timeout=5) except: Singlelock.acquire () print "The queue is full!" Singlelock.release () # Wait for the threads to finish Singlelock.acquire () # Acquire the lock so we can print print "Waiting for threads to finish." Singlelock.release () # Release the Lock Jobs.join () # This command waits for all threads to finish. # While not Jobs.empty (): # Print Jobs.get () def getTitle (url,time=10): Response = Urllib2.urlopen (url,timeout=time) H tml = Response.read () response.close () Reg = R '<title>(.*?)</title>' title = Re.compile (reg). FindAll (HTML) title = Title[0].decode (' gb2312 ', ' replace '). Encode (' Utf-8 ') return title class Spider (Threading. Thread): def run (self): while 1:try:job = Jobs.get (true,1) singlelock.acquire () title = ge Ttitle (Job) print ' This {0} is {1} '. Format (job,title) singlelock.release () Jobs.task_done () exce Pt:break; if __name__ = = ' __main__ ': WorkerBee (URLs)