Python crawler that scrapes used-car listings, with Pandas and matplotlib to generate graphs


The program scrapes used-car sales listings from renrenche.com, capturing the car series, model, purchase date, asking price, mileage, down payment, and so on. It is split into two Scrapy spiders that hand work off through Redis: the first builds the brand/series classification and queues listing-page URLs, and the second consumes those URLs and writes the listing details to MongoDB. Without further ado, here is the code.
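The hand-off between the two spiders is a Redis list named 'carurl': the first spider pushes "classid,url,pid" triples onto it, and the second reads them back in its __init__ to build start_urls. As a minimal sketch (assuming a local Redis on the default port), the queue can be inspected like this:

import redis

# the first spider pushes "classid,url,pid" triples onto the 'carurl' list;
# the second spider pops them in its __init__ to build start_urls
r = redis.Redis(host='127.0.0.1', port=6379, db=0)
for item in r.lrange('carurl', 0, 4):   # peek at the first few entries
    classid, url, pid = item.decode().split(',')
    print(classid, url, pid)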

Car series .py file

# -*- coding: utf-8 -*-
import re
from urllib.request import urlopen
from scrapy.http import Request
from bs4 import BeautifulSoup
from lxml import etree
import pymongo
import scrapy
from scrapy.selector import HtmlXPathSelector

client = pymongo.MongoClient(host="127.0.0.1")
db = client.renrenche
collection = db.Carclass                 # collection for the brand/series classification

import redis                             # Redis client library
r = redis.Redis(host='127.0.0.1', port=6379, db=0)


class RenrencheSpider(scrapy.Spider):
    name = "carinfo1"
    allowed_domains = ["renrenche.com"]  # domains the spider is allowed to visit
    start_urls = ["https://www.renrenche.com/bj/ershouche/"]

    # parse() is called back for every page that finishes downloading
    def parse(self, response):
        hxs = HtmlXPathSelector(response)
        hx = hxs.select('//div[@class="brand-more-content"]'
                        '/div[@class="brand-section brand-section-1"]'
                        '/p[@class="bl"]/span[@class="bn"]/a')
        for secitem in hx:
            url = secitem.select("@href").extract()
            c = "https://www.renrenche.com" + url[0]
            name = secitem.select("text()").extract()
            classid = self.insertMongo(name, None)
            print(c)
            print(name)
            # bind classid per iteration via a default argument
            request = Request(c, callback=lambda response, pid=str(classid):
                              self.parse_subclass(response, pid))
            yield request

    def parse_subclass(self, response, pid):
        hxs = HtmlXPathSelector(response)
        hx = hxs.select('//ul[@id="filter_series"]/li[@class=""]/a')
        for secitem in hx:
            urls = secitem.select("@href").extract()
            url = "https://www.renrenche.com" + urls[0]
            name = secitem.select("text()").extract()
            print(url)
            print(name)
            classid = self.insertMongo(name, pid)
            self.pushRedis(classid, url, pid)

    def insertMongo(self, classname, pid):
        classid = collection.insert({'classname': classname, 'pid': pid})
        return classid

    def pushRedis(self, classid, url, pid):
        carurl = '%s,%s,%s' % (classid, url, pid)
        r.lpush('carurl', carurl)
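A note on the API: HtmlXPathSelector and its .select()/.extract() methods come from very old Scrapy releases and have since been removed. On current Scrapy the same extraction would be written with response.xpath(); a rough equivalent of the brand loop above, shown only as a sketch:

# modern-Scrapy equivalent of the old HtmlXPathSelector calls: inside a
# Spider's parse(), response.xpath() replaces hxs.select()/.extract()
for a in response.xpath('//div[@class="brand-more-content"]'
                        '/div[@class="brand-section brand-section-1"]'
                        '/p[@class="bl"]/span[@class="bn"]/a'):
    href = a.xpath('@href').get()   # .get() ~ .extract()[0]
    name = a.xpath('text()').get()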

Car sale details .py file

# -*- coding: utf-8 -*-
import re
from urllib.request import urlopen
from scrapy.http import Request
import pymongo
import scrapy
from time import sleep
from scrapy.selector import HtmlXPathSelector

client = pymongo.MongoClient(host="127.0.0.1")
db = client.renrenche
collection = db.Carinfo                  # collection for the listing details

import redis                             # Redis client library
r = redis.Redis(host='127.0.0.1', port=6379, db=0)


class RenrencheSpider(scrapy.Spider):
    name = "carinfo2"
    allowed_domains = ["renrenche.com"]
    dict = {}
    start_urls = []

    def __init__(self):
        # read the "classid,url,pid" triples queued by the first spider
        a = r.lrange('carurl', 0, -1)
        for item in a:
            novelurl = bytes.decode(item)
            arr = novelurl.split(',')    # split the triple
            RenrencheSpider.start_urls.append(arr[1])
            pid = arr[0]
            url = arr[1]
            self.dict[url] = {"pid": pid, "num": 0}

    def parse(self, response):
        classinfo = self.dict[response.url]
        pid = classinfo['pid']
        num = classinfo['num']
        if num > 3:                      # stop after a few pages per series
            return None
        hxs = HtmlXPathSelector(response)
        hx = hxs.select('//ul[@class="row-fluid list-row js-car-list"]')
        s = ""
        for secitem in hx:
            hx1 = secitem.select('//li[@class="span6 list-item car-item"]'
                                 '/a[@rrc-event-param="search"]/h3')
            name = hx1.select("text()").extract()
            a = "model: " + name[0]
            s += a + "\n"
        for secitem in hx:
            hx2 = secitem.select('//div[@class="mileage"]/span[@class="basic"]')
            name = hx2.select("text()").extract()
            b = "purchase year/mileage: " + name[0] + "/" + name[1]
            s += b + "\n"
        for secitem in hx:
            hx3 = secitem.select('//div[@class="tags-box"]/div[@class="price"]')
            name = hx3.select("text()").extract()
            c = str(name[0]).strip()
            c = "selling price: " + c + " (ten thousand yuan)"
            s += c + "\n"
        for secitem in hx:
            hx4 = secitem.select('//div[@class="down-payment"]/div[@class="m-l"]')
            name = hx4.select("text()").extract()
            d = "down payment: " + name[0]
            s += d + "\n"
        print(s)
        classid = self.insertMongo(s, pid)

        # pagination (left disabled in the original): follow the "next page"
        # link and re-enter parse() until num exceeds 3
        # hxs = HtmlXPathSelector(response)
        # hx = hxs.select('//li[@class="lbf-pagination-item"]/a[@class="lbf-pagination-next"]')
        # urls = hx.select("@href").extract()
        # d = "https:" + urls[0]
        # classinfo['num'] += 1
        # self.dict[d] = classinfo
        # print(d)
        # request = Request(d, callback=self.parse)
        # yield request

    def insertMongo(self, classname, pid):
        classid = collection.insert({'classname': classname, 'pid': pid})
        return classid

    # def pushRedis(self, classid, c, pid):
    #     novelnameurl = '%s,%s,%s' % (classid, c, pid)
    #     r.lpush('novelnameurl', novelnameurl)
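The title mentions Pandas and matplotlib, but the plotting step is not included in the source. As a minimal sketch of how it could look, the snippet below reads the records stored by carinfo2 back out of MongoDB, extracts the asking prices, and draws a histogram; the 'classname' field and "selling price:" label match the spider above, while the regex and chart choices are assumptions.

# a minimal sketch, assuming carinfo2 has stored listings as above; the
# 'classname' field and the "selling price:" label follow the spider code,
# everything else here is illustrative
import re
import pymongo
import pandas as pd
import matplotlib.pyplot as plt

client = pymongo.MongoClient(host="127.0.0.1")
collection = client.renrenche.Carinfo

# pull the asking price out of every stored listing block
prices = []
for doc in collection.find():
    for line in doc['classname'].split('\n'):
        m = re.search(r'selling price:\s*([\d.]+)', line)
        if m:
            prices.append(float(m.group(1)))

# histogram of asking prices, in units of ten thousand yuan
df = pd.DataFrame({'price': prices})
df['price'].plot(kind='hist', bins=30,
                 title='Used-car asking prices (10k yuan)')
plt.xlabel('price (10k yuan)')
plt.savefig('prices.png')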

  
