This article shows how to run Scrapy from a standalone Python script instead of the `scrapy crawl` command line. The example walks through a small wrapper class that runs spiders in a subprocess and collects the scraped items; it may be a useful reference if you need to drive Scrapy programmatically.
The code is as follows:
#!/usr/bin/python
# NOTE(review): this snippet targets a pre-1.0 Scrapy API (scrapy.conf,
# scrapy.project, scrapy.xlib.pydispatch); these modules were removed in
# modern Scrapy, which provides scrapy.crawler.CrawlerProcess directly.
import os

# Must be set at the top, before any scrapy import reads the settings module.
os.environ.setdefault('SCRAPY_SETTINGS_MODULE', 'project.settings')

from scrapy import log, signals, project
from scrapy.xlib.pydispatch import dispatcher
from scrapy.conf import settings
from scrapy.crawler import CrawlerProcess
from multiprocessing import Process, Queue
class CrawlerScript(object):
    """Run Scrapy spiders from a script and collect the scraped items.

    Each call to :meth:`crawl` starts the spider in a fresh child process,
    because a (pre-1.0) Scrapy/Twisted reactor cannot be restarted within
    the same process once stopped.
    """

    def __init__(self):
        # Build a crawler from the project settings and install it as the
        # process-wide crawler if one is not already registered.
        self.crawler = CrawlerProcess(settings)
        if not hasattr(project, 'crawler'):
            self.crawler.install()
        self.crawler.configure()
        self.items = []
        # Collect every scraped item via the item_passed signal.
        dispatcher.connect(self._item_passed, signals.item_passed)

    def _item_passed(self, item):
        # Signal handler: accumulate each scraped item.
        self.items.append(item)

    def _crawl(self, queue, spider_name):
        """Run *spider_name* to completion (in the child process) and put
        the collected items on *queue* for the parent to read."""
        spider = self.crawler.spiders.create(spider_name)
        if spider:
            self.crawler.queue.append_spider(spider)
        self.crawler.start()
        self.crawler.stop()
        queue.put(self.items)

    def crawl(self, spider):
        """Run the named spider in a subprocess; return the list of items.

        Blocks until the child process finishes.
        """
        queue = Queue()
        p = Process(target=self._crawl, args=(queue, spider))
        p.start()
        p.join()
        return queue.get(True)
# Usage: runs spider1 once, then spider2 three times, collecting the
# items returned by each run.
if __name__ == "__main__":
    log.start()
    items = list()
    crawler = CrawlerScript()
    items.append(crawler.crawl('spider1'))
    for i in range(3):
        items.append(crawler.crawl('spider2'))
    # print(items) works on both Python 2 and Python 3 for a single argument.
    print(items)
I hope this article helps you run Scrapy spiders from your own Python scripts.