This article describes how to run Scrapy inside a Python thread. It covers the relevant Python threading operations and is very practical; for more details on running Scrapy from Python, see the example below. It is shared here for your reference.
If you want to invoke Scrapy from within your own program, you can use the following code to run the crawler in a separate thread.
"""Code to run Scrapy crawler in a thread - works on Scrapy 0.8"""import threading, Queuefrom twisted.internet import reactorfrom scrapy.xlib.pydispatch import dispatcherfrom scrapy.core.manager import scrapymanagerfrom scrapy.core.engine import scrapyenginefrom scrapy.core import signalsclass CrawlerThread(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.running = False def run(self): self.running = True scrapymanager.configure(control_reactor=False) scrapymanager.start() reactor.run(installSignalHandlers=False) def crawl(self, *args): if not self.running: raise RuntimeError("CrawlerThread not running") self._call_and_block_until_signal(signals.spider_closed, \ scrapymanager.crawl, *args) def stop(self): reactor.callFromThread(scrapyengine.stop) def _call_and_block_until_signal(self, signal, f, *a, **kw): q = Queue.Queue() def unblock(): q.put(None) dispatcher.connect(unblock, signal=signal) reactor.callFromThread(f, *a, **kw) q.get()# Usage example below: import osos.environ.setdefault('SCRAPY_SETTINGS_MODULE', 'myproject.settings')from scrapy.xlib.pydispatch import dispatcherfrom scrapy.core import signalsfrom scrapy.conf import settingsfrom scrapy.crawler import CrawlerThreadsettings.overrides['LOG_ENABLED'] = False # avoid log noisedef item_passed(item): print "Just scraped item:", itemdispatcher.connect(item_passed, signal=signals.item_passed)crawler = CrawlerThread()print "Starting crawler thread..."crawler.start()print "Crawling somedomain.com...."crawler.crawl('somedomain.com) # blocking callprint "Crawling anotherdomain.com..."crawler.crawl('anotherdomain.com') # blocking callprint "Stopping crawler thread..."crawler.stop()
I hope this article will help you with Python programming.