提交 6339864f 编写于 作者: J Julia Medina

Minor refactor in the docs and functions used in the shell command

上级 70f2010d
......@@ -50,11 +50,15 @@ class Command(ScrapyCommand):
elif url:
spidercls = spidercls_for_request(spiders, Request(url),
spidercls, log_multiple=True)
# The crawler is created this way since the Shell manually handles the
# crawling engine, so the set up in the crawl method won't work
crawler = self.crawler_process._create_logged_crawler(spidercls)
# The Shell class needs a persistent engine in the crawler
crawler.engine = crawler._create_engine()
crawler.engine.start()
self.crawler_process._start_logging()
self.crawler_process.start(start_reactor=False)
self._start_crawler_thread()
shell = Shell(crawler, update_vars=self.update_vars, code=opts.code)
......
......@@ -126,13 +126,11 @@ class CrawlerProcess(CrawlerRunner):
self._stop_logging()
reactor.callFromThread(self._stop_reactor)
def start(self, stop_after_crawl=True):
self._start_logging()
self._start_reactor(stop_after_crawl)
def _start_logging(self):
def start(self, stop_after_crawl=True, start_reactor=True):
    """Set up logging and (optionally) run the crawling reactor.

    :param stop_after_crawl: forwarded unchanged to ``_start_reactor``;
        presumably makes the reactor stop once all crawls finish --
        TODO confirm, ``_start_reactor``'s body is not fully visible here.
    :param start_reactor: when ``False``, only logging is configured and
        the reactor is NOT started. Used by callers that drive the engine
        themselves (e.g. the ``shell`` command calls
        ``start(start_reactor=False)`` and starts the engine manually).
    """
    # Install the log observer built from the project settings.
    self.log_observer = log.start_from_settings(self.settings)
    # Emit the startup log lines derived from the settings
    # (whatever log.scrapy_info reports -- version/info banner, presumably).
    log.scrapy_info(self.settings)
    if start_reactor:
        self._start_reactor(stop_after_crawl)
def _start_reactor(self, stop_after_crawl=True):
if stop_after_crawl:
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册