diff --git a/docs/topics/extensions.rst b/docs/topics/extensions.rst
index 4a0016f831379643f1accfd8b53b1238feb3f329..95bedb67b83da4df8c1eeb79439b2ac6bde48b70 100644
--- a/docs/topics/extensions.rst
+++ b/docs/topics/extensions.rst
@@ -232,17 +232,6 @@
 The telnet console must be enabled by the :setting:`TELNETCONSOLE_ENABLED`
 setting, and the server will listen in the port specified in :setting:`WEBCONSOLE_PORT`.
 
-Spider reloader extension
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-.. module:: scrapy.contrib.spider.reloader
-   :synopsis: Spider reloader extension
-
-.. class:: scrapy.contrib.spider.reloader.SpiderReloader
-
-Reload spider objects once they've finished scraping, to release the resources
-and references to other objects they may hold.
-
 .. _topics-extensions-ref-memusage:
 
 Memory usage extension
diff --git a/docs/topics/settings.rst b/docs/topics/settings.rst
index 9f832df4171203e11b9e04b1355a3e50b5a79996..0823b63fc655dd9bff7d54a9ae573333511bae1b 100644
--- a/docs/topics/settings.rst
+++ b/docs/topics/settings.rst
@@ -492,7 +492,6 @@ Default::
     'scrapy.contrib.webconsole.spiderctl.Spiderctl': 0,
     'scrapy.contrib.webconsole.enginestatus.EngineStatus': 0,
     'scrapy.contrib.webconsole.stats.StatsDump': 0,
-    'scrapy.contrib.spider.reloader.SpiderReloader': 0,
     'scrapy.contrib.memusage.MemoryUsage': 0,
     'scrapy.contrib.memdebug.MemoryDebugger': 0,
     'scrapy.contrib.closedomain.CloseDomain': 0,
diff --git a/scrapy/conf/default_settings.py b/scrapy/conf/default_settings.py
index d14d4958f964f9630624ad4dd9b6f6bf78a13015..04fee1738e692136cf3ae42dcc879f8921648042 100644
--- a/scrapy/conf/default_settings.py
+++ b/scrapy/conf/default_settings.py
@@ -81,7 +81,6 @@ EXTENSIONS_BASE = {
     'scrapy.contrib.webconsole.spiderctl.Spiderctl': 0,
     'scrapy.contrib.webconsole.enginestatus.EngineStatus': 0,
     'scrapy.contrib.webconsole.stats.StatsDump': 0,
-    'scrapy.contrib.spider.reloader.SpiderReloader': 0,
     'scrapy.contrib.memusage.MemoryUsage': 0,
     'scrapy.contrib.memdebug.MemoryDebugger': 0,
     'scrapy.contrib.closedomain.CloseDomain': 0,
diff --git a/scrapy/contrib/spider/reloader.py b/scrapy/contrib/spider/reloader.py
deleted file mode 100644
index 6b60b753428e3ef0fa9d5ad46b53b6dcde2bedc5..0000000000000000000000000000000000000000
--- a/scrapy/contrib/spider/reloader.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""
-Reload spider modules once they are finished scraping
-
-This is to release any resources held on to by scraping spiders.
-"""
-import sys
-from scrapy.xlib.pydispatch import dispatcher
-from scrapy.core import signals
-from scrapy import log
-
-class SpiderReloader(object):
-
-    def __init__(self):
-        dispatcher.connect(self.domain_closed, signal=signals.domain_closed)
-
-    def domain_closed(self, domain, spider):
-        module = spider.__module__
-        log.msg("reloading module %s" % module, domain=domain)
-        reload(sys.modules[module])