Unverified commit f8f5f463, authored by Daniel Graña, committed by GitHub

Merge pull request #3253 from rpkilby/depth-stats

[MRG+1] Update depth middleware stats (fixes #3245)
......@@ -335,17 +335,6 @@ See also: :ref:`faq-bfo-dfo` about tuning Scrapy for BFO or DFO.
other priority settings :setting:`REDIRECT_PRIORITY_ADJUST`
and :setting:`RETRY_PRIORITY_ADJUST`.
.. setting:: DEPTH_STATS
DEPTH_STATS
-----------
Default: ``True``
Scope: ``scrapy.spidermiddlewares.depth.DepthMiddleware``
Whether to collect maximum depth stats.
.. setting:: DEPTH_STATS_VERBOSE
DEPTH_STATS_VERBOSE
......
......@@ -212,7 +212,8 @@ DepthMiddleware
* :setting:`DEPTH_LIMIT` - The maximum depth that will be allowed to
crawl for any site. If zero, no limit will be imposed.
* :setting:`DEPTH_STATS` - Whether to collect depth stats.
* :setting:`DEPTH_STATS_VERBOSE` - Whether to collect the number of
requests for each depth.
* :setting:`DEPTH_PRIORITY` - Whether to prioritize the requests based on
their depth.
......
......@@ -55,7 +55,7 @@ DEFAULT_REQUEST_HEADERS = {
}
DEPTH_LIMIT = 0
DEPTH_STATS = True
DEPTH_STATS_VERBOSE = False
DEPTH_PRIORITY = 0
DNSCACHE_ENABLED = True
......
......@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
class DepthMiddleware(object):
def __init__(self, maxdepth, stats=None, verbose_stats=False, prio=1):
def __init__(self, maxdepth, stats, verbose_stats=False, prio=1):
self.maxdepth = maxdepth
self.stats = stats
self.verbose_stats = verbose_stats
......@@ -41,7 +41,7 @@ class DepthMiddleware(object):
extra={'spider': spider}
)
return False
elif self.stats:
else:
if self.verbose_stats:
self.stats.inc_value('request_depth_count/%s' % depth,
spider=spider)
......@@ -50,7 +50,7 @@ class DepthMiddleware(object):
return True
# base case (depth=0)
if self.stats and 'depth' not in response.meta:
if 'depth' not in response.meta:
response.meta['depth'] = 0
if self.verbose_stats:
self.stats.inc_value('request_depth_count/0', spider=spider)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register.