Commit c2d71680 authored by Julia Medina

Use LogCapture from the testfixtures package in tests

This allows removing the `get_testlog` helper, the `flushLoggedErrors` calls from
twisted.trial.unittest.TestCase, and the Twisted log observers created for
each test in conftest.py.
Parent 7a958f90
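For background, `LogCapture` from the testfixtures package is a context manager that installs a handler on the stdlib `logging` hierarchy and collects every record emitted inside the `with` block. A minimal sketch of the pattern adopted throughout this commit (the `demo` logger name is illustrative, not part of the change):

import logging
from testfixtures import LogCapture

logger = logging.getLogger('demo')

with LogCapture() as l:
    logger.info('a message')
    logger.error('an error')

# l.records holds the captured logging.LogRecord objects
assert len(l.records) == 2
# str(l) renders one "name level message" line per record
assert 'an error' in str(l)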
import six
import pytest
from twisted.python import log
collect_ignore = ["scrapy/stats.py", "scrapy/project.py"]
@@ -10,35 +9,6 @@ if six.PY3:
if len(file_path) > 0 and file_path[0] != '#':
collect_ignore.append(file_path)
class LogObservers:
"""Class for keeping track of log observers across test modules"""
def __init__(self):
self.observers = []
def add(self, logfile='test.log'):
fileobj = open(logfile, 'wb')
observer = log.FileLogObserver(fileobj)
log.startLoggingWithObserver(observer.emit, 0)
self.observers.append((fileobj, observer))
def remove(self):
fileobj, observer = self.observers.pop()
log.removeObserver(observer.emit)
fileobj.close()
@pytest.fixture(scope='module')
def log_observers():
return LogObservers()
@pytest.fixture()
def setlog(request, log_observers):
"""Attach test.log file observer to twisted log, for trial compatibility"""
log_observers.add()
request.addfinalizer(log_observers.remove)
@pytest.fixture()
def chdir(tmpdir):
......
[pytest]
usefixtures = chdir setlog
usefixtures = chdir
python_files=test_*.py __init__.py
addopts = --doctest-modules --assert=plain
twisted = 1
@@ -46,19 +46,6 @@ def get_testenv():
env['PYTHONPATH'] = get_pythonpath()
return env
def get_testlog():
"""Get Scrapy log of current test, ignoring the rest"""
with open("test.log", "rb") as fp:
loglines = fp.readlines()
thistest = []
for line in loglines[::-1]:
thistest.append(line)
if "[-] -->" in line:
break
return "".join(thistest[::-1])
def assert_samelines(testcase, text1, text2, msg=None):
"""Asserts text1 and text2 have the same lines, ignoring differences in
line endings between platforms
......
@@ -4,3 +4,4 @@ mitmproxy==0.10.1
netlib==0.10.1
pytest-twisted
jmespath
testfixtures
@@ -39,7 +39,6 @@ class TestCloseSpider(TestCase):
close_on = 5
crawler = get_crawler(ErrorSpider, {'CLOSESPIDER_ERRORCOUNT': close_on})
yield crawler.crawl(total=1000000)
self.flushLoggedErrors(crawler.spider.exception_cls)
reason = crawler.spider.meta['close_reason']
self.assertEqual(reason, 'closespider_errorcount')
key = 'spider_exceptions/{name}'\
......
import json
import socket
import logging
from testfixtures import LogCapture
from twisted.internet import defer
from twisted.trial.unittest import TestCase
from scrapy.utils.test import get_crawler, get_testlog
from scrapy.utils.test import get_crawler
from tests import mock
from tests.spiders import FollowAllSpider, DelaySpider, SimpleSpider, \
BrokenStartRequestsSpider, SingleRequestSpider, DuplicateStartRequestsSpider
@@ -72,36 +76,47 @@ class CrawlTestCase(TestCase):
@defer.inlineCallbacks
def test_retry_503(self):
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("http://localhost:8998/status?n=503")
self._assert_retried()
with LogCapture() as l:
yield crawler.crawl("http://localhost:8998/status?n=503")
self._assert_retried(l)
@defer.inlineCallbacks
def test_retry_conn_failed(self):
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("http://localhost:65432/status?n=503")
self._assert_retried()
with LogCapture() as l:
yield crawler.crawl("http://localhost:65432/status?n=503")
self._assert_retried(l)
@defer.inlineCallbacks
def test_retry_dns_error(self):
with mock.patch('socket.gethostbyname',
side_effect=socket.gaierror(-5, 'No address associated with hostname')):
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("http://example.com/")
self._assert_retried()
with LogCapture() as l:
yield crawler.crawl("http://example.com/")
self._assert_retried(l)
@defer.inlineCallbacks
def test_start_requests_bug_before_yield(self):
crawler = get_crawler(BrokenStartRequestsSpider)
yield crawler.crawl(fail_before_yield=1)
errors = self.flushLoggedErrors(ZeroDivisionError)
self.assertEqual(len(errors), 1)
with LogCapture('scrapy', level=logging.ERROR) as l:
crawler = get_crawler(BrokenStartRequestsSpider)
yield crawler.crawl(fail_before_yield=1)
self.assertEqual(len(l.records), 1)
record = l.records[0]
self.assertIsNotNone(record.exc_info)
self.assertIs(record.exc_info[0], ZeroDivisionError)
@defer.inlineCallbacks
def test_start_requests_bug_yielding(self):
crawler = get_crawler(BrokenStartRequestsSpider)
yield crawler.crawl(fail_yielding=1)
errors = self.flushLoggedErrors(ZeroDivisionError)
self.assertEqual(len(errors), 1)
with LogCapture('scrapy', level=logging.ERROR) as l:
crawler = get_crawler(BrokenStartRequestsSpider)
yield crawler.crawl(fail_yielding=1)
self.assertEqual(len(l.records), 1)
record = l.records[0]
self.assertIsNotNone(record.exc_info)
self.assertIs(record.exc_info[0], ZeroDivisionError)
@defer.inlineCallbacks
def test_start_requests_lazyness(self):
@@ -145,28 +160,29 @@ foo body
with multiples lines
'''})
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("http://localhost:8998/raw?{0}".format(query))
log = get_testlog()
self.assertEqual(log.count("Got response 200"), 1)
with LogCapture() as l:
yield crawler.crawl("http://localhost:8998/raw?{0}".format(query))
self.assertEqual(str(l).count("Got response 200"), 1)
@defer.inlineCallbacks
def test_retry_conn_lost(self):
# connection lost after receiving data
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("http://localhost:8998/drop?abort=0")
self._assert_retried()
with LogCapture() as l:
yield crawler.crawl("http://localhost:8998/drop?abort=0")
self._assert_retried(l)
@defer.inlineCallbacks
def test_retry_conn_aborted(self):
# connection lost before receiving data
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("http://localhost:8998/drop?abort=1")
self._assert_retried()
with LogCapture() as l:
yield crawler.crawl("http://localhost:8998/drop?abort=1")
self._assert_retried(l)
def _assert_retried(self):
log = get_testlog()
self.assertEqual(log.count("Retrying"), 2)
self.assertEqual(log.count("Gave up retrying"), 1)
def _assert_retried(self, log):
self.assertEqual(str(log).count("Retrying"), 2)
self.assertEqual(str(log).count("Gave up retrying"), 1)
@defer.inlineCallbacks
def test_referer_header(self):
......
from __future__ import print_function
from testfixtures import LogCapture
from twisted.trial import unittest
from twisted.python.failure import Failure
from twisted.internet import reactor
from twisted.internet.defer import Deferred, inlineCallbacks
from twisted.python import log as txlog
from scrapy.http import Request, Response
from scrapy.spider import Spider
@@ -11,7 +11,6 @@ from scrapy.utils.request import request_fingerprint
from scrapy.contrib.pipeline.media import MediaPipeline
from scrapy.utils.signal import disconnect_all
from scrapy import signals
from scrapy import log
def _mocked_download_func(request, info):
@@ -60,26 +59,21 @@ class BaseMediaPipelineTestCase(unittest.TestCase):
fail = Failure(Exception())
results = [(True, 1), (False, fail)]
events = []
txlog.addObserver(events.append)
new_item = self.pipe.item_completed(results, item, self.info)
txlog.removeObserver(events.append)
self.flushLoggedErrors()
with LogCapture() as l:
new_item = self.pipe.item_completed(results, item, self.info)
assert new_item is item
assert len(events) == 1
assert events[0]['logLevel'] == log.ERROR
assert events[0]['failure'] is fail
assert len(l.records) == 1
record = l.records[0]
assert record.levelname == 'ERROR'
assert record.failure is fail
# disable failure logging and check again
self.pipe.LOG_FAILED_RESULTS = False
events = []
txlog.addObserver(events.append)
new_item = self.pipe.item_completed(results, item, self.info)
txlog.removeObserver(events.append)
self.flushLoggedErrors()
with LogCapture() as l:
new_item = self.pipe.item_completed(results, item, self.info)
assert new_item is item
assert len(events) == 0
assert len(l.records) == 0
@inlineCallbacks
def test_default_process_item(self):
......
@@ -5,10 +5,11 @@ import time
from threading import Thread
from libmproxy import controller, proxy
from netlib import http_auth
from testfixtures import LogCapture
from twisted.internet import defer
from twisted.trial.unittest import TestCase
from scrapy.utils.test import get_testlog, get_crawler
from scrapy.utils.test import get_crawler
from scrapy.http import Request
from tests.spiders import SimpleSpider, SingleRequestSpider
from tests.mockserver import MockServer
@@ -50,39 +51,44 @@ class ProxyConnectTestCase(TestCase):
@defer.inlineCallbacks
def test_https_connect_tunnel(self):
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(200)
with LogCapture() as l:
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(200, l)
@defer.inlineCallbacks
def test_https_noconnect(self):
os.environ['https_proxy'] = 'http://scrapy:scrapy@localhost:8888?noconnect'
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(200)
with LogCapture() as l:
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(200, l)
os.environ['https_proxy'] = 'http://scrapy:scrapy@localhost:8888'
@defer.inlineCallbacks
def test_https_connect_tunnel_error(self):
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:99999/status?n=200")
self._assert_got_tunnel_error()
with LogCapture() as l:
yield crawler.crawl("https://localhost:99999/status?n=200")
self._assert_got_tunnel_error(l)
@defer.inlineCallbacks
def test_https_tunnel_auth_error(self):
os.environ['https_proxy'] = 'http://wrong:wronger@localhost:8888'
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
with LogCapture() as l:
yield crawler.crawl("https://localhost:8999/status?n=200")
# The proxy returns a 407 error code, but that error does not reach the client;
# the client just sees a TunnelError.
self._assert_got_tunnel_error()
self._assert_got_tunnel_error(l)
os.environ['https_proxy'] = 'http://scrapy:scrapy@localhost:8888'
@defer.inlineCallbacks
def test_https_tunnel_without_leak_proxy_authorization_header(self):
request = Request("https://localhost:8999/echo")
crawler = get_crawler(SingleRequestSpider)
yield crawler.crawl(seed=request)
self._assert_got_response_code(200)
with LogCapture() as l:
yield crawler.crawl(seed=request)
self._assert_got_response_code(200, l)
echo = json.loads(crawler.spider.meta['responses'][0].body)
self.assertTrue('Proxy-Authorization' not in echo['headers'])
@@ -90,13 +96,12 @@ class ProxyConnectTestCase(TestCase):
def test_https_noconnect_auth_error(self):
os.environ['https_proxy'] = 'http://wrong:wronger@localhost:8888?noconnect'
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(407)
with LogCapture() as l:
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(407, l)
def _assert_got_response_code(self, code):
log = get_testlog()
self.assertEqual(log.count('Crawled (%d)' % code), 1)
def _assert_got_response_code(self, code, log):
self.assertEqual(str(log).count('Crawled (%d)' % code), 1)
def _assert_got_tunnel_error(self):
log = get_testlog()
self.assertEqual(log.count('TunnelError'), 1)
def _assert_got_tunnel_error(self, log):
self.assertEqual(str(log).count('TunnelError'), 1)
from unittest import TestCase
from testfixtures import LogCapture
from twisted.trial.unittest import TestCase as TrialTestCase
from twisted.internet import defer
from scrapy.utils.test import get_crawler, get_testlog
from scrapy.utils.test import get_crawler
from tests.mockserver import MockServer
from scrapy.http import Response, Request
from scrapy.spider import Spider
@@ -174,14 +175,13 @@ class TestHttpErrorMiddlewareIntegrational(TrialTestCase):
@defer.inlineCallbacks
def test_logging(self):
crawler = get_crawler(_HttpErrorSpider)
yield crawler.crawl(bypass_status_codes={402})
# print(get_testlog())
with LogCapture() as log:
yield crawler.crawl(bypass_status_codes={402})
self.assertEqual(crawler.spider.parsed, {'200', '402'})
self.assertEqual(crawler.spider.skipped, {'402'})
self.assertEqual(crawler.spider.failed, {'404', '500'})
log = get_testlog()
self.assertIn('Ignoring response <404', log)
self.assertIn('Ignoring response <500', log)
self.assertNotIn('Ignoring response <200', log)
self.assertNotIn('Ignoring response <402', log)
self.assertIn('Ignoring response <404', str(log))
self.assertIn('Ignoring response <500', str(log))
self.assertNotIn('Ignoring response <200', str(log))
self.assertNotIn('Ignoring response <402', str(log))
@@ -74,7 +74,6 @@ class DeferUtilsTest(unittest.TestCase):
def test_process_parallel_failure(self):
d = process_parallel([cb1, cb_fail, cb3], 'res', 'v1', 'v2')
self.failUnlessFailure(d, TypeError)
self.flushLoggedErrors()
return d
......
from testfixtures import LogCapture
from twisted.trial import unittest
from twisted.python import log as txlog
from twisted.python.failure import Failure
from twisted.internet import defer, reactor
from scrapy.xlib.pydispatch import dispatcher
from scrapy.utils.signal import send_catch_log, send_catch_log_deferred
from scrapy import log
class SendCatchLogTest(unittest.TestCase):
@@ -14,26 +14,24 @@ class SendCatchLogTest(unittest.TestCase):
test_signal = object()
handlers_called = set()
def log_received(event):
handlers_called.add(log_received)
assert "error_handler" in event['message'][0]
assert event['logLevel'] == log.ERROR
txlog.addObserver(log_received)
dispatcher.connect(self.error_handler, signal=test_signal)
dispatcher.connect(self.ok_handler, signal=test_signal)
result = yield defer.maybeDeferred(self._get_result, test_signal, arg='test', \
handlers_called=handlers_called)
with LogCapture() as l:
result = yield defer.maybeDeferred(
self._get_result, test_signal, arg='test',
handlers_called=handlers_called
)
assert self.error_handler in handlers_called
assert self.ok_handler in handlers_called
assert log_received in handlers_called
self.assertEqual(len(l.records), 1)
record = l.records[0]
self.assertIn('error_handler', record.getMessage())
self.assertEqual(record.levelname, 'ERROR')
self.assertEqual(result[0][0], self.error_handler)
self.assert_(isinstance(result[0][1], Failure))
self.assertEqual(result[1], (self.ok_handler, "OK"))
txlog.removeObserver(log_received)
self.flushLoggedErrors()
dispatcher.disconnect(self.error_handler, signal=test_signal)
dispatcher.disconnect(self.ok_handler, signal=test_signal)
@@ -73,12 +71,9 @@ class SendCatchLogTest2(unittest.TestCase):
def test_error_logged_if_deferred_not_supported(self):
test_signal = object()
test_handler = lambda: defer.Deferred()
log_events = []
txlog.addObserver(log_events.append)
dispatcher.connect(test_handler, test_signal)
send_catch_log(test_signal)
self.assertTrue(log_events)
self.assertIn("Cannot return deferreds from signal handler", str(log_events))
txlog.removeObserver(log_events.append)
self.flushLoggedErrors()
with LogCapture() as l:
send_catch_log(test_signal)
self.assertEqual(len(l.records), 1)
self.assertIn("Cannot return deferreds from signal handler", str(l))
dispatcher.disconnect(test_handler, test_signal)
@@ -50,6 +50,7 @@ deps =
# tests requirements
pytest>=2.6.0
pytest-twisted
testfixtures
[testenv:py34]
basepython = python3.4
......
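A usage note on the scoped form seen in the crawl tests above: passing a logger name and a level to `LogCapture` attaches it only to that logger and drops lower-severity records, and records produced via `logger.exception()` carry `exc_info` for inspection. A self-contained sketch under those assumptions (the logger names are illustrative):

import logging
from testfixtures import LogCapture

log = logging.getLogger('scrapy.core')

with LogCapture('scrapy', level=logging.ERROR) as l:
    log.debug('dropped: below the ERROR threshold')
    logging.getLogger('other').error('dropped: not under the scrapy logger')
    try:
        1 / 0
    except ZeroDivisionError:
        log.exception('boom')  # ERROR record with exc_info attached

assert len(l.records) == 1
record = l.records[0]
assert record.exc_info is not None
assert record.exc_info[0] is ZeroDivisionError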