scrapy/scrapy

Clean up test_download_gzip_response


The test is currently skipped due to some Python-2-only code. We should instead remove that code.

@defer.inlineCallbacks
def test_download_gzip_response(self):
    crawler = get_crawler(SingleRequestSpider, self.settings_dict)
    body = b"1" * 100  # PayloadResource requires body length to be 100
    request = Request(
        self.mockserver.url("/payload"),
        method="POST",
        body=body,
        meta={"download_maxsize": 50},
    )
    yield crawler.crawl(seed=request)
    failure = crawler.spider.meta["failure"]
    # download_maxsize < 100, hence the CancelledError
    self.assertIsInstance(failure.value, defer.CancelledError)

    # See issue https://twistedmatrix.com/trac/ticket/8175
    raise unittest.SkipTest("xpayload fails on PY3")

    crawler = get_crawler(SingleRequestSpider, self.settings_dict)
    request.headers.setdefault(b"Accept-Encoding", b"gzip,deflate")
    request = request.replace(url=self.mockserver.url("/xpayload"))
    yield crawler.crawl(seed=request)
    # download_maxsize = 50 is enough for the gzipped response
    failure = crawler.spider.meta.get("failure")
    self.assertIsNone(failure)
    reason = crawler.spider.meta["close_reason"]
    self.assertTrue(reason, "finished")
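
One possible cleanup, in line with the issue, is simply to drop the SkipTest (and the stale Twisted ticket comment) so that the gzipped /xpayload half of the test runs again. The sketch below is a drop-in replacement for the method, using only the fixtures already present in the current test (get_crawler, SingleRequestSpider, self.mockserver, self.settings_dict); it also swaps the final self.assertTrue(reason, "finished") for assertEqual, since assertTrue treats its second argument as a failure message rather than an expected value. This is a sketch of the proposed change, not a confirmed patch:

@defer.inlineCallbacks
def test_download_gzip_response(self):
    # First half: an uncompressed 100-byte payload exceeds
    # download_maxsize=50, so the download is cancelled.
    crawler = get_crawler(SingleRequestSpider, self.settings_dict)
    body = b"1" * 100  # PayloadResource requires body length to be 100
    request = Request(
        self.mockserver.url("/payload"),
        method="POST",
        body=body,
        meta={"download_maxsize": 50},
    )
    yield crawler.crawl(seed=request)
    failure = crawler.spider.meta["failure"]
    self.assertIsInstance(failure.value, defer.CancelledError)

    # Second half: the same payload requested gzipped from /xpayload fits
    # within download_maxsize=50, so the crawl finishes normally.
    crawler = get_crawler(SingleRequestSpider, self.settings_dict)
    request.headers.setdefault(b"Accept-Encoding", b"gzip,deflate")
    request = request.replace(url=self.mockserver.url("/xpayload"))
    yield crawler.crawl(seed=request)
    self.assertIsNone(crawler.spider.meta.get("failure"))
    self.assertEqual(crawler.spider.meta["close_reason"], "finished")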