@@ -53,6 +53,40 @@ def parse(self, response, page: SamplePage):
53
53
_assert_all_unique_instances (page_response_instances )
54
54
55
55
56
@inlineCallbacks
def test_retry_reason():
    """A page object may raise Retry with a custom reason string.

    The first ``to_item()`` call raises ``Retry("foo")``; the retried
    request succeeds. Checks that the custom reason shows up in the
    ``retry/reason_count/foo`` stat, that exactly one retry happened, and
    that each attempt received fresh page and response instances.
    """
    # popleft() yields True on the first to_item() call (trigger a retry)
    # and False on the second (succeed).
    should_retry = deque([True, False])
    collected_items = []
    page_instances = []
    page_response_instances = []

    with MockServer(EchoResource) as server:

        class SamplePage(WebPage):
            def to_item(self):
                # Record every instance so uniqueness can be asserted below.
                page_instances.append(self)
                page_response_instances.append(self.response)
                if should_retry.popleft():
                    raise Retry("foo")
                return {"foo": "bar"}

        class TestSpider(BaseSpider):
            def start_requests(self):
                yield Request(server.root_url, callback=self.parse)

            def parse(self, response, page: SamplePage):
                collected_items.append(page.to_item())

        crawler = make_crawler(TestSpider)
        yield crawler.crawl()

        assert collected_items == [{"foo": "bar"}]
        # One original request plus one retry.
        assert crawler.stats.get_value("downloader/request_count") == 2
        assert crawler.stats.get_value("retry/count") == 1
        assert crawler.stats.get_value("retry/reason_count/foo") == 1
        # The retry limit was never hit.
        assert crawler.stats.get_value("retry/max_reached") is None
        # Each attempt must have been served a fresh page object and response.
        _assert_all_unique_instances(page_instances)
        _assert_all_unique_instances(page_response_instances)
89
+
56
90
@inlineCallbacks
57
91
def test_retry_max ():
58
92
# The default value of the RETRY_TIMES Scrapy setting is 2.
0 commit comments