|
8 | 8 | from scrapy.settings import Settings
|
9 | 9 | from scrapy.utils.test import get_crawler
|
10 | 10 | from twisted.python.failure import Failure
|
11 |
| -from web_poet import HttpClient, HttpResponse |
| 11 | +from web_poet import ( |
| 12 | + HttpClient, |
| 13 | + HttpRequest, |
| 14 | + HttpRequestBody, |
| 15 | + HttpRequestHeaders, |
| 16 | + HttpResponse, |
| 17 | + RequestUrl, |
| 18 | +) |
12 | 19 | from web_poet.serialization import SerializedLeafData, register_serialization
|
13 | 20 |
|
14 | 21 | from scrapy_poet import HttpResponseProvider
|
15 | 22 | from scrapy_poet.injection import Injector
|
16 | 23 | from scrapy_poet.page_input_providers import (
|
17 | 24 | HttpClientProvider,
|
| 25 | + HttpRequestProvider, |
18 | 26 | ItemProvider,
|
19 | 27 | PageObjectInputProvider,
|
20 | 28 | PageParamsProvider,
|
@@ -204,6 +212,37 @@ async def test_http_client_provider(settings):
|
204 | 212 | assert results[0]._request_downloader == mock_factory.return_value
|
205 | 213 |
|
206 | 214 |
|
@ensureDeferred
async def test_http_request_provider(settings):
    """HttpRequestProvider must mirror a scrapy Request into a web_poet HttpRequest.

    Checked for both a bare GET request (defaults) and a fully populated
    POST request (method, headers, and body all carried over).
    """
    crawler = get_crawler(Spider, settings)
    injector = Injector(crawler)
    provider = HttpRequestProvider(injector)

    # A plain GET request: headers and body come back as empty defaults.
    plain_request = scrapy.http.Request("https://example.com")
    (converted,) = provider(set(), plain_request)
    assert isinstance(converted, HttpRequest)
    assert isinstance(converted.url, RequestUrl)
    assert str(converted.url) == "https://example.com"
    assert converted.method == "GET"
    assert isinstance(converted.headers, HttpRequestHeaders)
    assert converted.headers == HttpRequestHeaders()
    assert isinstance(converted.body, HttpRequestBody)
    assert converted.body == HttpRequestBody()

    # A populated POST request: every field must survive the conversion.
    populated_request = scrapy.http.Request(
        "https://example.com", method="POST", body=b"a", headers={"a": "b"}
    )
    (converted,) = provider(set(), populated_request)
    assert isinstance(converted, HttpRequest)
    assert isinstance(converted.url, RequestUrl)
    assert str(converted.url) == "https://example.com"
    assert converted.method == "POST"
    assert isinstance(converted.headers, HttpRequestHeaders)
    assert converted.headers == HttpRequestHeaders([("a", "b")])
    assert isinstance(converted.body, HttpRequestBody)
    assert converted.body == HttpRequestBody(b"a")
| 245 | + |
207 | 246 | def test_page_params_provider(settings):
|
208 | 247 | crawler = get_crawler(Spider, settings)
|
209 | 248 | injector = Injector(crawler)
|
|
0 commit comments