@@ -1,5 +1,7 @@
+import os
 import socket
-from pathlib import Path
+import subprocess
+import sys
 from textwrap import dedent
 from typing import Optional, Type, Union
 
@@ -11,7 +13,6 @@
 from scrapy import Request
 from scrapy.http import Response
 from scrapy.utils.log import configure_logging
-from scrapy.utils.testproc import ProcessTest
 from twisted.internet.threads import deferToThread
 from url_matcher.util import get_domain
 from web_poet import ApplyRule, HttpResponse, ItemPage, RequestUrl, ResponseUrl, WebPage
@@ -541,7 +542,6 @@ def test_skip_download_request_url_page(settings):
     assert crawler.stats.get_stats().get("downloader/response_count", 0) == 0
 
 
-@inlineCallbacks
 def test_scrapy_shell(tmp_path):
     try:
         import scrapy.addons  # noqa: F401
@@ -564,13 +564,37 @@ def test_scrapy_shell(tmp_path):
     }
     """
     settings = dedent(settings)
-    Path(tmp_path, "settings.py").write_text(settings)
-    pt = ProcessTest()
-    pt.command = "shell"
-    pt.cwd = tmp_path
+    (tmp_path / "settings.py").write_text(settings)
+
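+    # Point the child process at the settings module written above.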
+    env = os.environ.copy()
+    env["SCRAPY_SETTINGS_MODULE"] = "settings"
     with MockServer(EchoResource) as server:
-        _, out, err = yield pt.execute(
-            [server.root_url, "-c", "item"], settings="settings"
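+        # Run "scrapy shell <url> -c item" through scrapy.cmdline in a subprocess.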
+        args = (
+            sys.executable,
+            "-m",
+            "scrapy.cmdline",
+            "shell",
+            server.root_url,
+            "-c",
+            "item",
         )
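+        # Capture stdout and stderr so the assertions below can inspect them.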
+        p = subprocess.Popen(
+            args,
+            cwd=tmp_path,
+            env=env,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
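+        # Fail the test instead of hanging if the shell does not exit in time.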
+        try:
+            out, err = p.communicate(timeout=15)
+        except subprocess.TimeoutExpired:
+            p.kill()
+            p.communicate()
+            pytest.fail("Command took too much time to complete")
+
     assert b"Using DummyResponse instead of downloading" not in err
     assert b"{}" in out  # noqa: P103