From 31481917c0d3f7c8377c7392368add4426b6378f Mon Sep 17 00:00:00 2001 From: Alina Imtiaz Date: Mon, 18 Nov 2024 05:05:30 +0100 Subject: [PATCH] Code Review and Summary Refactoring and Improvements --- docs/_themes/flask_theme_support.py | 147 +++++++++--------- docs/conf.py | 37 +++-- setup.cfg | 2 +- setup.py | 4 +- src/requests/__init__.py | 18 ++- src/requests/_internal_utils.py | 11 +- src/requests/adapters.py | 157 ++++++++++++------- src/requests/api.py | 57 ++++--- src/requests/auth.py | 16 +- src/requests/cookies.py | 81 ++++++---- src/requests/exceptions.py | 13 +- src/requests/help.py | 4 +- src/requests/hooks.py | 1 + src/requests/models.py | 131 ++++++++++------ src/requests/sessions.py | 158 +++++++++++-------- src/requests/status_codes.py | 12 +- src/requests/structures.py | 4 +- src/requests/utils.py | 130 ++++++++++------ tests/test_adapters.py | 4 +- tests/test_help.py | 6 +- tests/test_hooks.py | 5 +- tests/test_lowlevel.py | 203 +++++++++++++----------- tests/test_requests.py | 233 ++++++++++++++++++++-------- tests/test_structures.py | 5 +- tests/test_testserver.py | 18 ++- tests/test_utils.py | 85 +++++++--- tests/testserver/server.py | 4 +- 27 files changed, 978 insertions(+), 568 deletions(-) diff --git a/docs/_themes/flask_theme_support.py b/docs/_themes/flask_theme_support.py index 33f47449c1..0dcf53b754 100644 --- a/docs/_themes/flask_theme_support.py +++ b/docs/_themes/flask_theme_support.py @@ -1,7 +1,19 @@ # flasky extensions. 
flasky pygments style based on tango style from pygments.style import Style -from pygments.token import Keyword, Name, Comment, String, Error, \ - Number, Operator, Generic, Whitespace, Punctuation, Other, Literal +from pygments.token import ( + Keyword, + Name, + Comment, + String, + Error, + Number, + Operator, + Generic, + Whitespace, + Punctuation, + Other, + Literal, +) class FlaskyStyle(Style): @@ -10,77 +22,68 @@ class FlaskyStyle(Style): styles = { # No corresponding class for the following: - #Text: "", # class: '' - Whitespace: "underline #f8f8f8", # class: 'w' - Error: "#a40000 border:#ef2929", # class: 'err' - Other: "#000000", # class 'x' - - Comment: "italic #8f5902", # class: 'c' - Comment.Preproc: "noitalic", # class: 'cp' - - Keyword: "bold #004461", # class: 'k' - Keyword.Constant: "bold #004461", # class: 'kc' - Keyword.Declaration: "bold #004461", # class: 'kd' - Keyword.Namespace: "bold #004461", # class: 'kn' - Keyword.Pseudo: "bold #004461", # class: 'kp' - Keyword.Reserved: "bold #004461", # class: 'kr' - Keyword.Type: "bold #004461", # class: 'kt' - - Operator: "#582800", # class: 'o' - Operator.Word: "bold #004461", # class: 'ow' - like keywords - - Punctuation: "bold #000000", # class: 'p' - + # Text: "", # class: '' + Whitespace: "underline #f8f8f8", # class: 'w' + Error: "#a40000 border:#ef2929", # class: 'err' + Other: "#000000", # class 'x' + Comment: "italic #8f5902", # class: 'c' + Comment.Preproc: "noitalic", # class: 'cp' + Keyword: "bold #004461", # class: 'k' + Keyword.Constant: "bold #004461", # class: 'kc' + Keyword.Declaration: "bold #004461", # class: 'kd' + Keyword.Namespace: "bold #004461", # class: 'kn' + Keyword.Pseudo: "bold #004461", # class: 'kp' + Keyword.Reserved: "bold #004461", # class: 'kr' + Keyword.Type: "bold #004461", # class: 'kt' + Operator: "#582800", # class: 'o' + Operator.Word: "bold #004461", # class: 'ow' - like keywords + Punctuation: "bold #000000", # class: 'p' # because special names such as 
Name.Class, Name.Function, etc. # are not recognized as such later in the parsing, we choose them # to look the same as ordinary variables. - Name: "#000000", # class: 'n' - Name.Attribute: "#c4a000", # class: 'na' - to be revised - Name.Builtin: "#004461", # class: 'nb' - Name.Builtin.Pseudo: "#3465a4", # class: 'bp' - Name.Class: "#000000", # class: 'nc' - to be revised - Name.Constant: "#000000", # class: 'no' - to be revised - Name.Decorator: "#888", # class: 'nd' - to be revised - Name.Entity: "#ce5c00", # class: 'ni' - Name.Exception: "bold #cc0000", # class: 'ne' - Name.Function: "#000000", # class: 'nf' - Name.Property: "#000000", # class: 'py' - Name.Label: "#f57900", # class: 'nl' - Name.Namespace: "#000000", # class: 'nn' - to be revised - Name.Other: "#000000", # class: 'nx' - Name.Tag: "bold #004461", # class: 'nt' - like a keyword - Name.Variable: "#000000", # class: 'nv' - to be revised - Name.Variable.Class: "#000000", # class: 'vc' - to be revised - Name.Variable.Global: "#000000", # class: 'vg' - to be revised - Name.Variable.Instance: "#000000", # class: 'vi' - to be revised - - Number: "#990000", # class: 'm' - - Literal: "#000000", # class: 'l' - Literal.Date: "#000000", # class: 'ld' - - String: "#4e9a06", # class: 's' - String.Backtick: "#4e9a06", # class: 'sb' - String.Char: "#4e9a06", # class: 'sc' - String.Doc: "italic #8f5902", # class: 'sd' - like a comment - String.Double: "#4e9a06", # class: 's2' - String.Escape: "#4e9a06", # class: 'se' - String.Heredoc: "#4e9a06", # class: 'sh' - String.Interpol: "#4e9a06", # class: 'si' - String.Other: "#4e9a06", # class: 'sx' - String.Regex: "#4e9a06", # class: 'sr' - String.Single: "#4e9a06", # class: 's1' - String.Symbol: "#4e9a06", # class: 'ss' - - Generic: "#000000", # class: 'g' - Generic.Deleted: "#a40000", # class: 'gd' - Generic.Emph: "italic #000000", # class: 'ge' - Generic.Error: "#ef2929", # class: 'gr' - Generic.Heading: "bold #000080", # class: 'gh' - Generic.Inserted: "#00A000", # 
class: 'gi' - Generic.Output: "#888", # class: 'go' - Generic.Prompt: "#745334", # class: 'gp' - Generic.Strong: "bold #000000", # class: 'gs' - Generic.Subheading: "bold #800080", # class: 'gu' - Generic.Traceback: "bold #a40000", # class: 'gt' + Name: "#000000", # class: 'n' + Name.Attribute: "#c4a000", # class: 'na' - to be revised + Name.Builtin: "#004461", # class: 'nb' + Name.Builtin.Pseudo: "#3465a4", # class: 'bp' + Name.Class: "#000000", # class: 'nc' - to be revised + Name.Constant: "#000000", # class: 'no' - to be revised + Name.Decorator: "#888", # class: 'nd' - to be revised + Name.Entity: "#ce5c00", # class: 'ni' + Name.Exception: "bold #cc0000", # class: 'ne' + Name.Function: "#000000", # class: 'nf' + Name.Property: "#000000", # class: 'py' + Name.Label: "#f57900", # class: 'nl' + Name.Namespace: "#000000", # class: 'nn' - to be revised + Name.Other: "#000000", # class: 'nx' + Name.Tag: "bold #004461", # class: 'nt' - like a keyword + Name.Variable: "#000000", # class: 'nv' - to be revised + Name.Variable.Class: "#000000", # class: 'vc' - to be revised + Name.Variable.Global: "#000000", # class: 'vg' - to be revised + Name.Variable.Instance: "#000000", # class: 'vi' - to be revised + Number: "#990000", # class: 'm' + Literal: "#000000", # class: 'l' + Literal.Date: "#000000", # class: 'ld' + String: "#4e9a06", # class: 's' + String.Backtick: "#4e9a06", # class: 'sb' + String.Char: "#4e9a06", # class: 'sc' + String.Doc: "italic #8f5902", # class: 'sd' - like a comment + String.Double: "#4e9a06", # class: 's2' + String.Escape: "#4e9a06", # class: 'se' + String.Heredoc: "#4e9a06", # class: 'sh' + String.Interpol: "#4e9a06", # class: 'si' + String.Other: "#4e9a06", # class: 'sx' + String.Regex: "#4e9a06", # class: 'sr' + String.Single: "#4e9a06", # class: 's1' + String.Symbol: "#4e9a06", # class: 'ss' + Generic: "#000000", # class: 'g' + Generic.Deleted: "#a40000", # class: 'gd' + Generic.Emph: "italic #000000", # class: 'ge' + Generic.Error: "#ef2929", 
# class: 'gr' + Generic.Heading: "bold #000080", # class: 'gh' + Generic.Inserted: "#00A000", # class: 'gi' + Generic.Output: "#888", # class: 'go' + Generic.Prompt: "#745334", # class: 'gp' + Generic.Strong: "bold #000000", # class: 'gs' + Generic.Subheading: "bold #800080", # class: 'gu' + Generic.Traceback: "bold #a40000", # class: 'gt' } diff --git a/docs/conf.py b/docs/conf.py index edbd72ba82..9f07903972 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,6 +12,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. +import requests import sys import os @@ -24,9 +25,6 @@ sys.path.insert(0, os.path.abspath("..")) sys.path.insert(0, os.path.abspath("_themes")) -import requests - - # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. @@ -57,9 +55,9 @@ master_doc = "index" # General information about the project. -project = u"Requests" -copyright = u'MMXVIX. A Kenneth Reitz Project' -author = u"Kenneth Reitz" +project = "Requests" +copyright = 'MMXVIX. A Kenneth Reitz Project' +author = "Kenneth Reitz" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -172,7 +170,12 @@ # Custom sidebar templates, maps document names to template names. html_sidebars = { - "index": ["sidebarintro.html", "sourcelink.html", "searchbox.html", "hacks.html"], + "index": [ + "sidebarintro.html", + "sourcelink.html", + "searchbox.html", + "hacks.html", + ], "**": [ "sidebarlogo.html", "localtoc.html", @@ -234,20 +237,26 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', + # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', + # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. 
- #'preamble': '', + # 'preamble': '', # Latex figure (float) alignment - #'figure_align': 'htbp', + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, "Requests.tex", u"Requests Documentation", u"Kenneth Reitz", "manual") + ( + master_doc, + "Requests.tex", + "Requests Documentation", + "Kenneth Reitz", + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of @@ -275,7 +284,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "requests", u"Requests Documentation", [author], 1)] +man_pages = [(master_doc, "requests", "Requests Documentation", [author], 1)] # If true, show URL addresses after external links. # man_show_urls = False @@ -290,7 +299,7 @@ ( master_doc, "Requests", - u"Requests Documentation", + "Requests Documentation", author, "Requests", "One line description of project.", diff --git a/setup.cfg b/setup.cfg index 8d44e0e14b..62baf55304 100644 --- a/setup.cfg +++ b/setup.cfg @@ -10,7 +10,7 @@ requires-dist = urllib3>=1.21.1,<3 [flake8] -ignore = E203, E501, W503 +ignore = E203, E501, W503, F401 per-file-ignores = src/requests/__init__.py:E402, F401 src/requests/compat.py:E402, F401 diff --git a/setup.py b/setup.py index 7d9b52bc3b..631d5e378b 100755 --- a/setup.py +++ b/setup.py @@ -50,7 +50,9 @@ about = {} here = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(here, "src", "requests", "__version__.py"), "r", "utf-8") as f: +with open( + os.path.join(here, "src", "requests", "__version__.py"), "r", "utf-8" +) as f: exec(f.read(), about) with open("README.md", "r", "utf-8") as f: diff --git a/src/requests/__init__.py b/src/requests/__init__.py index 051cda1340..c2bc7032ef 100644 --- a/src/requests/__init__.py +++ 
b/src/requests/__init__.py @@ -55,9 +55,13 @@ chardet_version = None -def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): +def check_compatibility( + urllib3_version, chardet_version, charset_normalizer_version +): urllib3_version = urllib3_version.split(".") - assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. + assert urllib3_version != [ + "dev" + ] # Verify urllib3 isn't installed from git. # Sometimes, urllib3 only reports its version as 16.1. if len(urllib3_version) == 2: @@ -98,8 +102,10 @@ def _check_cryptography(cryptography_version): return if cryptography_version < [1, 3, 4]: - warning = "Old version of cryptography ({}) may cause slowdown.".format( - cryptography_version + warning = ( + "Old version of cryptography ({}) may cause slowdown.".format( + cryptography_version + ) ) warnings.warn(warning, RequestsDependencyWarning) @@ -111,8 +117,8 @@ def _check_cryptography(cryptography_version): ) except (AssertionError, ValueError): warnings.warn( - "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " - "version!".format( + "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match " + "a supported version!".format( urllib3.__version__, chardet_version, charset_normalizer_version ), RequestsDependencyWarning, diff --git a/src/requests/_internal_utils.py b/src/requests/_internal_utils.py index f2cf635e29..9a0b4b43c8 100644 --- a/src/requests/_internal_utils.py +++ b/src/requests/_internal_utils.py @@ -5,6 +5,7 @@ Provides utility functions that are consumed internally by Requests which depend on extremely few external helpers (such as compat) """ + import re from .compat import builtin_str @@ -14,8 +15,14 @@ _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") -_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) -_HEADER_VALIDATORS_BYTE = 
(_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) +_HEADER_VALIDATORS_STR = ( + _VALID_HEADER_NAME_RE_STR, + _VALID_HEADER_VALUE_RE_STR, +) +_HEADER_VALIDATORS_BYTE = ( + _VALID_HEADER_NAME_RE_BYTE, + _VALID_HEADER_VALUE_RE_BYTE, +) HEADER_VALIDATORS = { bytes: _HEADER_VALIDATORS_BYTE, str: _HEADER_VALIDATORS_STR, diff --git a/src/requests/adapters.py b/src/requests/adapters.py index 9a58b16025..c7949ea4ec 100644 --- a/src/requests/adapters.py +++ b/src/requests/adapters.py @@ -141,21 +141,30 @@ def __init__(self): super().__init__() def send( - self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + self, + request, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, ): """Sends PreparedRequest object. Returns Response object. - :param request: The :class:`PreparedRequest ` being sent. + :param request: The :class:`PreparedRequest ` being + sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use - :param cert: (optional) Any user-provided SSL certificate to be trusted. - :param proxies: (optional) The proxies dictionary to apply to the request. + :param verify: (optional) Either a boolean, controlling whether to + verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be + trusted. + :param proxies: (optional) The proxies dictionary to apply to the + request. """ raise NotImplementedError @@ -181,7 +190,8 @@ class HTTPAdapter(BaseAdapter): connections. 
If you need granular control over the conditions under which we retry a request, import urllib3's ``Retry`` class and pass that instead. - :param pool_block: Whether the connection pool should block for connections. + :param pool_block: Whether the connection pool should block for + connections. Usage:: @@ -249,7 +259,8 @@ def init_poolmanager( :param connections: The number of urllib3 connection pools to cache. :param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. - :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + :param pool_kwargs: Extra keyword arguments used to initialize the + Pool Manager. """ # save these values for pickling self._pool_connections = connections @@ -271,7 +282,8 @@ def proxy_manager_for(self, proxy, **proxy_kwargs): :class:`HTTPAdapter `. :param proxy: The proxy to return a urllib3 ProxyManager for. - :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :param proxy_kwargs: Extra keyword arguments used to configure the + Proxy Manager. :returns: ProxyManager :rtype: urllib3.ProxyManager """ @@ -302,32 +314,33 @@ def proxy_manager_for(self, proxy, **proxy_kwargs): return manager def cert_verify(self, conn, url, verify, cert): - """Verify a SSL certificate. This method should not be called from user - code, and is only exposed for use when subclassing the + """Verify a SSL certificate. This method should not be called from + user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param conn: The urllib3 connection object associated with the cert. :param url: The requested URL. 
- :param verify: Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use + :param verify: Either a boolean, in which case it controls whether we + verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use. :param cert: The SSL certificate to verify. """ if url.lower().startswith("https") and verify: conn.cert_reqs = "CERT_REQUIRED" - # Only load the CA certificates if 'verify' is a string indicating the CA bundle to use. - # Otherwise, if verify is a boolean, we don't load anything since - # the connection will be using a context with the default certificates already loaded, - # and this avoids a call to the slow load_verify_locations() + # Only load the CA certificates if 'verify' is a string indicating + # the CA bundle to use. Otherwise, if verify is a boolean, we + # don't load anything since the connection will be using a context + # with the default certificates already loaded, and this avoids a + # call to the slow load_verify_locations() if verify is not True: # `verify` must be a str with a path then cert_loc = verify if not os.path.exists(cert_loc): raise OSError( - f"Could not find a suitable TLS CA certificate bundle, " - f"invalid path: {cert_loc}" + f"Could not find a suitable TLS CA certificate " + f"bundle, invalid path: {cert_loc}" ) if not os.path.isdir(cert_loc): @@ -353,7 +366,8 @@ def cert_verify(self, conn, url, verify, cert): ) if conn.key_file and not os.path.exists(conn.key_file): raise OSError( - f"Could not find the TLS key file, invalid path: {conn.key_file}" + f"Could not find the TLS key file, " + f"invalid path: {conn.key_file}" ) def build_response(self, req, resp): @@ -362,7 +376,8 @@ def build_response(self, req, resp): for use when subclassing the :class:`HTTPAdapter ` - :param req: The :class:`PreparedRequest ` used to generate the response. 
+ :param req: The :class:`PreparedRequest ` used to + generate the response. :param resp: The urllib3 response object. :rtype: requests.Response """ @@ -404,14 +419,14 @@ def build_connection_pool_key_attributes(self, request, verify, cert=None): this writing, use the following to determine what keys may be in that dictionary: - * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the - default Requests SSL Context + * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be + the default Requests SSL Context * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but - ``"cert_reqs"`` will be set - * If ``verify`` is a string, (i.e., it is a user-specified trust bundle) - ``"ca_certs"`` will be set if the string is not a directory recognized - by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` will be - set. + ``"cert_reqs"`` will be set. + * If ``verify`` is a string, (i.e., it is a user-specified trust + bundle) ``"ca_certs"`` will be set if the string is not a directory + recognized by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` + will be set. * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If ``"cert"`` is a tuple with a second item, ``"key_file"`` will also be present @@ -428,8 +443,8 @@ def build_connection_pool_key_attributes(self, request, verify, cert=None): :class:`~requests.models.PreparedRequest` :param verify: Either a boolean, in which case it controls whether - we verify the server's TLS certificate, or a string, in which case it - must be a path to a CA bundle to use. + we verify the server's TLS certificate, or a string, in which case + it must be a path to a CA bundle to use. :param cert: (optional) Any user-provided SSL certificate for client authentication (a.k.a., mTLS). This may be a string (i.e., just @@ -441,12 +456,17 @@ def build_connection_pool_key_attributes(self, request, verify, cert=None): portion of the Pool Key including scheme, hostname, and port. 
The second is a dictionary of SSLContext related parameters. """ - return _urllib3_request_context(request, verify, cert, self.poolmanager) + return _urllib3_request_context( + request, verify, cert, self.poolmanager + ) - def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None): - """Returns a urllib3 connection for the given request and TLS settings. - This should not be called from user code, and is only exposed for use - when subclassing the :class:`HTTPAdapter `. + def get_connection_with_tls_context( + self, request, verify, proxies=None, cert=None + ): + """Returns a urllib3 connection for the given request and TLS + settings. This should not be called from user code, and is only + exposed for use when subclassing the :class:`HTTPAdapter + `. :param request: The :class:`PreparedRequest ` object to be sent @@ -465,10 +485,12 @@ def get_connection_with_tls_context(self, request, verify, proxies=None, cert=No """ proxy = select_proxy(request.url, proxies) try: - host_params, pool_kwargs = self.build_connection_pool_key_attributes( - request, - verify, - cert, + host_params, pool_kwargs = ( + self.build_connection_pool_key_attributes( + request, + verify, + cert, + ) ) except ValueError as e: raise InvalidURL(e, request=request) @@ -501,14 +523,16 @@ def get_connection(self, url, proxies=None): :class:`HTTPAdapter `. :param url: The URL to connect to. - :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :param proxies: (optional) A Requests-style dictionary of proxies used + on this request. :rtype: urllib3.ConnectionPool """ warnings.warn( ( "`get_connection` has been deprecated in favor of " - "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses " - "will need to migrate for Requests>=2.32.2. Please see " + "`get_connection_with_tls_context`." + "Custom HTTPAdapter subclasses will need to migrate for" + "Requests>=2.32.2. 
Please see " "https://github.com/psf/requests/pull/6710 for more details." ), DeprecationWarning, @@ -553,8 +577,10 @@ def request_url(self, request, proxies): when subclassing the :class:`HTTPAdapter `. - :param request: The :class:`PreparedRequest ` being sent. - :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :param request: The :class:`PreparedRequest ` being + sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy + URLs. :rtype: str """ proxy = select_proxy(request.url, proxies) @@ -584,7 +610,8 @@ def add_headers(self, request, **kwargs): when subclassing the :class:`HTTPAdapter `. - :param request: The :class:`PreparedRequest ` to add headers to. + :param request: The :class:`PreparedRequest ` to add + headers to. :param kwargs: The keyword arguments from the call to send(). """ pass @@ -606,26 +633,37 @@ def proxy_headers(self, proxy): username, password = get_auth_from_url(proxy) if username: - headers["Proxy-Authorization"] = _basic_auth_str(username, password) + headers["Proxy-Authorization"] = _basic_auth_str( + username, password + ) return headers def send( - self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + self, + request, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, ): """Sends PreparedRequest object. Returns Response object. - :param request: The :class:`PreparedRequest ` being sent. + :param request: The :class:`PreparedRequest ` being + sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. 
:type timeout: float or tuple or urllib3 Timeout object - :param verify: (optional) Either a boolean, in which case it controls whether - we verify the server's TLS certificate, or a string, in which case it - must be a path to a CA bundle to use - :param cert: (optional) Any user-provided SSL certificate to be trusted. - :param proxies: (optional) The proxies dictionary to apply to the request. + :param verify: (optional) Either a boolean, in which case it controls + whether we verify the server's TLS certificate, or a string, in + which case it must be a path to a CA bundle to use. + :param cert: (optional) Any user-provided SSL certificate to be + trusted. + :param proxies: (optional) The proxies dictionary to apply to the + request. :rtype: requests.Response """ @@ -647,7 +685,9 @@ def send( proxies=proxies, ) - chunked = not (request.body is None or "Content-Length" in request.headers) + chunked = not ( + request.body is None or "Content-Length" in request.headers + ) if isinstance(timeout, tuple): try: @@ -655,8 +695,9 @@ def send( timeout = TimeoutSauce(connect=connect, read=read) except ValueError: raise ValueError( - f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " - f"or a single float to set both timeouts to the same value." + f"Invalid timeout {timeout}. Pass a (connect, read) " + f"timeout tuple, or a single float to set both timeouts " + f"to the same value." ) elif isinstance(timeout, TimeoutSauce): pass diff --git a/src/requests/api.py b/src/requests/api.py index 5960744552..7dd75811cf 100644 --- a/src/requests/api.py +++ b/src/requests/api.py @@ -14,33 +14,45 @@ def request(method, url, **kwargs): """Constructs and sends a :class:`Request `. - :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param method: method for the new :class:`Request` object: ``GET``, + ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. 
:param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send in the query string for the :class:`Request`. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. - :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. - :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. - ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` - or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string - defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + :param json: (optional) A JSON serializable Python object to send in the + body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` + (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple + ``('filename', fileobj, 'content_type')`` or a 4-tuple ``('filename', + fileobj, 'content_type', custom_headers)``, where ``'content_type'`` + is a string defining the content type of the given file and + ``custom_headers`` a dict-like object containing additional headers to add for the file. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. 
- :param timeout: (optional) How many seconds to wait for the server to send data - before giving up, as a float, or a :ref:`(connect timeout, read - timeout) ` tuple. + :param timeout: (optional) How many seconds to wait for the server to + send data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. :type timeout: float or tuple - :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :param allow_redirects: (optional) Boolean. Enable/disable + GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to + ``True``. :type allow_redirects: bool - :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. - :param stream: (optional) if ``False``, the response content will be immediately downloaded. - :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :param proxies: (optional) Dictionary mapping protocol to the URL of the + proxy. + :param verify: (optional) Either a boolean, in which case it controls + whether we verify the server's TLS certificate, or a string, in which + case it must be a path to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be + immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. :return: :class:`Response ` object :rtype: requests.Response @@ -106,7 +118,8 @@ def post(url, data=None, json=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. 
- :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the + body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response @@ -121,7 +134,8 @@ def put(url, data=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the + body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response @@ -136,7 +150,8 @@ def patch(url, data=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the + body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. 
:return: :class:`Response ` object :rtype: requests.Response diff --git a/src/requests/auth.py b/src/requests/auth.py index 4a7ce6dc14..1ecda2939b 100644 --- a/src/requests/auth.py +++ b/src/requests/auth.py @@ -92,7 +92,9 @@ def __ne__(self, other): return not self == other def __call__(self, r): - r.headers["Authorization"] = _basic_auth_str(self.username, self.password) + r.headers["Authorization"] = _basic_auth_str( + self.username, self.password + ) return r @@ -100,7 +102,9 @@ class HTTPProxyAuth(HTTPBasicAuth): """Attaches HTTP Proxy Authentication to a given Request object.""" def __call__(self, r): - r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) + r.headers["Proxy-Authorization"] = _basic_auth_str( + self.username, self.password + ) return r @@ -260,7 +264,9 @@ def handle_401(self, r, **kwargs): if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: self._thread_local.num_401_calls += 1 pat = re.compile(r"digest ", flags=re.IGNORECASE) - self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) + self._thread_local.chal = parse_dict_header( + pat.sub("", s_auth, count=1) + ) # Consume content and release the original connection # to allow our new request to reuse the same one. @@ -287,7 +293,9 @@ def __call__(self, r): self.init_per_thread_state() # If we have a saved nonce, skip the 401 if self._thread_local.last_nonce: - r.headers["Authorization"] = self.build_digest_header(r.method, r.url) + r.headers["Authorization"] = self.build_digest_header( + r.method, r.url + ) try: self._thread_local.pos = r.body.tell() except AttributeError: diff --git a/src/requests/cookies.py b/src/requests/cookies.py index f69d0cda9e..c3d6e96840 100644 --- a/src/requests/cookies.py +++ b/src/requests/cookies.py @@ -23,13 +23,13 @@ class MockRequest: """Wraps a `requests.Request` to mimic a `urllib2.Request`. 
- The code in `http.cookiejar.CookieJar` expects this interface in order to correctly - manage cookie policies, i.e., determine whether a cookie can be set, given the - domains of the request and the cookie. + The code in `http.cookiejar.CookieJar` expects this interface in order + to correctly manage cookie policies, i.e., determine whether a cookie can + be set, given the domains of the request and the cookie. - The original request object is read-only. The client is responsible for collecting - the new headers via `get_new_headers()` and interpreting them appropriately. You - probably want `get_cookie_header`, defined below. + The original request object is read-only. The client is responsible for + collecting the new headers via `get_new_headers()` and interpreting them + appropriately. You probably want `get_cookie_header`, defined below. """ def __init__(self, request): @@ -76,7 +76,8 @@ def get_header(self, name, default=None): return self._r.headers.get(name, self._new_headers.get(name, default)) def add_header(self, key, val): - """cookiejar has no legitimate use for this method; add it back if you find one.""" + """cookiejar has no legitimate use for this method; add it back if you + find one.""" raise NotImplementedError( "Cookie headers should be added with add_unredirected_header()" ) @@ -103,14 +104,15 @@ def host(self): class MockResponse: """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. - ...what? Basically, expose the parsed HTTP headers from the server response - the way `http.cookiejar` expects to see them. + ...what? Basically, expose the parsed HTTP headers from the server + response the way `http.cookiejar` expects to see them. """ def __init__(self, headers): """Make a MockResponse for `cookiejar` to read. 
- :param headers: a httplib.HTTPMessage or analogous carrying the headers + :param headers: a httplib.HTTPMessage or analogous carrying the + headers """ self._headers = headers @@ -128,7 +130,9 @@ def extract_cookies_to_jar(jar, request, response): :param request: our own requests.Request object :param response: urllib3.HTTPResponse object """ - if not (hasattr(response, "_original_response") and response._original_response): + if not ( + hasattr(response, "_original_response") and response._original_response + ): return # the _original_response field is the wrapped httplib.HTTPResponse object, req = MockRequest(request) @@ -139,7 +143,8 @@ def extract_cookies_to_jar(jar, request, response): def get_cookie_header(jar, request): """ - Produce an appropriate Cookie header string to be sent with `request`, or None. + Produce an appropriate Cookie header string to be sent with `request`, + or None. :rtype: str """ @@ -168,8 +173,9 @@ def remove_cookie_by_name(cookiejar, name, domain=None, path=None): class CookieConflictError(RuntimeError): - """There are two cookies that meet the criteria specified in the cookie jar. - Use .get and .set and include domain and path args in order to be more specific. + """There are two cookies that meet the criteria specified in the cookie + jar. Use .get and .set and include domain and path args in order to be + more specific. """ @@ -208,10 +214,14 @@ def set(self, name, value, **kwargs): order to resolve naming collisions from using one cookie jar over multiple domains. 
""" - # support client code that unsets cookies by assignment of a None value: + # support client code that unsets cookies by assignment of a None + # value: if value is None: remove_cookie_by_name( - self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + self, + name, + domain=kwargs.get("domain"), + path=kwargs.get("path"), ) return @@ -240,8 +250,8 @@ def keys(self): return list(self.iterkeys()) def itervalues(self): - """Dict-like itervalues() that returns an iterator of values of cookies - from the jar. + """Dict-like itervalues() that returns an iterator of values of + cookies from the jar. .. seealso:: iterkeys() and iteritems(). """ @@ -249,8 +259,8 @@ def itervalues(self): yield cookie.value def values(self): - """Dict-like values() that returns a list of values of cookies from the - jar. + """Dict-like values() that returns a list of values of cookies from + the jar. .. seealso:: keys() and items(). """ @@ -304,8 +314,8 @@ def multiple_domains(self): return False # there is only one domain in jar def get_dict(self, domain=None, path=None): - """Takes as an argument an optional domain and path and returns a plain - old Python dict of name-value pairs of cookies that meet the + """Takes as an argument an optional domain and path and returns a + plain old Python dict of name-value pairs of cookies that meet the requirements. :rtype: dict @@ -335,8 +345,8 @@ def __getitem__(self, name): def __setitem__(self, name, value): """Dict-like __setitem__ for compatibility with client code. Throws - exception if there is already a cookie of that name in the jar. In that - case, use the more explicit set() method instead. + exception if there is already a cookie of that name in the jar. In + that case, use the more explicit set() method instead. 
""" self.set(name, value) @@ -356,7 +366,8 @@ def set_cookie(self, cookie, *args, **kwargs): return super().set_cookie(cookie, *args, **kwargs) def update(self, other): - """Updates this jar with cookies from another CookieJar or dict-like""" + """Updates this jar with cookies from another CookieJar or + dict-like""" if isinstance(other, cookielib.CookieJar): for cookie in other: self.set_cookie(copy.copy(cookie)) @@ -401,11 +412,14 @@ def _find_no_duplicates(self, name, domain=None, path=None): if domain is None or cookie.domain == domain: if path is None or cookie.path == path: if toReturn is not None: - # if there are multiple cookies that meet passed in criteria + # if there are multiple cookies that meet passed + # in criteria raise CookieConflictError( - f"There are multiple cookies with name, {name!r}" + f"There are multiple cookies with name, " + f"{name!r}" ) - # we will eventually return this as long as no cookie conflict + # we will eventually return this as long as no cookie + # conflict toReturn = cookie.value if toReturn: @@ -477,7 +491,8 @@ def create_cookie(name, value, **kwargs): badargs = set(kwargs) - set(result) if badargs: raise TypeError( - f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + f"create_cookie() got unexpected keyword arguments: " + f" {list(badargs)}" ) result.update(kwargs) @@ -500,7 +515,9 @@ def morsel_to_cookie(morsel): raise TypeError(f"max-age: {morsel['max-age']} must be integer") elif morsel["expires"]: time_template = "%a, %d-%b-%Y %H:%M:%S GMT" - expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) + expires = calendar.timegm( + time.strptime(morsel["expires"], time_template) + ) return create_cookie( comment=morsel["comment"], comment_url=bool(morsel["comment"]), @@ -550,7 +567,9 @@ def merge_cookies(cookiejar, cookies): raise ValueError("You can only merge into CookieJar") if isinstance(cookies, dict): - cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, 
overwrite=False) + cookiejar = cookiejar_from_dict( + cookies, cookiejar=cookiejar, overwrite=False + ) elif isinstance(cookies, cookielib.CookieJar): try: cookiejar.update(cookies) diff --git a/src/requests/exceptions.py b/src/requests/exceptions.py index 83986b4898..5ff3d2b4f0 100644 --- a/src/requests/exceptions.py +++ b/src/requests/exceptions.py @@ -4,6 +4,7 @@ This module contains the set of Requests' exceptions. """ + from urllib3.exceptions import HTTPError as BaseHTTPError from .compat import JSONDecodeError as CompatJSONDecodeError @@ -15,11 +16,16 @@ class RequestException(IOError): """ def __init__(self, *args, **kwargs): - """Initialize RequestException with `request` and `response` objects.""" + """Initialize RequestException with `request` and `response` + objects.""" response = kwargs.pop("response", None) self.response = response self.request = kwargs.pop("request", None) - if response is not None and not self.request and hasattr(response, "request"): + if ( + response is not None + and not self.request + and hasattr(response, "request") + ): self.request = self.response.request super().__init__(*args, **kwargs) @@ -144,7 +150,8 @@ class RequestsWarning(Warning): class FileModeWarning(RequestsWarning, DeprecationWarning): - """A file was opened in text mode, but Requests determined its binary length.""" + """A file was opened in text mode, but Requests determined its binary + length.""" class RequestsDependencyWarning(RequestsWarning): diff --git a/src/requests/help.py b/src/requests/help.py index 8fbcd6560a..00e5c1d105 100644 --- a/src/requests/help.py +++ b/src/requests/help.py @@ -105,7 +105,9 @@ def info(): } system_ssl = ssl.OPENSSL_VERSION_NUMBER - system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} + system_ssl_info = { + "version": f"{system_ssl:x}" if system_ssl is not None else "" + } return { "platform": platform_info, diff --git a/src/requests/hooks.py b/src/requests/hooks.py index d181ba2ec2..5976bc7d0f 
100644 --- a/src/requests/hooks.py +++ b/src/requests/hooks.py @@ -9,6 +9,7 @@ ``response``: The response generated from a Request. """ + HOOKS = ["response"] diff --git a/src/requests/models.py b/src/requests/models.py index 8f56ca7d23..cab1707ff4 100644 --- a/src/requests/models.py +++ b/src/requests/models.py @@ -8,8 +8,9 @@ import datetime # Import encoding now, to avoid implicit import later. -# Implicit import within threads may cause LookupError when standard library is in a ZIP, -# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +# Implicit import within threads may cause LookupError when standard library +# is in a ZIP, such as in Embedded Python. See +# https://github.com/psf/requests/issues/3578. import encodings.idna # noqa: F401 from io import UnsupportedOperation @@ -108,8 +109,8 @@ def _encode_params(data): """Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of - 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary - if parameters are supplied as a dict. + 2-tuples. Order is retained if data is a list of 2-tuples but + arbitrary if parameters are supplied as a dict. """ if isinstance(data, (str, bytes)): @@ -140,8 +141,9 @@ def _encode_files(files, data): Will successfully encode files when passed as a dict or a list of tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. - The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) - or 4-tuples (filename, fileobj, contentype, custom_headers). + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, + fileobj, contentype) or 4-tuples (filename, fileobj, contentype, + custom_headers). """ if not files: raise ValueError("Files must be provided.") @@ -157,15 +159,18 @@ def _encode_files(files, data): val = [val] for v in val: if v is not None: - # Don't call str() on bytestrings: in Py3 it all goes wrong. 
+ # Don't call str() on bytestrings: in Py3 it all goes + # wrong. if not isinstance(v, bytes): v = str(v) new_fields.append( ( - field.decode("utf-8") - if isinstance(field, bytes) - else field, + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field + ), v.encode("utf-8") if isinstance(v, str) else v, ) ) @@ -208,12 +213,16 @@ def register_hook(self, event, hook): """Properly register a hook.""" if event not in self.hooks: - raise ValueError(f'Unsupported event specified, with event name "{event}"') + raise ValueError( + f'Unsupported event specified, with event name "{event}"' + ) if isinstance(hook, Callable): self.hooks[event].append(hook) elif hasattr(hook, "__iter__"): - self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + self.hooks[event].extend( + h for h in hook if isinstance(h, Callable) + ) def deregister_hook(self, event, hook): """Deregister a previously registered hook. @@ -230,21 +239,25 @@ def deregister_hook(self, event, hook): class Request(RequestHooksMixin): """A user-created :class:`Request ` object. - Used to prepare a :class:`PreparedRequest `, which is sent to the server. + Used to prepare a :class:`PreparedRequest `, which is + sent to the server. :param method: HTTP method to use. :param url: URL to send. :param headers: dictionary of headers to send. - :param files: dictionary of {filename: fileobject} files to multipart upload. + :param files: dictionary of {filename: fileobject} files to multipart + upload. :param data: the body to attach to the request. If a dictionary or list of tuples ``[(key, value)]`` is provided, form-encoding will take place. - :param json: json for the body to attach to the request (if files or data is not specified). + :param json: json for the body to attach to the request (if files or data + is not specified). :param params: URL parameters to append to the URL. If a dictionary or list of tuples ``[(key, value)]`` is provided, form-encoding will take place. 
:param auth: Auth handler or (user, pass) tuple. - :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param cookies: dictionary or CookieJar of cookies to attach to this + request. :param hooks: dictionary of callback hooks, for internal usage. Usage:: @@ -293,7 +306,8 @@ def __repr__(self): return f"" def prepare(self): - """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + """Constructs a :class:`PreparedRequest ` for + transmission and returns it.""" p = PreparedRequest() p.prepare( method=self.method, @@ -443,10 +457,11 @@ def prepare_url(self, url, params): if not host: raise InvalidURL(f"Invalid URL {url!r}: No host supplied") - # In general, we want to try IDNA encoding the hostname if the string contains - # non-ASCII characters. This allows users to automatically get the correct IDNA - # behaviour. For strings containing only ASCII characters, we need to also verify - # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + # In general, we want to try IDNA encoding the hostname if the string + # contains non-ASCII characters. This allows users to automatically + # get the correct IDNA behaviour. For strings containing only ASCII + # characters, we need to also verify it doesn't start with a wildcard + # (*), before allowing the unencoded hostname. if not unicode_is_ascii(host): try: host = self._get_idna_encoded_host(host) @@ -477,7 +492,9 @@ def prepare_url(self, url, params): else: query = enc_params - url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + url = requote_uri( + urlunparse([scheme, netloc, path, None, query, fragment]) + ) self.url = url def prepare_headers(self, headers): @@ -614,9 +631,9 @@ def prepare_cookies(self, cookies): given cookies using cookielib. 
Due to cookielib's design, the header will not be regenerated if it
        already exists, meaning this function can only be called once for the
        life of the
-        :class:`PreparedRequest ` object. Any subsequent calls
-        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
-        header is removed beforehand.
+        :class:`PreparedRequest ` object. Any subsequent
+        calls to ``prepare_cookies`` will have no actual effect, unless the
+        "Cookie" header is removed beforehand.
         """
         if isinstance(cookies, cookielib.CookieJar):
             self._cookies = cookies
@@ -681,7 +698,8 @@ def __init__(self):

         #: A list of :class:`Response ` objects from
         #: the history of the Request. Any redirect responses will end
-        #: up here. The list is sorted from the oldest to the most recent request.
+        #: up here. The list is sorted from the oldest to the most recent
+        #: request.
         self.history = []

         #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
@@ -768,10 +786,13 @@ def ok(self):

     @property
     def is_redirect(self):
-        """True if this Response is a well-formed HTTP redirect that could have
-        been processed automatically (by :meth:`Session.resolve_redirects`).
+        """True if this Response is a well-formed HTTP redirect that could
+        have been processed automatically
+        (by :meth:`Session.resolve_redirects`).
""" - return "location" in self.headers and self.status_code in REDIRECT_STATI + return ( + "location" in self.headers and self.status_code in REDIRECT_STATI + ) @property def is_permanent_redirect(self): @@ -783,12 +804,14 @@ def is_permanent_redirect(self): @property def next(self): - """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + """Returns a PreparedRequest for the next request in a redirect chain, + if there is one.""" return self._next @property def apparent_encoding(self): - """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + """The apparent encoding, provided by the charset_normalizer or + chardet libraries.""" if chardet is not None: return chardet.detect(self.content)["encoding"] else: @@ -840,7 +863,8 @@ def generate(): raise StreamConsumedError() elif chunk_size is not None and not isinstance(chunk_size, int): raise TypeError( - f"chunk_size must be an int, it is instead a {type(chunk_size)}." + f"chunk_size must be an int, it is instead a " + f"{type(chunk_size)}." ) # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) @@ -894,12 +918,16 @@ def content(self): if self._content is False: # Read the contents. if self._content_consumed: - raise RuntimeError("The content for this response was already consumed") + raise RuntimeError( + "The content for this response was already consumed" + ) if self.status_code == 0 or self.raw is None: self._content = None else: - self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" + self._content = ( + b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" + ) self._content_consumed = True # don't need to release the connection; that's been handled by urllib3 @@ -913,10 +941,11 @@ def text(self): If Response.encoding is None, encoding will be guessed using ``charset_normalizer`` or ``chardet``. 
- The encoding of the response content is determined based solely on HTTP - headers, following RFC 2616 to the letter. If you can take advantage of - non-HTTP knowledge to make a better guess at the encoding, you should - set ``r.encoding`` appropriately before accessing this property. + The encoding of the response content is determined based solely on + HTTP headers, following RFC 2616 to the letter. If you can take + advantage of non-HTTP knowledge to make a better guess at the + encoding, you should set ``r.encoding`` appropriately before accessing + this property. """ # Try charset from content-type @@ -934,8 +963,8 @@ def text(self): try: content = str(self.content, encoding, errors="replace") except (LookupError, TypeError): - # A LookupError is raised if the encoding was not found which could - # indicate a misspelling or similar mistake. + # A LookupError is raised if the encoding was not found which + # could indicate a misspelling or similar mistake. # # A TypeError can be raised if encoding is None # @@ -948,19 +977,21 @@ def json(self, **kwargs): r"""Returns the json-encoded content of a response, if any. :param \*\*kwargs: Optional arguments that ``json.loads`` takes. - :raises requests.exceptions.JSONDecodeError: If the response body does not - contain valid json. + :raises requests.exceptions.JSONDecodeError: If the response body does + not contain valid json. """ if not self.encoding and self.content and len(self.content) > 3: # No encoding set. JSON RFC 4627 section 3 states we should expect # UTF-8, -16 or -32. Detect which one to use; If the detection or - # decoding fails, fall back to `self.text` (using charset_normalizer to make - # a best guess). + # decoding fails, fall back to `self.text` (using + # charset_normalizer to make a best guess). 
encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
-                    return complexjson.loads(self.content.decode(encoding), **kwargs)
+                    return complexjson.loads(
+                        self.content.decode(encoding), **kwargs
+                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
@@ -1011,14 +1042,12 @@ def raise_for_status(self):

        reason = self.reason

        if 400 <= self.status_code < 500:
-            http_error_msg = (
-                f"{self.status_code} Client Error: {reason} for url: {self.url}"
-            )
+            http_error_msg = (f"{self.status_code} Client Error: {reason}"
+                              f" for url: {self.url}")

        elif 500 <= self.status_code < 600:
-            http_error_msg = (
-                f"{self.status_code} Server Error: {reason} for url: {self.url}"
-            )
+            http_error_msg = (f"{self.status_code} Server Error: {reason}"
+                              f" for url: {self.url}")

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)
diff --git a/src/requests/sessions.py b/src/requests/sessions.py
index b387bc36df..ac4ac7336b 100644
--- a/src/requests/sessions.py
+++ b/src/requests/sessions.py
@@ -5,6 +5,7 @@
 This module provides a Session object to manage and persist settings across
 requests (cookies, auth, proxies).
 """
+
 import os
 import sys
 import time
@@ -72,7 +73,8 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict):

     # Bypass if not a dictionary (e.g. verify)
     if not (
-        isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping)
+        isinstance(session_setting, Mapping)
+        and isinstance(request_setting, Mapping)
     ):
         return request_setting

@@ -106,12 +108,12 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
 class SessionRedirectMixin:
     def get_redirect_target(self, resp):
         """Receives a Response. 
Returns a redirect URI or ``None``""" - # Due to the nature of how requests processes redirects this method will - # be called at least once upon the original response and at least twice - # on each subsequent redirect response (if any). - # If a custom mixin is used to handle this logic, it may be advantageous - # to cache the redirect location onto the response object as a private - # attribute. + # Due to the nature of how requests processes redirects this method + # will be called at least once upon the original response and at least + # twice on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be + # advantageous to cache the redirect location onto the response object + # as a private attribute. if resp.is_redirect: location = resp.headers["location"] # Currently the underlying http module on py3 decode headers @@ -125,7 +127,8 @@ def get_redirect_target(self, resp): return None def should_strip_auth(self, old_url, new_url): - """Decide whether Authorization header should be removed when redirecting""" + """Decide whether Authorization header should be removed when + redirecting""" old_parsed = urlparse(old_url) new_parsed = urlparse(new_url) if old_parsed.hostname != new_parsed.hostname: @@ -168,7 +171,8 @@ def resolve_redirects( yield_requests=False, **adapter_kwargs, ): - """Receives a Response. Returns a generator of Responses or Requests.""" + """Receives a Response. 
Returns a generator of Responses or + Requests.""" hist = [] # keep track of history @@ -200,7 +204,8 @@ def resolve_redirects( parsed_rurl = urlparse(resp.url) url = ":".join([to_native_string(parsed_rurl.scheme), url]) - # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + # Normalize url case and attach previous fragment if needed + # (RFC 7231 7.1.2) parsed = urlparse(url) if parsed.fragment == "" and previous_fragment: parsed = parsed._replace(fragment=previous_fragment) @@ -209,8 +214,9 @@ def resolve_redirects( url = parsed.geturl() # Facilitate relative 'location' headers, as allowed by RFC 7231. - # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') - # Compliant with RFC3986, we percent encode the url. + # (e.g. '/path/to/resource' instead of + # 'http://domain.tld/path/to/resource'). Compliant with RFC3986, + # we percent encode the url. if not parsed.netloc: url = urljoin(resp.url, requote_uri(url)) else: @@ -226,7 +232,11 @@ def resolve_redirects( codes.permanent_redirect, ): # https://github.com/psf/requests/issues/3490 - purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding") + purged_headers = ( + "Content-Length", + "Content-Type", + "Transfer-Encoding", + ) for header in purged_headers: prepared_request.headers.pop(header, None) prepared_request.body = None @@ -245,9 +255,10 @@ def resolve_redirects( proxies = self.rebuild_proxies(prepared_request, proxies) self.rebuild_auth(prepared_request, resp) - # A failed tell() sets `_body_position` to `object()`. This non-None - # value ensures `rewindable` will be True, allowing us to raise an - # UnrewindableBodyError, instead of hanging the connection. + # A failed tell() sets `_body_position` to `object()`. This + # non-None value ensures `rewindable` will be True, allowing us to + # raise an UnrewindableBodyError, instead of hanging the + # connection. 
rewindable = prepared_request._body_position is not None and ( "Content-Length" in headers or "Transfer-Encoding" in headers ) @@ -273,7 +284,9 @@ def resolve_redirects( **adapter_kwargs, ) - extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + extract_cookies_to_jar( + self.cookies, prepared_request, resp.raw + ) # extract redirect url, if any, for the next loop url = self.get_redirect_target(resp) @@ -281,8 +294,9 @@ def resolve_redirects( def rebuild_auth(self, prepared_request, response): """When being redirected we may want to strip authentication from the - request to avoid leaking credentials. This method intelligently removes - and reapplies authentication where possible to avoid credential loss. + request to avoid leaking credentials. This method intelligently + removes and reapplies authentication where possible to avoid + credential loss. """ headers = prepared_request.headers url = prepared_request.url @@ -313,7 +327,9 @@ def rebuild_proxies(self, prepared_request, proxies): """ headers = prepared_request.headers scheme = urlparse(prepared_request.url).scheme - new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env) + new_proxies = resolve_proxies( + prepared_request, proxies, self.trust_env + ) if "Proxy-Authorization" in headers: del headers["Proxy-Authorization"] @@ -324,15 +340,18 @@ def rebuild_proxies(self, prepared_request, proxies): username, password = None, None # urllib3 handles proxy authorization for us in the standard adapter. - # Avoid appending this to TLS tunneled requests where it may be leaked. + # Avoid appending this to TLS tunneled requests where it may be + # leaked. 
if not scheme.startswith("https") and username and password: - headers["Proxy-Authorization"] = _basic_auth_str(username, password) + headers["Proxy-Authorization"] = _basic_auth_str( + username, password + ) return new_proxies def rebuild_method(self, prepared_request, response): - """When being redirected we may want to change the method of the request - based on certain specs or browser behavior. + """When being redirected we may want to change the method of the + request based on certain specs or browser behavior. """ method = prepared_request.method @@ -397,9 +416,10 @@ def __init__(self): #: :class:`Request `. self.auth = None - #: Dictionary mapping protocol or protocol and host to the URL of the proxy - #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to - #: be used on each :class:`Request `. + #: Dictionary mapping protocol or protocol and host to the URL of the + #: proxy + #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) + #: to be used on each :class:`Request `. self.proxies = {} #: Event-handling hooks. @@ -414,12 +434,12 @@ def __init__(self): self.stream = False #: SSL Verification default. - #: Defaults to `True`, requiring requests to verify the TLS certificate at the - #: remote end. - #: If verify is set to `False`, requests will accept any TLS certificate - #: presented by the server, and will ignore hostname mismatches and/or - #: expired certificates, which will make your application vulnerable to - #: man-in-the-middle (MitM) attacks. + #: Defaults to `True`, requiring requests to verify the TLS + #: certificate at the remote end. + #: If verify is set to `False`, requests will accept any TLS + #: certificate presented by the server, and will ignore hostname + #: mismatches and/or expired certificates, which will make your + #: application vulnerable to man-in-the-middle (MitM) attacks. #: Only set this to `False` for testing. 
self.verify = True

@@ -437,10 +457,10 @@ def __init__(self):
         #: authentication and similar.
         self.trust_env = True

-        #: A CookieJar containing all currently outstanding cookies set on this
-        #: session. By default it is a
-        #: :class:`RequestsCookieJar `, but
-        #: may be any other ``cookielib.CookieJar`` compatible object.
+        #: A CookieJar containing all currently outstanding cookies set on
+        #: this session. By default it is a
+        #: :class:`RequestsCookieJar `, but may be any
+        #: other ``cookielib.CookieJar`` compatible object.
         self.cookies = cookiejar_from_dict({})

         # Default connection adapters.
@@ -523,16 +543,16 @@ def request(
         :param url: URL for the new :class:`Request` object.
         :param params: (optional) Dictionary or bytes to be sent in the query
             string for the :class:`Request`.
-        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
-            object to send in the body of the :class:`Request`.
+        :param data: (optional) Dictionary, list of tuples, bytes, or
+            file-like object to send in the body of the :class:`Request`.
         :param json: (optional) json to send in the body of the
             :class:`Request`.
         :param headers: (optional) Dictionary of HTTP Headers to send with the
             :class:`Request`.
         :param cookies: (optional) Dict or CookieJar object to send with the
             :class:`Request`.
-        :param files: (optional) Dictionary of ``'filename': file-like-objects``
-            for multipart encoding upload.
+        :param files: (optional) Dictionary of
+            ``'filename': file-like-objects`` for multipart encoding upload.
         :param auth: (optional) Auth tuple or callable to enable
             Basic/Digest/Custom HTTP Auth.
         :param timeout: (optional) How long to wait for the server to send
@@ -547,16 +567,17 @@ def request(
             list of events, event must be callable.
         :param stream: (optional) whether to immediately download the response
             content. Defaults to ``False``.
- :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. When set to - ``False``, requests will accept any TLS certificate presented by - the server, and will ignore hostname mismatches and/or expired - certificates, which will make your application vulnerable to - man-in-the-middle (MitM) attacks. Setting verify to ``False`` - may be useful during local development or testing. - :param cert: (optional) if String, path to ssl client cert file (.pem). - If Tuple, ('cert', 'key') pair. + :param verify: (optional) Either a boolean, in which case it controls + whether we verify the server's TLS certificate, or a string, in + which case it must be a path to a CA bundle to use. Defaults to + ``True``. When set to ``False``, requests will accept any TLS + certificate presented by the server, and will ignore hostname + mismatches and/or expired certificates, which will make your + application vulnerable to man-in-the-middle (MitM) attacks. + Setting verify to ``False`` may be useful during local development + or testing. + :param cert: (optional) if String, path to ssl client cert file + (.pem). If Tuple, ('cert', 'key') pair. :rtype: requests.Response """ # Create the Request. @@ -627,9 +648,10 @@ def post(self, url, data=None, json=None, **kwargs): r"""Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json to send in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or + file-like object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. 
:param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ @@ -681,14 +703,17 @@ def send(self, request, **kwargs): kwargs.setdefault("verify", self.verify) kwargs.setdefault("cert", self.cert) if "proxies" not in kwargs: - kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) + kwargs["proxies"] = resolve_proxies( + request, self.proxies, self.trust_env + ) # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. if isinstance(request, Request): raise ValueError("You can only send PreparedRequests.") - # Set up variables needed for resolve_redirects and dispatching of hooks + # Set up variables needed for resolve_redirects and dispatching of + # hooks allow_redirects = kwargs.pop("allow_redirects", True) stream = kwargs.get("stream") hooks = request.hooks @@ -733,11 +758,14 @@ def send(self, request, **kwargs): r = history.pop() r.history = history - # If redirects aren't being followed, store the response on the Request for Response.next(). + # If redirects aren't being followed, store the response on the + # Request for Response.next(). if not allow_redirects: try: r._next = next( - self.resolve_redirects(r, request, yield_requests=True, **kwargs) + self.resolve_redirects( + r, request, yield_requests=True, **kwargs + ) ) except StopIteration: pass @@ -776,7 +804,12 @@ def merge_environment_settings(self, url, proxies, stream, verify, cert): verify = merge_setting(verify, self.verify) cert = merge_setting(cert, self.cert) - return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert} + return { + "proxies": proxies, + "stream": stream, + "verify": verify, + "cert": cert, + } def get_adapter(self, url): """ @@ -822,9 +855,10 @@ def session(): .. deprecated:: 1.0.0 - This method has been deprecated since version 1.0.0 and is only kept for - backwards compatibility. 
New code should use :class:`~requests.sessions.Session` - to create a session. This may be removed at a future date. + This method has been deprecated since version 1.0.0 and is only kept + for backwards compatibility. New code should use + :class:`~requests.sessions.Session` to create a session. This may be + removed at a future date. :rtype: Session """ diff --git a/src/requests/status_codes.py b/src/requests/status_codes.py index c7945a2f06..410b4e84eb 100644 --- a/src/requests/status_codes.py +++ b/src/requests/status_codes.py @@ -59,7 +59,11 @@ 404: ("not_found", "-o-"), 405: ("method_not_allowed", "not_allowed"), 406: ("not_acceptable",), - 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 407: ( + "proxy_authentication_required", + "proxy_auth", + "proxy_authentication", + ), 408: ("request_timeout", "timeout"), 409: ("conflict",), 410: ("gone",), @@ -100,7 +104,11 @@ 507: ("insufficient_storage",), 509: ("bandwidth_limit_exceeded", "bandwidth"), 510: ("not_extended",), - 511: ("network_authentication_required", "network_auth", "network_authentication"), + 511: ( + "network_authentication_required", + "network_auth", + "network_authentication", + ), } codes = LookupDict(name="status_codes") diff --git a/src/requests/structures.py b/src/requests/structures.py index 188e13e482..bc4dbeaa7a 100644 --- a/src/requests/structures.py +++ b/src/requests/structures.py @@ -62,7 +62,9 @@ def __len__(self): def lower_items(self): """Like iteritems(), but with all lowercase keys.""" - return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items()) + return ( + (lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items() + ) def __eq__(self, other): if isinstance(other, Mapping): diff --git a/src/requests/utils.py b/src/requests/utils.py index 699683e5d9..458cf533f3 100644 --- a/src/requests/utils.py +++ b/src/requests/utils.py @@ -24,7 +24,8 @@ from . 
import certs from .__version__ import __version__ -# to_native_string is unused here, but imported here for backwards compatibility +# to_native_string is unused here, but imported here for backwards +# compatibility from ._internal_utils import ( # noqa: F401 _HEADER_VALIDATORS_BYTE, _HEADER_VALIDATORS_STR, @@ -86,9 +87,13 @@ def proxy_bypass_registry(host): r"Software\Microsoft\Windows\CurrentVersion\Internet Settings", ) # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it - proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0]) + proxyEnable = int( + winreg.QueryValueEx(internetSettings, "ProxyEnable")[0] + ) # ProxyOverride is almost always a string - proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0] + proxyOverride = winreg.QueryValueEx( + internetSettings, "ProxyOverride" + )[0] except (OSError, ValueError): return False if not proxyEnable or not proxyOverride: @@ -98,7 +103,8 @@ def proxy_bypass_registry(host): # '' string by the localhost entry and the corresponding # canonical entry. proxyOverride = proxyOverride.split(";") - # filter out empty strings to avoid re.match return true in the following code. + # filter out empty strings to avoid re.match return true in the + # following code. proxyOverride = filter(None, proxyOverride) # now check if we match one of the registry values. for test in proxyOverride: @@ -152,9 +158,10 @@ def super_len(o): try: fileno = o.fileno() except (io.UnsupportedOperation, AttributeError): - # AttributeError is a surprising exception, seeing as how we've just checked - # that `hasattr(o, 'fileno')`. It happens for objects obtained via - # `Tarfile.extractfile()`, per issue 5229. + # AttributeError is a surprising exception, seeing as how we've + # just checked + # that `hasattr(o, 'fileno')`. It happens for objects obtained + # via `Tarfile.extractfile()`, per issue 5229. 
pass else: total_length = os.fstat(fileno).st_size @@ -165,11 +172,11 @@ def super_len(o): warnings.warn( ( "Requests has determined the content-length for this " - "request using the binary size of the file: however, the " - "file has been opened in text mode (i.e. without the 'b' " - "flag in the mode). This may lead to an incorrect " - "content-length. In Requests 3.0, support will be removed " - "for files in text mode." + "request using the binary size of the file: however, " + "the file has been opened in text mode (i.e. without " + "the 'b' flag in the mode). This may lead to an " + "incorrect content-length. In Requests 3.0, support " + "will be removed for files in text mode." ), FileModeWarning, ) @@ -180,8 +187,8 @@ def super_len(o): except OSError: # This can happen in some weird situations, such as when the file # is actually a special file descriptor like stdin. In this - # instance, we don't know what the length is, so set it to zero and - # let requests chunk it instead. + # instance, we don't know what the length is, so set it to zero + # and let requests chunk it instead. if total_length is not None: current_position = total_length else: @@ -251,8 +258,9 @@ def get_netrc_auth(url, raise_errors=False): login_i = 0 if _netrc[0] else 1 return (_netrc[login_i], _netrc[2]) except (NetrcParseError, OSError): - # If there was a parsing error or a permissions issue reading the file, - # we'll just skip netrc auth unless explicitly asked to raise errors. + # If there was a parsing error or a permissions issue reading the + # file, we'll just skip netrc auth unless explicitly asked to + # raise errors. 
if raise_errors: raise @@ -264,27 +272,35 @@ def get_netrc_auth(url, raise_errors=False): def guess_filename(obj): """Tries to guess the filename of the given object.""" name = getattr(obj, "name", None) - if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">": + if ( + name + and isinstance(name, basestring) + and name[0] != "<" + and name[-1] != ">" + ): return os.path.basename(name) def extract_zipped_paths(path): - """Replace nonexistent paths that look like they refer to a member of a zip - archive with the location of an extracted copy of the target, or else + """Replace nonexistent paths that look like they refer to a member of a + zip archive with the location of an extracted copy of the target, or else just return the provided path unchanged. """ if os.path.exists(path): # this is already a valid path, no need to do anything further return path - # find the first valid part of the provided path and treat that as a zip archive - # assume the rest of the path is the name of a member in the archive + # find the first valid part of the provided path and treat that as a zip + # archive assume the rest of the path is the name of a member in the + # archive archive, member = os.path.split(path) while archive and not os.path.exists(archive): archive, prefix = os.path.split(archive) if not prefix: - # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), - # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users + # If we don't check for an empty prefix after the split (in other + # words, archive remains unchanged after the split), we _can_ end + # up in an infinite loop on a rare corner case affecting a small + # number of users break member = "/".join([prefix, member]) @@ -299,7 +315,8 @@ def extract_zipped_paths(path): tmp = tempfile.gettempdir() extracted_path = os.path.join(tmp, member.split("/")[-1]) if not 
os.path.exists(extracted_path): - # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + # use read + write to avoid the creating nested folders, we only want + # the file, avoids mkdir racing condition with atomic_open(extracted_path) as file_handler: file_handler.write(zip_file.read(member)) return extracted_path @@ -496,15 +513,17 @@ def get_encodings_from_content(content): """ warnings.warn( ( - "In requests 3.0, get_encodings_from_content will be removed. For " - "more information, please see the discussion on issue #2266. (This" - " warning should only appear once.)" + "In requests 3.0, get_encodings_from_content will be removed. " + "For more information, please see the discussion on issue #2266. " + "(This warning should only appear once.)" ), DeprecationWarning, ) charset_re = re.compile(r']', flags=re.I) - pragma_re = re.compile(r']', flags=re.I) + pragma_re = re.compile( + r']', flags=re.I + ) xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') return ( @@ -534,7 +553,7 @@ def _parse_content_type_header(header): index_of_equals = param.find("=") if index_of_equals != -1: key = param[:index_of_equals].strip(items_to_strip) - value = param[index_of_equals + 1 :].strip(items_to_strip) + value = param[index_of_equals + 1:].strip(items_to_strip) params_dict[key.lower()] = value return content_type, params_dict @@ -560,7 +579,8 @@ def get_encoding_from_headers(headers): return "ISO-8859-1" if "application/json" in content_type: - # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt + # since the charset was unset return "utf-8" @@ -587,7 +607,7 @@ def iter_slices(string, slice_length): if slice_length is None or slice_length <= 0: slice_length = len(string) while pos < len(string): - yield string[pos : pos + slice_length] + yield string[pos: pos + slice_length] pos += 
slice_length @@ -606,8 +626,8 @@ def get_unicode_from_response(r): warnings.warn( ( "In requests 3.0, get_unicode_from_response will be removed. For " - "more information, please see the discussion on issue #2266. (This" - " warning should only appear once.)" + "more information, please see the discussion on issue #2266. " + "(This warning should only appear once.)" ), DeprecationWarning, ) @@ -692,7 +712,9 @@ def address_in_network(ip, net): """ ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] netaddr, bits = net.split("/") - netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[ + 0 + ] network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask return (ipaddr & netmask) == (network & netmask) @@ -791,7 +813,9 @@ def get_proxy(key): if no_proxy: # We need to check whether we match here. We need to see if we match # the end of the hostname, both with and without the port. - no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host) + no_proxy = ( + host for host in no_proxy.replace(" ", "").split(",") if host + ) if is_ipv4_address(parsed.hostname): for proxy_ip in no_proxy: @@ -799,8 +823,8 @@ def get_proxy(key): if address_in_network(parsed.hostname, proxy_ip): return True elif parsed.hostname == proxy_ip: - # If no_proxy ip was defined in plain IP notation instead of cidr notation & - # matches the IP of the index + # If no_proxy ip was defined in plain IP notation instead + # of cidr notation & matches the IP of the index return True else: host_with_port = parsed.hostname @@ -808,9 +832,11 @@ def get_proxy(key): host_with_port += f":{parsed.port}" for host in no_proxy: - if parsed.hostname.endswith(host) or host_with_port.endswith(host): - # The URL does match something in no_proxy, so we don't want - # to apply the proxies on this URL. 
+ if parsed.hostname.endswith(host) or host_with_port.endswith( + host + ): + # The URL does match something in no_proxy, so we don't + # want to apply the proxies on this URL. return True with set_environ("no_proxy", no_proxy_arg): @@ -917,7 +943,8 @@ def default_headers(): def parse_header_links(value): """Return a list of parsed link headers proxies. - i.e. Link: ; rel=front; type="image/jpeg",; rel=back;type="image/jpeg" + i.e. Link: ; rel=front; type="image/jpeg", + ; rel=back;type="image/jpeg" :rtype: list """ @@ -990,18 +1017,19 @@ def guess_json_utf(data): def prepend_scheme_if_needed(url, new_scheme): - """Given a URL that may or may not have a scheme, prepend the given scheme. - Does not replace a present scheme with the one provided as an argument. + """Given a URL that may or may not have a scheme, prepend the given + scheme. Does not replace a present scheme with the one provided as an + argument. :rtype: str """ parsed = parse_url(url) scheme, auth, host, port, path, query, fragment = parsed - # A defect in urlparse determines that there isn't a netloc present in some - # urls. We previously assumed parsing was overly cautious, and swapped the - # netloc and path. Due to a lack of tests on the original defect, this is - # maintained with parse_url for backwards compatibility. + # A defect in urlparse determines that there isn't a netloc present in + # some urls. We previously assumed parsing was overly cautious, and + # swapped the netloc and path. Due to a lack of tests on the original + # defect, this is maintained with parse_url for backwards compatibility. netloc = parsed.netloc if not netloc: netloc, path = path, netloc @@ -1019,8 +1047,8 @@ def prepend_scheme_if_needed(url, new_scheme): def get_auth_from_url(url): - """Given a url with authentication components, extract them into a tuple of - username,password. + """Given a url with authentication components, extract them into a tuple + of username,password. 
:rtype: (str,str) """ @@ -1096,4 +1124,6 @@ def rewind_body(prepared_request): "An error occurred when rewinding request body for redirect." ) else: - raise UnrewindableBodyError("Unable to rewind request body for redirect.") + raise UnrewindableBodyError( + "Unable to rewind request body for redirect." + ) diff --git a/tests/test_adapters.py b/tests/test_adapters.py index 6c55d5a130..427672d0f3 100644 --- a/tests/test_adapters.py +++ b/tests/test_adapters.py @@ -4,5 +4,7 @@ def test_request_url_trims_leading_path_separators(): """See also https://github.com/psf/requests/issues/6643.""" a = requests.adapters.HTTPAdapter() - p = requests.Request(method="GET", url="http://127.0.0.1:10000//v:h").prepare() + p = requests.Request( + method="GET", url="http://127.0.0.1:10000//v:h" + ).prepare() assert "/v:h" == a.request_url(p, {}) diff --git a/tests/test_help.py b/tests/test_help.py index 5fca6207ef..0a45f53d08 100644 --- a/tests/test_help.py +++ b/tests/test_help.py @@ -4,7 +4,8 @@ def test_system_ssl(): - """Verify we're actually setting system_ssl when it should be available.""" + """Verify we're actually setting system_ssl when it should be + available.""" assert info()["system_ssl"]["version"] != "" @@ -22,6 +23,7 @@ def test_idna_without_version_attribute(): def test_idna_with_version_attribute(): - """Verify we're actually setting idna version when it should be available.""" + """Verify we're actually setting idna version when it should be + available.""" with mock.patch("requests.help.idna", new=VersionedPackage("2.6")): assert info()["idna"] == {"version": "2.6"} diff --git a/tests/test_hooks.py b/tests/test_hooks.py index 7445525ec8..f18533ca4a 100644 --- a/tests/test_hooks.py +++ b/tests/test_hooks.py @@ -15,7 +15,10 @@ def hook(value): ), ) def test_hooks(hooks_list, result): - assert hooks.dispatch_hook("response", {"response": hooks_list}, "Data") == result + assert ( + hooks.dispatch_hook("response", {"response": hooks_list}, "Data") + == result + ) def 
test_default_hooks(): diff --git a/tests/test_lowlevel.py b/tests/test_lowlevel.py index 859d07e8a5..c5dc3c20c6 100644 --- a/tests/test_lowlevel.py +++ b/tests/test_lowlevel.py @@ -13,11 +13,10 @@ def echo_response_handler(sock): """Simple handler that will take request and echo it back to requester.""" request_content = consume_socket_content(sock, timeout=0.5) - text_200 = ( - b"HTTP/1.1 200 OK\r\n" - b"Content-Length: %d\r\n\r\n" - b"%s" - ) % (len(request_content), request_content) + text_200 = (b"HTTP/1.1 200 OK\r\n" b"Content-Length: %d\r\n\r\n" b"%s") % ( + len(request_content), + request_content, + ) sock.send(text_200) @@ -42,11 +41,9 @@ def test_chunked_encoding_error(): def incomplete_chunked_response_handler(sock): request_content = consume_socket_content(sock, timeout=0.5) - # The server never ends the request and doesn't provide any valid chunks - sock.send( - b"HTTP/1.1 200 OK\r\n" - b"Transfer-Encoding: chunked\r\n" - ) + # The server never ends the request and doesn't provide any valid + # chunks + sock.send(b"HTTP/1.1 200 OK\r\n" b"Transfer-Encoding: chunked\r\n") return request_content @@ -70,7 +67,9 @@ def test_chunked_upload_uses_only_specified_host_header(): with server as (host, port): url = f"http://{host}:{port}/" - r = requests.post(url, data=data, headers={"Host": custom_host}, stream=True) + r = requests.post( + url, data=data, headers={"Host": custom_host}, stream=True + ) close_server.set() # release server block expected_header = b"Host: %s\r\n" % custom_host.encode("utf-8") @@ -130,24 +129,29 @@ def test_digestauth_401_count_reset_on_redirect(): See https://github.com/psf/requests/issues/1979. 
""" - text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') - - text_302 = (b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /\r\n\r\n') - - text_200 = (b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 0\r\n\r\n') - - expected_digest = (b'Authorization: Digest username="user", ' - b'realm="me@kennethreitz.com", ' - b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"') - - auth = requests.auth.HTTPDigestAuth('user', 'pass') + text_401 = ( + b"HTTP/1.1 401 UNAUTHORIZED\r\n" + b"Content-Length: 0\r\n" + b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' + b', opaque="372825293d1c26955496c80ed6426e9e", ' + b'realm="me@kennethreitz.com", qop=auth\r\n\r\n' + ) + + text_302 = ( + b"HTTP/1.1 302 FOUND\r\n" + b"Content-Length: 0\r\n" + b"Location: /\r\n\r\n" + ) + + text_200 = b"HTTP/1.1 200 OK\r\n" b"Content-Length: 0\r\n\r\n" + + expected_digest = ( + b'Authorization: Digest username="user", ' + b'realm="me@kennethreitz.com", ' + b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"' + ) + + auth = requests.auth.HTTPDigestAuth("user", "pass") def digest_response_handler(sock): # Respond to initial GET with a challenge. @@ -163,7 +167,7 @@ def digest_response_handler(sock): # Verify Authorization isn't sent to the redirected host, # then send another challenge. request_content = consume_socket_content(sock, timeout=0.5) - assert b'Authorization:' not in request_content + assert b"Authorization:" not in request_content sock.send(text_401) # Verify Authorization is sent correctly again, and return 200 OK. 
@@ -177,13 +181,13 @@ def digest_response_handler(sock): server = Server(digest_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = requests.get(url, auth=auth) # Verify server succeeded in authenticating. assert r.status_code == 200 # Verify Authorization was sent in final request. - assert 'Authorization' in r.request.headers - assert r.request.headers['Authorization'].startswith('Digest ') + assert "Authorization" in r.request.headers + assert r.request.headers["Authorization"].startswith("Digest ") # Verify redirect happened as we expected. assert r.history[0].status_code == 302 close_server.set() @@ -193,17 +197,21 @@ def test_digestauth_401_only_sent_once(): """Ensure we correctly respond to a 401 challenge once, and then stop responding if challenged again. """ - text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') - - expected_digest = (b'Authorization: Digest username="user", ' - b'realm="me@kennethreitz.com", ' - b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"') - - auth = requests.auth.HTTPDigestAuth('user', 'pass') + text_401 = ( + b"HTTP/1.1 401 UNAUTHORIZED\r\n" + b"Content-Length: 0\r\n" + b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' + b', opaque="372825293d1c26955496c80ed6426e9e", ' + b'realm="me@kennethreitz.com", qop=auth\r\n\r\n' + ) + + expected_digest = ( + b'Authorization: Digest username="user", ' + b'realm="me@kennethreitz.com", ' + b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"' + ) + + auth = requests.auth.HTTPDigestAuth("user", "pass") def digest_failed_response_handler(sock): # Respond to initial GET with a challenge. 
@@ -219,15 +227,17 @@ def digest_failed_response_handler(sock): # Verify the client didn't respond to second challenge. request_content = consume_socket_content(sock, timeout=0.5) - assert request_content == b'' + assert request_content == b"" return request_content close_server = threading.Event() - server = Server(digest_failed_response_handler, wait_to_close_event=close_server) + server = Server( + digest_failed_response_handler, wait_to_close_event=close_server + ) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = requests.get(url, auth=auth) # Verify server didn't authenticate us. assert r.status_code == 401 @@ -240,13 +250,15 @@ def test_digestauth_only_on_4xx(): See https://github.com/psf/requests/issues/3772. """ - text_200_chal = (b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') + text_200_chal = ( + b"HTTP/1.1 200 OK\r\n" + b"Content-Length: 0\r\n" + b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' + b', opaque="372825293d1c26955496c80ed6426e9e", ' + b'realm="me@kennethreitz.com", qop=auth\r\n\r\n' + ) - auth = requests.auth.HTTPDigestAuth('user', 'pass') + auth = requests.auth.HTTPDigestAuth("user", "pass") def digest_response_handler(sock): # Respond to GET with a 200 containing www-authenticate header. @@ -256,7 +268,7 @@ def digest_response_handler(sock): # Verify the client didn't respond with auth. 
request_content = consume_socket_content(sock, timeout=0.5) - assert request_content == b'' + assert request_content == b"" return request_content @@ -264,7 +276,7 @@ def digest_response_handler(sock): server = Server(digest_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = requests.get(url, auth=auth) # Verify server didn't receive auth from us. assert r.status_code == 200 @@ -273,9 +285,9 @@ def digest_response_handler(sock): _schemes_by_var_prefix = [ - ('http', ['http']), - ('https', ['https']), - ('all', ['http', 'https']), + ("http", ["http"]), + ("https", ["https"]), + ("all", ["http", "https"]), ] _proxy_combos = [] @@ -289,7 +301,9 @@ def digest_response_handler(sock): @pytest.mark.parametrize("var,scheme", _proxy_combos) def test_use_proxy_from_environment(httpbin, var, scheme): url = f"{scheme}://httpbin.org" - fake_proxy = Server() # do nothing with the requests; just close the socket + fake_proxy = ( + Server() + ) # do nothing with the requests; just close the socket with fake_proxy as (host, port): proxy_url = f"socks5://{host}:{port}" kwargs = {var: proxy_url} @@ -306,35 +320,38 @@ def test_use_proxy_from_environment(httpbin, var, scheme): def test_redirect_rfc1808_to_non_ascii_location(): - path = 'š' - expected_path = b'%C5%A1' + path = "š" + expected_path = b"%C5%A1" redirect_request = [] # stores the second request to the server def redirect_resp_handler(sock): consume_socket_content(sock, timeout=0.5) - location = f'//{host}:{port}/{path}' + location = f"//{host}:{port}/{path}" sock.send( ( - b'HTTP/1.1 301 Moved Permanently\r\n' - b'Content-Length: 0\r\n' - b'Location: %s\r\n' - b'\r\n' - ) % location.encode('utf8') + b"HTTP/1.1 301 Moved Permanently\r\n" + b"Content-Length: 0\r\n" + b"Location: %s\r\n" + b"\r\n" + ) + % location.encode("utf8") ) redirect_request.append(consume_socket_content(sock, timeout=0.5)) - sock.send(b'HTTP/1.1 200 
OK\r\n\r\n') + sock.send(b"HTTP/1.1 200 OK\r\n\r\n") close_server = threading.Event() server = Server(redirect_resp_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}' + url = f"http://{host}:{port}" r = requests.get(url=url, allow_redirects=True) assert r.status_code == 200 assert len(r.history) == 1 assert r.history[0].status_code == 301 - assert redirect_request[0].startswith(b'GET /' + expected_path + b' HTTP/1.1') - assert r.url == '{}/{}'.format(url, expected_path.decode('ascii')) + assert redirect_request[0].startswith( + b"GET /" + expected_path + b" HTTP/1.1" + ) + assert r.url == "{}/{}".format(url, expected_path.decode("ascii")) close_server.set() @@ -345,16 +362,16 @@ def test_fragment_not_sent_with_request(): server = Server(echo_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2' + url = f"http://{host}:{port}/path/to/thing/#view=edit&token=hunter2" r = requests.get(url) raw_request = r.content assert r.status_code == 200 - headers, body = raw_request.split(b'\r\n\r\n', 1) - status_line, headers = headers.split(b'\r\n', 1) + headers, body = raw_request.split(b"\r\n\r\n", 1) + status_line, headers = headers.split(b"\r\n", 1) - assert status_line == b'GET /path/to/thing/ HTTP/1.1' - for frag in (b'view', b'edit', b'token', b'hunter2'): + assert status_line == b"GET /path/to/thing/ HTTP/1.1" + for frag in (b"view", b"edit", b"token", b"hunter2"): assert frag not in headers assert frag not in body @@ -370,36 +387,38 @@ def test_fragment_update_on_redirect(): def response_handler(sock): consume_socket_content(sock, timeout=0.5) sock.send( - b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /get#relevant-section\r\n\r\n' + b"HTTP/1.1 302 FOUND\r\n" + b"Content-Length: 0\r\n" + b"Location: /get#relevant-section\r\n\r\n" ) consume_socket_content(sock, timeout=0.5) sock.send( - b'HTTP/1.1 302 
FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /final-url/\r\n\r\n' + b"HTTP/1.1 302 FOUND\r\n" + b"Content-Length: 0\r\n" + b"Location: /final-url/\r\n\r\n" ) consume_socket_content(sock, timeout=0.5) - sock.send( - b'HTTP/1.1 200 OK\r\n\r\n' - ) + sock.send(b"HTTP/1.1 200 OK\r\n\r\n") close_server = threading.Event() server = Server(response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2' + url = f"http://{host}:{port}/path/to/thing/#view=edit&token=hunter2" r = requests.get(url) assert r.status_code == 200 assert len(r.history) == 2 assert r.history[0].request.url == url - # Verify we haven't overwritten the location with our previous fragment. - assert r.history[1].request.url == f'http://{host}:{port}/get#relevant-section' + # Verify we haven't overwritten the location with our previous + # fragment. + assert ( + r.history[1].request.url + == f"http://{host}:{port}/get#relevant-section" + ) # Verify previous fragment is used and not the original. 
- assert r.url == f'http://{host}:{port}/final-url/#relevant-section' + assert r.url == f"http://{host}:{port}/final-url/#relevant-section" close_server.set() @@ -409,8 +428,8 @@ def test_json_decode_compatibility_for_alt_utf_encodings(): def response_handler(sock): consume_socket_content(sock, timeout=0.5) sock.send( - b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 18\r\n\r\n' + b"HTTP/1.1 200 OK\r\n" + b"Content-Length: 18\r\n\r\n" b'\xff\xfe{\x00"\x00K0"\x00=\x00"\x00\xab0"\x00\r\n' ) @@ -418,7 +437,7 @@ def response_handler(sock): server = Server(response_handler, wait_to_close_event=close_server) with server as (host, port): - url = f'http://{host}:{port}/' + url = f"http://{host}:{port}/" r = requests.get(url) r.encoding = None with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo: diff --git a/tests/test_requests.py b/tests/test_requests.py index d8fbb23688..2f998d4c26 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -45,7 +45,11 @@ RetryError, ) from requests.exceptions import SSLError as RequestsSSLError -from requests.exceptions import Timeout, TooManyRedirects, UnrewindableBodyError +from requests.exceptions import ( + Timeout, + TooManyRedirects, + UnrewindableBodyError, +) from requests.hooks import default_hooks from requests.models import PreparedRequest, urlencode from requests.sessions import SessionRedirectMixin @@ -91,7 +95,9 @@ def test_entry_points(self): requests.patch requests.post # Not really an entry point, but people rely on it. 
- from requests.packages.urllib3.poolmanager import PoolManager # noqa:F401 + from requests.packages.urllib3.poolmanager import ( + PoolManager, + ) # noqa:F401 (imported but unused) @pytest.mark.parametrize( "exception, url", @@ -130,17 +136,23 @@ def test_no_body_content_length(self, httpbin, method): @pytest.mark.parametrize("method", ("POST", "PUT", "PATCH", "OPTIONS")) def test_empty_content_length(self, httpbin, method): - req = requests.Request(method, httpbin(method.lower()), data="").prepare() + req = requests.Request( + method, httpbin(method.lower()), data="" + ).prepare() assert req.headers["Content-Length"] == "0" def test_override_content_length(self, httpbin): headers = {"Content-Length": "not zero"} - r = requests.Request("POST", httpbin("post"), headers=headers).prepare() + r = requests.Request( + "POST", httpbin("post"), headers=headers + ).prepare() assert "Content-Length" in r.headers assert r.headers["Content-Length"] == "not zero" def test_path_is_not_double_encoded(self): - request = requests.Request("GET", "http://0.0.0.0/get/test case").prepare() + request = requests.Request( + "GET", "http://0.0.0.0/get/test case" + ).prepare() assert request.path_url == "/get/test%20case" @@ -189,7 +201,9 @@ def test_whitespaces_are_removed_from_url(self): request = requests.Request("GET", " http://example.com").prepare() assert request.url == "http://example.com/" - @pytest.mark.parametrize("scheme", ("http://", "HTTP://", "hTTp://", "HttP://")) + @pytest.mark.parametrize( + "scheme", ("http://", "HTTP://", "hTTp://", "HttP://") + ) def test_mixed_case_scheme_acceptable(self, httpbin, scheme): s = requests.Session() s.proxies = getproxies() @@ -246,7 +260,9 @@ def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin): assert e.response.url == url assert len(e.response.history) == 30 else: - pytest.fail("Expected redirect to raise TooManyRedirects but it did not") + pytest.fail( + "Expected redirect to raise TooManyRedirects but it did not" + ) def 
test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin): s = requests.session() @@ -260,7 +276,8 @@ def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin): assert len(e.response.history) == 5 else: pytest.fail( - "Expected custom max number of redirects to be respected but was not" + "Expected custom max number of redirects to be respected " + "but was not" ) def test_http_301_changes_post_to_get(self, httpbin): @@ -326,11 +343,14 @@ def test_header_and_body_removal_on_redirect(self, httpbin): def test_transfer_enc_removal_on_redirect(self, httpbin): purged_headers = ("Transfer-Encoding", "Content-Type") ses = requests.Session() - req = requests.Request("POST", httpbin("post"), data=(b"x" for x in range(1))) + req = requests.Request( + "POST", httpbin("post"), data=(b"x" for x in range(1)) + ) prep = ses.prepare_request(req) assert "Transfer-Encoding" in prep.headers - # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33 + # Create Response to avoid + # https://github.com/kevin1024/pytest-httpbin/issues/33 resp = requests.Response() resp.raw = io.BytesIO(b"the content") resp.request = prep @@ -351,7 +371,10 @@ def test_fragment_maintained_on_redirect(self, httpbin): r = requests.get(httpbin("redirect-to?url=get") + fragment) assert len(r.history) > 0 - assert r.history[0].request.url == httpbin("redirect-to?url=get") + fragment + assert ( + r.history[0].request.url + == httpbin("redirect-to?url=get") + fragment + ) assert r.url == httpbin("get") + fragment def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin): @@ -388,7 +411,10 @@ def test_cookie_removed_on_expire(self, httpbin): assert s.cookies["foo"] == "bar" s.get( httpbin("response-headers"), - params={"Set-Cookie": "foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT"}, + params={ + "Set-Cookie": "foo=deleted; expires=Thu, 01-Jan-1970 " + "00:00:01 GMT" + }, ) assert "foo" not in s.cookies @@ -460,7 +486,9 @@ def test_cookielib_cookiejar_on_redirect(self, httpbin): # 
Verify CookieJar isn't being converted to RequestsCookieJar assert isinstance(prep_req._cookies, cookielib.CookieJar) assert isinstance(resp.request._cookies, cookielib.CookieJar) - assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar) + assert not isinstance( + resp.request._cookies, requests.cookies.RequestsCookieJar + ) cookies = {} for c in resp.request._cookies: @@ -556,7 +584,9 @@ def test_set_basicauth(self, httpbin, username, password): r = requests.Request("GET", url, auth=auth) p = r.prepare() - assert p.headers["Authorization"] == _basic_auth_str(username, password) + assert p.headers["Authorization"] == _basic_auth_str( + username, password + ) def test_basicauth_encodes_byte_strings(self): """Ensure b'test' formats as the byte string "test" rather @@ -575,7 +605,8 @@ def test_basicauth_encodes_byte_strings(self): ("http://doesnotexist.google.com", ConnectionError), # Connecting to an invalid port should raise a ConnectionError ("http://localhost:1", ConnectionError), - # Inputing a URL that cannot be parsed should raise an InvalidURL error + # Inputing a URL that cannot be parsed should raise an InvalidURL + # error ("http://fe80::5054:ff:fe5a:fc0", InvalidURL), ), ) @@ -584,15 +615,19 @@ def test_errors(self, url, exception): requests.get(url, timeout=1) def test_proxy_error(self): - # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError + # any proxy related error (address resolution, no route to host, etc) + # should result in a ProxyError with pytest.raises(ProxyError): requests.get( - "http://localhost:1", proxies={"http": "non-resolvable-address"} + "http://localhost:1", + proxies={"http": "non-resolvable-address"}, ) def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure): with pytest.raises(InvalidProxyURL): - requests.get(httpbin_secure(), proxies={"https": "http:/badproxyurl:3128"}) + requests.get( + httpbin_secure(), proxies={"https": "http:/badproxyurl:3128"} + ) 
with pytest.raises(InvalidProxyURL): requests.get(httpbin(), proxies={"http": "http://:8080"}) @@ -601,7 +636,9 @@ def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure): requests.get(httpbin_secure(), proxies={"https": "https://"}) with pytest.raises(InvalidProxyURL): - requests.get(httpbin(), proxies={"http": "http:///example.com:8080"}) + requests.get( + httpbin(), proxies={"http": "http:///example.com:8080"} + ) def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin): with override_environ(http_proxy=INVALID_PROXY): @@ -707,7 +744,9 @@ def get_netrc_auth_mock(url): def test_DIGEST_HTTP_200_OK_GET(self, httpbin): for authtype in self.digest_auth_algo: auth = HTTPDigestAuth("user", "pass") - url = httpbin("digest-auth", "auth", "user", "pass", authtype, "never") + url = httpbin( + "digest-auth", "auth", "user", "pass", authtype, "never" + ) r = requests.get(url, auth=auth) assert r.status_code == 200 @@ -793,7 +832,9 @@ def test_POSTBIN_GET_POST_FILES(self, httpbin): def test_invalid_files_input(self, httpbin): url = httpbin("post") - post = requests.post(url, files={"random-file-1": None, "random-file-2": 1}) + post = requests.post( + url, files={"random-file-1": None, "random-file-2": 1} + ) assert b'name="random-file-1"' not in post.request.body assert b'name="random-file-2"' in post.request.body @@ -809,10 +850,10 @@ def __len__(self): def read(self, size=None): if size: - ret = self.data[self.index : self.index + size] + ret = self.data[self.index: self.index + size] self.index += size else: - ret = self.data[self.index :] + ret = self.data[self.index:] self.index = self.length return ret @@ -846,7 +887,9 @@ def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin): assert post1.status_code == 200 with open("requirements-dev.txt") as f: - post2 = requests.post(url, data={"some": "data"}, files={"some": f}) + post2 = requests.post( + url, data={"some": "data"}, files={"some": f} + ) assert post2.status_code == 200 post4 = 
requests.post(url, data='[{"some": "json"}]') @@ -884,7 +927,9 @@ def test_conflicting_post_params(self, httpbin): url = httpbin("post") with open("requirements-dev.txt") as f: with pytest.raises(ValueError): - requests.post(url, data='[{"some": "data"}]', files={"some": f}) + requests.post( + url, data='[{"some": "data"}]', files={"some": f} + ) def test_request_ok_set(self, httpbin): r = requests.get(httpbin("status", "404")) @@ -931,7 +976,8 @@ def test_invalid_ca_certificate_path(self, httpbin_secure): requests.get(httpbin_secure(), verify=INVALID_PATH) assert str( e.value - ) == "Could not find a suitable TLS CA certificate bundle, invalid path: {}".format( + ) == "Could not find a suitable TLS CA certificate bundle, invalid " + "path: {}".format( INVALID_PATH ) @@ -941,7 +987,8 @@ def test_invalid_ssl_certificate_files(self, httpbin_secure): requests.get(httpbin_secure(), cert=INVALID_PATH) assert str( e.value - ) == "Could not find the TLS certificate file, invalid path: {}".format( + ) == "Could not find the TLS certificate file, invalid path: " + "{}".format( INVALID_PATH ) @@ -980,7 +1027,11 @@ def test_env_cert_bundles(self, httpbin, env, expected): s = requests.Session() with mock.patch("os.environ", env): settings = s.merge_environment_settings( - url=httpbin("get"), proxies={}, stream=False, verify=True, cert=None + url=httpbin("get"), + proxies={}, + stream=False, + verify=True, + cert=None, ) assert settings["verify"] == expected @@ -1014,7 +1065,9 @@ def test_https_warnings(self, nosan_server): if item.category.__name__ != "ResourceWarning" ] - warnings_category = tuple(item.category.__name__ for item in warning_records) + warnings_category = tuple( + item.category.__name__ for item in warning_records + ) assert warnings_category == warnings_expected def test_certificate_failure(self, httpbin_secure): @@ -1186,7 +1239,9 @@ def __call__(self, r): prep = s.prepare_request(req) resp = s.send(prep) - assert resp.json()["headers"]["Dummy-Auth-Test"] == 
"dummy-auth-test-ok" + assert ( + resp.json()["headers"]["Dummy-Auth-Test"] == "dummy-auth-test-ok" + ) def test_prepare_request_with_bytestring_url(self): req = requests.Request("GET", b"https://httpbin.org/") @@ -1464,11 +1519,17 @@ def test_response_chunk_size_type(self): ( (urllib3.exceptions.ProtocolError, tuple(), ChunkedEncodingError), (urllib3.exceptions.DecodeError, tuple(), ContentDecodingError), - (urllib3.exceptions.ReadTimeoutError, (None, "", ""), ConnectionError), + ( + urllib3.exceptions.ReadTimeoutError, + (None, "", ""), + ConnectionError, + ), (urllib3.exceptions.SSLError, tuple(), RequestsSSLError), ), ) - def test_iter_content_wraps_exceptions(self, httpbin, exception, args, expected): + def test_iter_content_wraps_exceptions( + self, httpbin, exception, args, expected + ): r = requests.Response() r.raw = mock.Mock() # ReadTimeoutError can't be initialized by mock @@ -1622,7 +1683,9 @@ def test_session_get_adapter_prefix_matching(self): more_specific_prefix = prefix + "/some/path" url_matching_only_prefix = prefix + "/another/path" - url_matching_more_specific_prefix = more_specific_prefix + "/longer/path" + url_matching_more_specific_prefix = ( + more_specific_prefix + "/longer/path" + ) url_not_matching_prefix = "https://another.example.com/" s = requests.Session() @@ -1661,7 +1724,10 @@ def test_session_get_adapter_prefix_matching_is_case_insensitive(self): my_adapter = HTTPAdapter() s.mount(mixed_case_prefix, my_adapter) - assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter + assert ( + s.get_adapter(url_matching_prefix_with_different_case) + is my_adapter + ) def test_header_remove_is_case_insensitive(self, httpbin): # From issue #1321 @@ -1696,7 +1762,8 @@ def test_header_keys_are_native(self, httpbin): assert "byte" in p.headers.keys() def test_header_validation(self, httpbin): - """Ensure prepare_headers regex isn't flagging valid header contents.""" + """Ensure prepare_headers regex isn't flagging valid header + 
contents.""" valid_headers = { "foo": "bar baz qux", "bar": b"fbbq", @@ -1774,7 +1841,9 @@ class MyString(str): class MyBytes(bytes): pass - r_str = requests.get(httpbin("get"), headers={MyString("x-custom"): "myheader"}) + r_str = requests.get( + httpbin("get"), headers={MyString("x-custom"): "myheader"} + ) assert r_str.request.headers["x-custom"] == "myheader" r_bytes = requests.get( @@ -1783,7 +1852,8 @@ class MyBytes(bytes): assert r_bytes.request.headers["x-custom"] == b"myheader" r_mixed = requests.get( - httpbin("get"), headers={MyString("x-custom"): MyBytes(b"myheader")} + httpbin("get"), + headers={MyString("x-custom"): MyBytes(b"myheader")}, ) assert r_mixed.request.headers["x-custom"] == b"myheader" @@ -1813,7 +1883,8 @@ def test_autoset_header_values_are_native(self, httpbin): def test_nonhttp_schemes_dont_check_URLs(self): test_urls = ( - "data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==", + "data:image/gif;base64," + "R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==", "file:///etc/passwd", "magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431", ) @@ -1849,7 +1920,9 @@ def test_should_strip_auth_host_change(self): def test_should_strip_auth_http_downgrade(self): s = requests.Session() - assert s.should_strip_auth("https://example.com/foo", "http://example.com/bar") + assert s.should_strip_auth( + "https://example.com/foo", "http://example.com/bar" + ) def test_should_strip_auth_https_upgrade(self): s = requests.Session() @@ -1910,12 +1983,16 @@ def test_manual_redirect_with_partial_body_read(self, httpbin): def test_prepare_body_position_non_stream(self): data = b"the data" - prep = requests.Request("GET", "http://example.com", data=data).prepare() + prep = requests.Request( + "GET", "http://example.com", data=data + ).prepare() assert prep._body_position is None def test_rewind_body(self): data = io.BytesIO(b"the data") - prep = requests.Request("GET", "http://example.com", data=data).prepare() + prep = 
requests.Request( + "GET", "http://example.com", data=data + ).prepare() assert prep._body_position == 0 assert prep.body.read() == b"the data" @@ -1929,7 +2006,9 @@ def test_rewind_body(self): def test_rewind_partially_read_body(self): data = io.BytesIO(b"the data") data.read(4) # read some data - prep = requests.Request("GET", "http://example.com", data=data).prepare() + prep = requests.Request( + "GET", "http://example.com", data=data + ).prepare() assert prep._body_position == 4 assert prep.body.read() == b"data" @@ -1952,7 +2031,9 @@ def __iter__(self): return data = BadFileObj("the data") - prep = requests.Request("GET", "http://example.com", data=data).prepare() + prep = requests.Request( + "GET", "http://example.com", data=data + ).prepare() assert prep._body_position == 0 with pytest.raises(UnrewindableBodyError) as e: @@ -1975,7 +2056,9 @@ def __iter__(self): return data = BadFileObj("the data") - prep = requests.Request("GET", "http://example.com", data=data).prepare() + prep = requests.Request( + "GET", "http://example.com", data=data + ).prepare() assert prep._body_position == 0 with pytest.raises(UnrewindableBodyError) as e: @@ -1995,7 +2078,9 @@ def __iter__(self): return data = BadFileObj("the data") - prep = requests.Request("GET", "http://example.com", data=data).prepare() + prep = requests.Request( + "GET", "http://example.com", data=data + ).prepare() assert prep._body_position is not None with pytest.raises(UnrewindableBodyError) as e: @@ -2034,7 +2119,9 @@ def test_redirect_with_wrong_gzipped_header(self, httpbin): ), ), ) - def test_basic_auth_str_is_always_native(self, username, password, auth_str): + def test_basic_auth_str_is_always_native( + self, username, password, auth_str + ): s = _basic_auth_str(username, password) assert isinstance(s, builtin_str) assert s == auth_str @@ -2080,7 +2167,9 @@ def test_response_context_manager(self, httpbin): def test_unconsumed_session_response_closes_connection(self, httpbin): s = requests.session() - 
with contextlib.closing(s.get(httpbin("stream/4"), stream=True)) as response: + with contextlib.closing( + s.get(httpbin("stream/4"), stream=True) + ) as response: pass assert response._content_consumed is False @@ -2101,7 +2190,9 @@ def test_session_close_proxy_clear(self): "two": mock.Mock(), } session = requests.Session() - with mock.patch.dict(session.adapters["http://"].proxy_manager, proxies): + with mock.patch.dict( + session.adapters["http://"].proxy_manager, proxies + ): session.close() proxies["one"].clear.assert_called_once_with() proxies["two"].clear.assert_called_once_with() @@ -2137,9 +2228,11 @@ def test_response_without_release_conn(self): resp.close() assert resp.raw.closed - def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin): - """Ensure that a byte stream with size 0 will not set both a Content-Length - and Transfer-Encoding header. + def test_empty_stream_with_auth_does_not_set_content_length_header( + self, httpbin + ): + """Ensure that a byte stream with size 0 will not set both a + Content-Length and Transfer-Encoding header. """ auth = ("user", "pass") url = httpbin("post") @@ -2149,9 +2242,11 @@ def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin assert "Transfer-Encoding" in prepared_request.headers assert "Content-Length" not in prepared_request.headers - def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin): - """Ensure that a byte stream with size > 0 will not set both a Content-Length - and Transfer-Encoding header. + def test_stream_with_auth_does_not_set_transfer_encoding_header( + self, httpbin + ): + """Ensure that a byte stream with size > 0 will not set both a + Content-Length and Transfer-Encoding header. 
""" auth = ("user", "pass") url = httpbin("post") @@ -2186,7 +2281,9 @@ def test_custom_redirect_mixin(self, httpbin): """ url_final = httpbin("html") querystring_malformed = urlencode({"location": url_final}) - url_redirect_malformed = httpbin("response-headers?%s" % querystring_malformed) + url_redirect_malformed = httpbin( + "response-headers?%s" % querystring_malformed + ) querystring_redirect = urlencode({"url": url_redirect_malformed}) url_redirect = httpbin("redirect-to?%s" % querystring_redirect) urls_test = [ @@ -2403,7 +2500,6 @@ def test_expires_none(self): class TestMorselToCookieMaxAge: - """Tests for morsel_to_cookie when morsel contains max-age.""" def test_max_age_valid_int(self): @@ -2442,7 +2538,9 @@ def test_invalid_timeout(self, httpbin, timeout, error_text): requests.get(httpbin("get"), timeout=timeout) assert error_text in str(e) - @pytest.mark.parametrize("timeout", (None, Urllib3Timeout(connect=None, read=None))) + @pytest.mark.parametrize( + "timeout", (None, Urllib3Timeout(connect=None, read=None)) + ) def test_none_timeout(self, httpbin, timeout): """Check that you can set None as a valid timeout value. 
@@ -2592,7 +2690,10 @@ def test_data_argument_accepts_tuples(data): """ p = PreparedRequest() p.prepare( - method="GET", url="http://www.example.com", data=data, hooks=default_hooks() + method="GET", + url="http://www.example.com", + data=data, + hooks=default_hooks(), ) assert p.body == urlencode(data) @@ -2630,7 +2731,10 @@ def test_urllib3_retries(httpbin): from urllib3.util import Retry s = requests.Session() - s.mount("http://", HTTPAdapter(max_retries=Retry(total=2, status_forcelist=[500]))) + s.mount( + "http://", + HTTPAdapter(max_retries=Retry(total=2, status_forcelist=[500])), + ) with pytest.raises(RetryError): s.get(httpbin("status/500")) @@ -2653,7 +2757,10 @@ class TestPreparingURLs: ("http://google.com", "http://google.com/"), ("http://ジェーピーニック.jp", "http://xn--hckqz9bzb1cyrb.jp/"), ("http://xn--n3h.net/", "http://xn--n3h.net/"), - ("http://ジェーピーニック.jp".encode(), "http://xn--hckqz9bzb1cyrb.jp/"), + ( + "http://ジェーピーニック.jp".encode(), + "http://xn--hckqz9bzb1cyrb.jp/", + ), ("http://straße.de/straße", "http://xn--strae-oqa.de/stra%C3%9Fe"), ( "http://straße.de/straße".encode(), @@ -2962,7 +3069,8 @@ def test_content_length_for_bytes_data(httpbin): @pytest.mark.skipif( is_urllib3_1, - reason="urllib3 2.x encodes all strings to utf-8, urllib3 1.x uses latin-1", + reason="urllib3 2.x encodes all strings to utf-8, urllib3 1.x uses" + " latin-1", ) def test_content_length_for_string_data_counts_bytes(httpbin): data = "This is a string containing multi-byte UTF-8 ☃️" @@ -2976,7 +3084,8 @@ def test_content_length_for_string_data_counts_bytes(httpbin): def test_json_decode_errors_are_serializable_deserializable(): json_decode_error = requests.exceptions.JSONDecodeError( "Extra data", - '{"responseCode":["706"],"data":null}{"responseCode":["706"],"data":null}', + '{"responseCode":["706"],"data":null}{"responseCode":["706"],' + '"data":null}', 36, ) deserialized_error = pickle.loads(pickle.dumps(json_decode_error)) diff --git a/tests/test_structures.py 
b/tests/test_structures.py index e2fd5baaf2..d47c819733 100644 --- a/tests/test_structures.py +++ b/tests/test_structures.py @@ -32,7 +32,10 @@ def test_lower_items(self): ] def test_repr(self): - assert repr(self.case_insensitive_dict) == "{'Accept': 'application/json'}" + assert ( + repr(self.case_insensitive_dict) + == "{'Accept': 'application/json'}" + ) def test_copy(self): copy = self.case_insensitive_dict.copy() diff --git a/tests/test_testserver.py b/tests/test_testserver.py index c73a3f1f59..c8e3d9827e 100644 --- a/tests/test_testserver.py +++ b/tests/test_testserver.py @@ -61,10 +61,13 @@ def test_basic_response(self): assert r.headers["Content-Length"] == "0" def test_basic_waiting_server(self): - """the server waits for the block_server event to be set before closing""" + """the server waits for the block_server event to be set before + closing""" block_server = threading.Event() - with Server.basic_response_server(wait_to_close_event=block_server) as ( + with Server.basic_response_server( + wait_to_close_event=block_server + ) as ( host, port, ): @@ -79,7 +82,9 @@ def test_multiple_requests(self): """multiple requests can be served""" requests_to_handle = 5 - server = Server.basic_response_server(requests_to_handle=requests_to_handle) + server = Server.basic_response_server( + requests_to_handle=requests_to_handle + ) with server as (host, port): server_url = f"http://{host}:{port}" @@ -91,7 +96,9 @@ def test_multiple_requests(self): with pytest.raises(requests.exceptions.ConnectionError): r = requests.get(server_url) - @pytest.mark.skip(reason="this fails non-deterministically under pytest-xdist") + @pytest.mark.skip( + reason="this fails non-deterministically under pytest-xdist" + ) def test_request_recovery(self): """can check the requests content""" # TODO: figure out why this sometimes fails when using pytest-xdist. 
@@ -142,7 +149,8 @@ def test_request_recovery_with_bigger_timeout(self): assert server.handler_results[0] == data def test_server_finishes_on_error(self): - """the server thread exits even if an exception exits the context manager""" + """the server thread exits even if an exception exits the context + manager""" server = Server.basic_response_server() with pytest.raises(Exception): with server: diff --git a/tests/test_utils.py b/tests/test_utils.py index 5e9b56ea64..361558e5bd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -53,7 +53,9 @@ class TestSuperLen: (StringIO.StringIO, "Test"), (BytesIO, b"Test"), pytest.param( - cStringIO, "Test", marks=pytest.mark.skipif("cStringIO is None") + cStringIO, + "Test", + marks=pytest.mark.skipif("cStringIO is None"), ), ), ) @@ -196,7 +198,8 @@ class TestGetEnvironProxies: @pytest.fixture(autouse=True, params=["no_proxy", "NO_PROXY"]) def no_proxy(self, request, monkeypatch): monkeypatch.setenv( - request.param, "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1" + request.param, + "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1", ) @pytest.mark.parametrize( @@ -352,9 +355,11 @@ def test_none(self): # HTML5 meta charset attribute '', # HTML4 pragma directive - '', + '', # XHTML 1.x served with text/html MIME type - '', + '', # XHTML 1.x served as XML '', ), @@ -417,7 +422,8 @@ def test_guess_by_bom(self, encoding, expected): "url, auth", ( ( - f"http://{ENCODED_USER}:{ENCODED_PASSWORD}@request.com/url.html#test", + f"http://{ENCODED_USER}:{ENCODED_PASSWORD}@request.com" + f"/url.html#test", (USER, PASSWORD), ), ("http://user:pass@complex.url.com/path?query=yes", ("user", "pass")), @@ -425,7 +431,10 @@ def test_guess_by_bom(self, encoding, expected): "http://user:pass%20pass@complex.url.com/path?query=yes", ("user", "pass pass"), ), - ("http://user:pass pass@complex.url.com/path?query=yes", ("user", "pass pass")), + ( + "http://user:pass pass@complex.url.com/path?query=yes", + ("user", "pass 
pass"), + ), ( "http://user%25user:pass@complex.url.com/path?query=yes", ("user%user", "pass"), @@ -536,7 +545,10 @@ def test_select_proxies(url, expected, proxies): @pytest.mark.parametrize( "value, expected", ( - ('foo="is a fish", bar="as well"', {"foo": "is a fish", "bar": "as well"}), + ( + 'foo="is a fish", bar="as well"', + {"foo": "is a fish", "bar": "as well"}, + ), ("key_without_value", {"key_without_value": None}), ), ) @@ -558,7 +570,8 @@ def test_parse_dict_header(value, expected): ), ("text/plain", ("text/plain", {})), ( - "multipart/form-data; boundary = something ; boundary2='something_else' ; no_equals ", + "multipart/form-data; boundary = something ; " + "boundary2='something_else' ; no_equals ", ( "multipart/form-data", { @@ -569,7 +582,8 @@ def test_parse_dict_header(value, expected): ), ), ( - 'multipart/form-data; boundary = something ; boundary2="something_else" ; no_equals ', + 'multipart/form-data; boundary = something ; ' + 'boundary2="something_else" ; no_equals ', ( "multipart/form-data", { @@ -580,7 +594,8 @@ def test_parse_dict_header(value, expected): ), ), ( - "multipart/form-data; boundary = something ; 'boundary2=something_else' ; no_equals ", + "multipart/form-data; boundary = something ; " + "'boundary2=something_else' ; no_equals ", ( "multipart/form-data", { @@ -591,7 +606,8 @@ def test_parse_dict_header(value, expected): ), ), ( - 'multipart/form-data; boundary = something ; "boundary2=something_else" ; no_equals ', + 'multipart/form-data; boundary = something ; ' + '"boundary2=something_else" ; no_equals ', ( "multipart/form-data", { @@ -613,7 +629,9 @@ def test__parse_content_type_header(value, expected): ( (CaseInsensitiveDict(), None), ( - CaseInsensitiveDict({"content-type": "application/json; charset=utf-8"}), + CaseInsensitiveDict( + {"content-type": "application/json; charset=utf-8"} + ), "utf-8", ), (CaseInsensitiveDict({"content-type": "text/plain"}), "ISO-8859-1"), @@ -647,12 +665,19 @@ def test_iter_slices(value, 
length): ( ( '; rel=front; type="image/jpeg"', - [{"url": "http:/.../front.jpeg", "rel": "front", "type": "image/jpeg"}], + [ + { + "url": "http:/.../front.jpeg", + "rel": "front", + "type": "image/jpeg", + } + ], ), ("", [{"url": "http:/.../front.jpeg"}]), (";", [{"url": "http:/.../front.jpeg"}]), ( - '; type="image/jpeg",;', + '; type="image/jpeg",' + ';', [ {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, {"url": "http://.../back.jpeg"}, @@ -675,7 +700,10 @@ def test_parse_header_links(value, expected): "http://user:pass@example.com/path?query", "http://user:pass@example.com/path?query", ), - ("http://user@example.com/path?query", "http://user@example.com/path?query"), + ( + "http://user@example.com/path?query", + "http://user@example.com/path?query", + ), ), ) def test_prepend_scheme_if_needed(value, expected): @@ -697,7 +725,10 @@ def test_to_native_string(value, expected): @pytest.mark.parametrize( "url, expected", ( - ("http://u:p@example.com/path?a=1#test", "http://example.com/path?a=1"), + ( + "http://u:p@example.com/path?a=1#test", + "http://example.com/path?a=1", + ), ("http://example.com/path", "http://example.com/path"), ("//u:p@example.com/path", "//example.com/path"), ("//example.com/path", "//example.com/path"), @@ -730,11 +761,13 @@ def test_should_bypass_proxies(url, expected, monkeypatch): """ monkeypatch.setenv( "no_proxy", - "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000", + "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, " + "google.com:6000", ) monkeypatch.setenv( "NO_PROXY", - "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000", + "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, " + "google.com:6000", ) assert should_bypass_proxies(url, no_proxy=None) == expected @@ -828,7 +861,9 @@ def test_should_bypass_proxies_no_proxy(url, expected, monkeypatch): ("http://192.168.0.1/", False, ""), ), ) -def test_should_bypass_proxies_win_registry(url, expected, override, 
monkeypatch): +def test_should_bypass_proxies_win_registry( + url, expected, override, monkeypatch +): """Tests for function should_bypass_proxies to check if proxy can be bypassed or not with Windows registry settings """ @@ -849,7 +884,8 @@ def OpenKey(key, subkey): def QueryValueEx(key, value_name): if key is ie_settings: if value_name == "ProxyEnable": - # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD) + # this could be a string (REG_SZ) or a 32-bit number + # (REG_DWORD) proxyEnableValues.rotate() return [proxyEnableValues[0]] elif value_name == "ProxyOverride": @@ -908,7 +944,8 @@ def QueryValueEx(key, value_name): ), ) def test_set_environ(env_name, value): - """Tests set_environ will set environ values and will restore the environ.""" + """Tests set_environ will set environ values and will restore the + environ.""" environ_copy = copy.deepcopy(os.environ) with set_environ(env_name, value): assert os.environ.get(env_name) == value @@ -929,7 +966,8 @@ def test_set_environ_raises_exception(): @pytest.mark.skipif(os.name != "nt", reason="Test only on Windows") def test_should_bypass_proxies_win_registry_ProxyOverride_value(monkeypatch): """Tests for function should_bypass_proxies to check if proxy - can be bypassed or not with Windows ProxyOverride registry value ending with a semicolon. + can be bypassed or not with Windows ProxyOverride registry value ending + with a semicolon. 
""" import winreg @@ -948,7 +986,8 @@ def QueryValueEx(key, value_name): return [1] elif value_name == "ProxyOverride": return [ - "192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1;<-loopback>;" + "192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1;" + "<-loopback>;" ] monkeypatch.setenv("NO_PROXY", "") diff --git a/tests/testserver/server.py b/tests/testserver/server.py index da1b65608e..42bce3c786 100644 --- a/tests/testserver/server.py +++ b/tests/testserver/server.py @@ -51,7 +51,9 @@ def __init__( @classmethod def text_response_server(cls, text, request_timeout=0.5, **kwargs): def text_response_handler(sock): - request_content = consume_socket_content(sock, timeout=request_timeout) + request_content = consume_socket_content( + sock, timeout=request_timeout + ) sock.send(text.encode("utf-8")) return request_content