|
16 | 16 |
|
17 | 17 | from __future__ import annotations
|
18 | 18 |
|
| 19 | +import atexit |
19 | 20 | import errno
|
20 | 21 | import itertools
|
21 | 22 | import json
|
|
24 | 25 | import re
|
25 | 26 | import struct
|
26 | 27 | import unicodedata
|
27 |
| -import warnings |
28 | 28 | from contextlib import suppress
|
29 | 29 | from dataclasses import dataclass
|
30 | 30 | from functools import cached_property, partial, total_ordering
|
31 | 31 | from http import HTTPStatus
|
32 | 32 | from typing import TYPE_CHECKING, ClassVar, Iterable, Iterator
|
33 |
| -from urllib.parse import quote, urlencode, urlparse |
| 33 | +from urllib.parse import quote, urlparse |
34 | 34 |
|
35 | 35 | import requests
|
36 | 36 | from typing_extensions import TypedDict
|
@@ -106,6 +106,22 @@ class NotFoundError(requests.exceptions.HTTPError):
|
106 | 106 | pass
|
107 | 107 |
|
108 | 108 |
|
| 109 | +class TimeoutSession(requests.Session): |
| 110 | + def request(self, *args, **kwargs): |
| 111 | + kwargs.setdefault("timeout", 10) |
| 112 | + return super().request(*args, **kwargs) |
| 113 | + |
| 114 | + |
| 115 | +r_session = TimeoutSession() |
| 116 | +r_session.headers.update({"User-Agent": USER_AGENT}) |
| 117 | + |
| 118 | + |
| 119 | +@atexit.register |
| 120 | +def close_session(): |
| 121 | + """Close the requests session on shut down.""" |
| 122 | + r_session.close() |
| 123 | + |
| 124 | + |
109 | 125 | # Utilities.
|
110 | 126 |
|
111 | 127 |
|
@@ -246,21 +262,7 @@ def fetch_url(self, url, **kwargs):
|
246 | 262 | is unreachable.
|
247 | 263 | """
|
248 | 264 | try:
|
249 |
| - # Disable the InsecureRequestWarning that comes from using |
250 |
| - # `verify=false`. |
251 |
| - # https://github.com/kennethreitz/requests/issues/2214 |
252 |
| - # We're not overly worried about the NSA MITMing our lyrics scraper |
253 |
| - with warnings.catch_warnings(): |
254 |
| - warnings.simplefilter("ignore") |
255 |
| - r = requests.get( |
256 |
| - url, |
257 |
| - verify=False, |
258 |
| - headers={ |
259 |
| - "User-Agent": USER_AGENT, |
260 |
| - }, |
261 |
| - timeout=10, |
262 |
| - **kwargs, |
263 |
| - ) |
| 265 | +            r = r_session.get(url, **kwargs) |
264 | 266 | except requests.RequestException as exc:
|
265 | 267 | self._log.debug("lyrics request failed: {0}", exc)
|
266 | 268 | return
|
@@ -368,9 +370,7 @@ def warn(self, message: str, *args) -> None:
|
368 | 370 |
|
369 | 371 | def fetch_json(self, *args, **kwargs):
|
370 | 372 | """Wrap the request method to raise an exception on HTTP errors."""
|
371 |
| - kwargs.setdefault("timeout", 10) |
372 |
| - kwargs.setdefault("headers", {"User-Agent": USER_AGENT}) |
373 |
| - r = requests.get(*args, **kwargs) |
| 373 | + r = r_session.get(*args, **kwargs) |
374 | 374 | if r.status_code == HTTPStatus.NOT_FOUND:
|
375 | 375 | raise NotFoundError("HTTP Error: Not Found", response=r)
|
376 | 376 | r.raise_for_status()
|
@@ -535,10 +535,7 @@ class Genius(SearchBackend):
|
535 | 535 | def __init__(self, config, log):
|
536 | 536 | super().__init__(config, log)
|
537 | 537 | self.api_key = config["genius_api_key"].as_str()
|
538 |
| - self.headers = { |
539 |
| - "Authorization": "Bearer %s" % self.api_key, |
540 |
| - "User-Agent": USER_AGENT, |
541 |
| - } |
| 538 | + self.headers = {"Authorization": f"Bearer {self.api_key}"} |
542 | 539 |
|
543 | 540 | def fetch(self, artist: str, title: str, *_) -> str | None:
|
544 | 541 | """Fetch lyrics from genius.com
|
@@ -573,18 +570,13 @@ def _search(self, artist, title):
|
573 | 570 | search_url = self.base_url + "/search"
|
574 | 571 | data = {"q": title + " " + artist.lower()}
|
575 | 572 | try:
|
576 |
| - response = requests.get( |
577 |
| - search_url, |
578 |
| - params=data, |
579 |
| - headers=self.headers, |
580 |
| - timeout=10, |
581 |
| - ) |
| 573 | + r = r_session.get(search_url, params=data, headers=self.headers) |
582 | 574 | except requests.RequestException as exc:
|
583 | 575 | self._log.debug("Genius API request failed: {0}", exc)
|
584 | 576 | return None
|
585 | 577 |
|
586 | 578 | try:
|
587 |
| - return response.json() |
| 579 | + return r.json() |
588 | 580 | except ValueError:
|
589 | 581 | return None
|
590 | 582 |
|
@@ -979,13 +971,7 @@ def get_bing_access_token(self):
|
979 | 971 | }
|
980 | 972 |
|
981 | 973 | oauth_url = "https://datamarket.accesscontrol.windows.net/v2/OAuth2-13"
|
982 |
| - oauth_token = json.loads( |
983 |
| - requests.post( |
984 |
| - oauth_url, |
985 |
| - data=urlencode(params), |
986 |
| - timeout=10, |
987 |
| - ).content |
988 |
| - ) |
| 974 | +        oauth_token = r_session.post(oauth_url, data=params).json() |
989 | 975 | if "access_token" in oauth_token:
|
990 | 976 | return "Bearer " + oauth_token["access_token"]
|
991 | 977 | else:
|
@@ -1202,10 +1188,8 @@ def append_translation(self, text, to_lang):
|
1202 | 1188 | "https://api.microsofttranslator.com/v2/Http.svc/"
|
1203 | 1189 | "Translate?text=%s&to=%s" % ("|".join(text_lines), to_lang)
|
1204 | 1190 | )
|
1205 |
| - r = requests.get( |
1206 |
| - url, |
1207 |
| - headers={"Authorization ": self.bing_auth_token}, |
1208 |
| - timeout=10, |
| 1191 | + r = r_session.get( |
| 1192 | + url, headers={"Authorization": self.bing_auth_token} |
1209 | 1193 | )
|
1210 | 1194 | if r.status_code != 200:
|
1211 | 1195 | self._log.debug(
|
|
0 commit comments