diff --git a/Dockerfile b/Dockerfile
index 88c83f9..0365b21 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
-# Use official python base image, small and debian edition
-FROM amd64/python:3.7.3-slim
+# Use official python base image, small edition
+FROM docker.io/amd64/python:3.11-slim
 
 ARG purpose=dev
 
diff --git a/docker-compose.yml b/docker-compose.yml
index 3f3ad15..a42ab97 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -25,21 +25,21 @@ services:
       - "redis"
 
   redis:
-    image: redis:alpine
+    image: docker.io/library/redis:alpine
 
   # wikireplica simulator
   mywiki:
-    image: mariadb:10.4-focal
+    image: docker.io/library/mariadb:10.11
     volumes:
       - ./docker-replica/replica.sql:/docker-entrypoint-initdb.d/replica.sql
     environment:
       MYSQL_USER: repl
       MYSQL_PASSWORD: repl
-      MYSQL_DATABASE: repl
+      MYSQL_DATABASE: mywiki_p
      MYSQL_RANDOM_ROOT_PASSWORD: 1
 
   db:
-    image: mariadb:10.1.48-bionic
+    image: docker.io/library/mariadb:10.11
     volumes:
       - ./schema.sql:/docker-entrypoint-initdb.d/schema.sql
     environment:
diff --git a/docker-replica/replica.sql b/docker-replica/replica.sql
index 20628a0..74a4000 100644
--- a/docker-replica/replica.sql
+++ b/docker-replica/replica.sql
@@ -10,13 +10,8 @@
 /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
 /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
 
-CREATE DATABASE /*!32312 IF NOT EXISTS*/ `mywiki_p` /*!40100 DEFAULT CHARACTER SET utf8mb4 */;
-
 USE `mywiki_p`;
 
-DROP DATABASE IF EXISTS repl;
-GRANT SELECT ON mywiki_p.* TO 'repl'@'%';
-
 DROP TABLE IF EXISTS `actor`;
 /*!40101 SET @saved_cs_client = @@character_set_client */;
 /*!40101 SET character_set_client = utf8 */;
diff --git a/quarry/web/api.py b/quarry/web/api.py
index dd54037..171dd25 100644
--- a/quarry/web/api.py
+++ b/quarry/web/api.py
@@ -60,7 +60,7 @@ def star_query() -> Union[Tuple[str, int], str]:
 
 
 @api_blueprint.route("/api/query/meta", methods=["POST"])
-def api_set_meta() -> Tuple[Union[str, Response], int]:
+def api_set_meta() -> tuple[str, int] | Response:
     if get_user() is None:
         return "Authentication required", 401
 
@@ -81,14 +81,13 @@
         query.description = request.form["description"]
     g.conn.session.add(query)
     g.conn.session.commit()
-    return (
-        Response(json.dumps({"id": query.id}), mimetype="application/json"),
-        200,
+    return Response(
+        json.dumps({"id": query.id}), mimetype="application/json", status=200
     )
 
 
 @api_blueprint.route("/api/query/run", methods=["POST"])
-def api_run_query() -> Tuple[Union[str, Response], int]:
+def api_run_query() -> tuple[str, int] | Response:
     if get_user() is None:
         return "Authentication required", 401
     text = request.form["text"]
@@ -141,17 +140,15 @@
     g.conn.session.add(query_rev)
     g.conn.session.add(query_run)
     g.conn.session.commit()
-    return (
-        Response(
-            json.dumps({"qrun_id": query_run.id}),
-            mimetype="application/json",
-        ),
-        200,
+    return Response(
+        json.dumps({"qrun_id": query_run.id}),
+        mimetype="application/json",
+        status=200,
     )
 
 
 @api_blueprint.route("/api/query/stop", methods=["POST"])
-def api_stop_query() -> Tuple[Union[str, Response], int]:
+def api_stop_query() -> tuple[str, int] | Response:
     if get_user() is None:
         return "Authentication required", 401
 
@@ -164,17 +161,17 @@
     query_run = (
         g.conn.session.query(QueryRun).filter(QueryRun.id == qrun_id).one()
     )
-    result_dictionary = ast.literal_eval(query_run.extra_info)
-    if "connection_id" in result_dictionary:
-        g.replica.connection = db_of_process
-        cur = g.replica.connection.cursor()
-        try:
-            cur.execute("KILL %s;", (result_dictionary["connection_id"]))
-            output = "job stopped"
-        except OperationalError:
-            output = "job not running"
-    else:
-        output = "job not running"
+    output = "job not running"
+    if query_run.extra_info:
+        result_dictionary = ast.literal_eval(query_run.extra_info)
+        if "connection_id" in result_dictionary:
+            g.replica.connection = db_of_process
+            cur = g.replica.connection.cursor()
+            try:
+                cur.execute("KILL %s;", (result_dictionary["connection_id"]))
+                output = "job stopped"
+            except OperationalError:
+                output = "job not running"
 
     # Stopping the job usually gets a stopped status. However some jobs stopped
     # before the stop button was pressed, and didn't update the DB to reflect
@@ -185,14 +182,13 @@
     query_run.status = QueryRun.STATUS_STOPPED
     g.conn.session.add(query_run)
     g.conn.session.commit()
-    return (
-        Response(json.dumps({"stopped": output}), mimetype="application/json"),
-        200,
+    return Response(
+        json.dumps({"stopped": output}), mimetype="application/json", status=200
    )
 
 
 @api_blueprint.route("/api/preferences/get/<key>")
-def pref_get(key) -> Response:
+def pref_get(key) -> tuple[str, int] | Response:
     if get_user() is None:
         return "Authentication required", 401
 
@@ -209,16 +205,15 @@
 
 
 @api_blueprint.route("/api/preferences/set/<key>/<value>")
-def pref_set(key, value) -> Union[Tuple[str, int], Tuple[Response, int]]:
+def pref_set(key, value) -> tuple[str, int] | Response:
     if get_user() is None:
         return "Authentication required", 401
 
     get_preferences()[key] = None if value == "null" else value
-    return (
-        Response(
-            json.dumps({"key": key, "success": ""}), mimetype="application/json"
-        ),
-        201,
+    return Response(
+        json.dumps({"key": key, "success": ""}),
+        mimetype="application/json",
+        status=201,
     )
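The api.py hunks above all follow one pattern: instead of returning a `(Response, 200)` tuple, each view returns a single `Response` that carries its own status, and the annotations move from `typing.Tuple`/`Union` to the PEP 604 builtin syntax available on Python 3.11. A minimal sketch of the resulting shape — the standalone app and handler names here are illustrative, not Quarry's:

```python
import json

from flask import Flask, Response

app = Flask(__name__)


@app.route("/api/example", methods=["POST"])
def api_example() -> tuple[str, int] | Response:
    user = None  # stand-in for get_user(); always unauthenticated in this sketch
    if user is None:
        # plain (body, status) tuple for the error path
        return "Authentication required", 401
    # status= on the Response replaces the old (Response(...), 200) tuple form
    return Response(
        json.dumps({"ok": True}), mimetype="application/json", status=200
    )
```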
diff --git a/quarry/web/metrics.py b/quarry/web/metrics.py
index 4fa7388..66f681f 100644
--- a/quarry/web/metrics.py
+++ b/quarry/web/metrics.py
@@ -29,7 +29,7 @@ def collect(self):
             labels=["status"],
         )
 
-        for (status_id, query_count) in queries_per_status:
+        for status_id, query_count in queries_per_status:
             metric_family.add_metric(
                 [QueryRun.STATUS_MESSAGES[status_id]], query_count
             )
diff --git a/quarry/web/models/queryrun.py b/quarry/web/models/queryrun.py
index ab5d9e1..d93ed2f 100644
--- a/quarry/web/models/queryrun.py
+++ b/quarry/web/models/queryrun.py
@@ -1,3 +1,4 @@
+import datetime
 import json
 from sqlalchemy import (
     Column,
@@ -37,7 +38,7 @@ class QueryRun(Base):
     id = Column(Integer, primary_key=True)
     query_rev_id = Column(Integer, ForeignKey("query_revision.id"))
     status = Column(Integer)
-    timestamp = Column(DateTime)
+    timestamp = Column(DateTime, default=datetime.datetime.utcnow)
     task_id = Column(String)
     extra_info = Column(UnicodeText)
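The new `default=datetime.datetime.utcnow` passes the callable itself (no parentheses), so SQLAlchemy evaluates it once per INSERT rather than once at import time. A minimal sketch of the pattern outside Quarry — the model name is illustrative:

```python
import datetime

from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class ExampleRun(Base):
    __tablename__ = "example_run"

    id = Column(Integer, primary_key=True)
    # the callable runs per-row at insert time; writing utcnow() instead
    # would freeze a single timestamp when the module is imported
    timestamp = Column(DateTime, default=datetime.datetime.utcnow)
```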
diff --git a/quarry/web/output.py b/quarry/web/output.py
index 16a5a72..5433c06 100644
--- a/quarry/web/output.py
+++ b/quarry/web/output.py
@@ -4,7 +4,7 @@
 import types
 
 from flask import Response, escape
-from werkzeug.contrib.iterio import IterI
+from .utils.iterio import IterI
 
 import xlsxwriter
diff --git a/quarry/web/redissession.py b/quarry/web/redissession.py
index 2b6f5da..8b2ab44 100644
--- a/quarry/web/redissession.py
+++ b/quarry/web/redissession.py
@@ -36,7 +36,7 @@ def get_redis_expiration_time(self, app, session):
         return timedelta(days=1)
 
     def open_session(self, app, request):
-        sid = request.cookies.get(app.session_cookie_name)
+        sid = request.cookies.get(app.config["SESSION_COOKIE_NAME"])
         if not sid:
             sid = self.generate_sid()
             return self.session_class(sid=sid, new=True)
@@ -51,7 +51,9 @@ def save_session(self, app, session, response):
         if not session:
             self.redis.delete(self.prefix + session.sid)
             if session.modified:
-                response.delete_cookie(app.session_cookie_name, domain=domain)
+                response.delete_cookie(
+                    app.config["SESSION_COOKIE_NAME"], domain=domain
+                )
             return
         redis_exp = self.get_redis_expiration_time(app, session)
         cookie_exp = self.get_expiration_time(app, session)
@@ -62,7 +64,7 @@
             time=int(redis_exp.total_seconds()),
         )
         response.set_cookie(
-            app.session_cookie_name,
+            app.config["SESSION_COOKIE_NAME"],
             session.sid,
             expires=cookie_exp,
             httponly=True,
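The redissession.py hunks are forced by an upstream removal: newer Flask releases dropped the `app.session_cookie_name` attribute (deprecated in 2.2, removed in 2.3, and this patch pins flask 2.3.2), leaving the config key as the only lookup. A quick check of the replacement:

```python
from flask import Flask

app = Flask(__name__)
# the config key survives across Flask versions; the attribute shortcut does not
print(app.config["SESSION_COOKIE_NAME"])  # "session" by default
```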
diff --git a/quarry/web/results.py b/quarry/web/results.py
index 9a1b6aa..f56e6cd 100644
--- a/quarry/web/results.py
+++ b/quarry/web/results.py
@@ -6,9 +6,7 @@
 from decimal import Decimal
 from typing import List
 
-INITIAL_SQL = """
-CREATE TABLE resultsets (id, headers, rowcount)
-"""
+INITIAL_SQL = "CREATE TABLE resultsets (id, headers, rowcount)"
 
 
 def get_unique_columns(raw_columns: List[str]) -> List[str]:
@@ -57,12 +55,15 @@ def start_resultset(self, columns, rowcount):
         unique_columns = get_unique_columns(columns)
         sanitized_columns = [self._quote_identifier(c) for c in unique_columns]
 
+        # Create table that will store the resultset
         table_name = self._get_current_resultset_table()
         sql = "CREATE TABLE %s (__id__ INTEGER PRIMARY KEY, %s)" % (
             table_name,
             ", ".join(sanitized_columns),
         )
         self.db.execute(sql)
+
+        # Add the new one to the resultset index table
         self.db.execute(
             "INSERT INTO resultsets (id, headers, rowcount) VALUES (?, ?, ?)",
             (self.resultset_id, json.dumps(unique_columns), rowcount),
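`_quote_identifier` itself is not shown in this hunk, but the test expectations later in the patch (the `"""jack"""` string) imply standard SQLite-style identifier quoting with embedded quotes doubled. A hypothetical stand-in, for illustration only — the real method in results.py may differ:

```python
# Hypothetical equivalent of results.py's _quote_identifier (not the actual
# implementation): wrap the name in double quotes and double any embedded ones.
def quote_identifier(name: str) -> str:
    return '"' + name.replace('"', '""') + '"'


print(quote_identifier('"jack"'))  # prints """jack""", as the tests expect
```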
diff --git a/quarry/web/utils/iterio.py b/quarry/web/utils/iterio.py
new file mode 100644
index 0000000..5fcaaba
--- /dev/null
+++ b/quarry/web/utils/iterio.py
@@ -0,0 +1,348 @@
+# -*- coding: utf-8 -*-
+r"""
+    File copied from werkzeug.contrib.iterio, which was removed from the
+    upstream library:
+    https://raw.githubusercontent.com/pallets/werkzeug/0c487f76985574012d63b3ebae2d6a0ef6d41f60/src/werkzeug/contrib/iterio.py
+    Its BSD-3-Clause license permits inclusion in our MIT repo.
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    This module implements a :class:`IterIO` that converts an iterator into
+    a stream object and the other way round. Converting streams into
+    iterators requires the `greenlet`_ module.
+
+    To convert an iterator into a stream all you have to do is to pass it
+    directly to the :class:`IterIO` constructor. In this example we pass it
+    a newly created generator::
+
+        def foo():
+            yield "something\n"
+            yield "otherthings"
+        stream = IterIO(foo())
+        print(stream.read())  # read the whole iterator
+
+    The other way round works a bit differently because we have to ensure that
+    the code execution doesn't take place yet. An :class:`IterIO` call with a
+    callable as first argument does two things. The function itself is passed
+    an :class:`IterIO` stream it can feed. The object returned by the
+    :class:`IterIO` constructor on the other hand is not a stream object but
+    an iterator::
+
+        def foo(stream):
+            stream.write("some")
+            stream.write("thing")
+            stream.flush()
+            stream.write("otherthing")
+        iterator = IterIO(foo)
+        print(next(iterator))  # prints something
+        print(next(iterator))  # prints otherthing
+        next(iterator)  # raises StopIteration
+
+    .. _greenlet: https://github.com/python-greenlet/greenlet
+
+    :copyright: 2007 Pallets
+    :license: BSD-3-Clause
+"""
+try:
+    import greenlet
+except ImportError:
+    greenlet = None
+
+
+def _mixed_join(iterable, sentinel):
+    """concatenate any string type in an intelligent way."""
+    iterator = iter(iterable)
+    first_item = next(iterator, sentinel)
+    if isinstance(first_item, bytes):
+        return first_item + b"".join(iterator)
+    return first_item + "".join(iterator)
+
+
+def _newline(reference_string):
+    if isinstance(reference_string, bytes):
+        return b"\n"
+    return "\n"
+
+
+class IterIO(object):
+    """Instances of this object implement an interface compatible with the
+    standard Python :class:`file` object. Streams are either read-only or
+    write-only depending on how the object is created.
+
+    If the first argument is an iterable a file like object is returned that
+    returns the contents of the iterable. In case the iterable is empty
+    read operations will return the sentinel value.
+
+    If the first argument is a callable then the stream object will be
+    created and passed to that function. The caller itself however will
+    not receive a stream but an iterable. The function will be executed
+    step by step as something iterates over the returned iterable. Each
+    call to :meth:`flush` will create an item for the iterable. If
+    :meth:`flush` is called without any writes in-between the sentinel
+    value will be yielded.
+
+    Note for Python 3: due to the incompatible interface of bytes and
+    streams you should set the sentinel value explicitly to an empty
+    bytestring (``b''``) if you are expecting to deal with bytes as
+    otherwise the end of the stream is marked with the wrong sentinel
+    value.
+
+    .. versionadded:: 0.9
+       `sentinel` parameter was added.
+    """
+
+    def __new__(cls, obj, sentinel=""):
+        try:
+            iterator = iter(obj)
+        except TypeError:
+            return IterI(obj, sentinel)
+        return IterO(iterator, sentinel)
+
+    def __iter__(self):
+        return self
+
+    def tell(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        return self.pos
+
+    def isatty(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        return False
+
+    def seek(self, pos, mode=0):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def truncate(self, size=None):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def write(self, s):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def writelines(self, list):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def read(self, n=-1):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def readlines(self, sizehint=0):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def readline(self, length=None):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def flush(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        raise IOError(9, "Bad file descriptor")
+
+    def __next__(self):
+        if self.closed:
+            raise StopIteration()
+        line = self.readline()
+        if not line:
+            raise StopIteration()
+        return line
+
+
+class IterI(IterIO):
+    """Convert a stream into an iterator."""
+
+    def __new__(cls, func, sentinel=""):
+        if greenlet is None:
+            raise RuntimeError("IterI requires greenlet support")
+        stream = object.__new__(cls)
+        stream._parent = greenlet.getcurrent()
+        stream._buffer = []
+        stream.closed = False
+        stream.sentinel = sentinel
+        stream.pos = 0
+
+        def run():
+            func(stream)
+            stream.close()
+
+        g = greenlet.greenlet(run, stream._parent)
+        while 1:
+            rv = g.switch()
+            if not rv:
+                return
+            yield rv[0]
+
+    def close(self):
+        if not self.closed:
+            self.closed = True
+            self._flush_impl()
+
+    def write(self, s):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        if s:
+            self.pos += len(s)
+            self._buffer.append(s)
+
+    def writelines(self, list):
+        for item in list:
+            self.write(item)
+
+    def flush(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        self._flush_impl()
+
+    def _flush_impl(self):
+        data = _mixed_join(self._buffer, self.sentinel)
+        self._buffer = []
+        if not data and self.closed:
+            self._parent.switch()
+        else:
+            self._parent.switch((data,))
+
+
+class IterO(IterIO):
+    """Iter output. Wrap an iterator and give it a stream like interface."""
+
+    def __new__(cls, gen, sentinel=""):
+        self = object.__new__(cls)
+        self._gen = gen
+        self._buf = None
+        self.sentinel = sentinel
+        self.closed = False
+        self.pos = 0
+        return self
+
+    def __iter__(self):
+        return self
+
+    def _buf_append(self, string):
+        """Replace string directly without appending to an empty string,
+        avoiding type issues."""
+        if not self._buf:
+            self._buf = string
+        else:
+            self._buf += string
+
+    def close(self):
+        if not self.closed:
+            self.closed = True
+            if hasattr(self._gen, "close"):
+                self._gen.close()
+
+    def seek(self, pos, mode=0):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        if mode == 1:
+            pos += self.pos
+        elif mode == 2:
+            self.read()
+            self.pos = min(self.pos, self.pos + pos)
+            return
+        elif mode != 0:
+            raise IOError("Invalid argument")
+        buf = []
+        try:
+            tmp_end_pos = len(self._buf or "")
+            while pos > tmp_end_pos:
+                item = next(self._gen)
+                tmp_end_pos += len(item)
+                buf.append(item)
+        except StopIteration:
+            pass
+        if buf:
+            self._buf_append(_mixed_join(buf, self.sentinel))
+        self.pos = max(0, pos)
+
+    def read(self, n=-1):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+        if n < 0:
+            self._buf_append(_mixed_join(self._gen, self.sentinel))
+            result = self._buf[self.pos :]
+            self.pos += len(result)
+            return result
+        new_pos = self.pos + n
+        buf = []
+        try:
+            tmp_end_pos = 0 if self._buf is None else len(self._buf)
+            while new_pos > tmp_end_pos or (self._buf is None and not buf):
+                item = next(self._gen)
+                tmp_end_pos += len(item)
+                buf.append(item)
+        except StopIteration:
+            pass
+        if buf:
+            self._buf_append(_mixed_join(buf, self.sentinel))
+
+        if self._buf is None:
+            return self.sentinel
+
+        new_pos = max(0, new_pos)
+        try:
+            return self._buf[self.pos : new_pos]
+        finally:
+            self.pos = min(new_pos, len(self._buf))
+
+    def readline(self, length=None):
+        if self.closed:
+            raise ValueError("I/O operation on closed file")
+
+        nl_pos = -1
+        if self._buf:
+            nl_pos = self._buf.find(_newline(self._buf), self.pos)
+        buf = []
+        try:
+            if self._buf is None:
+                pos = self.pos
+            else:
+                pos = len(self._buf)
+            while nl_pos < 0:
+                item = next(self._gen)
+                local_pos = item.find(_newline(item))
+                buf.append(item)
+                if local_pos >= 0:
+                    nl_pos = pos + local_pos
+                    break
+                pos += len(item)
+        except StopIteration:
+            pass
+        if buf:
+            self._buf_append(_mixed_join(buf, self.sentinel))
+
+        if self._buf is None:
+            return self.sentinel
+
+        if nl_pos < 0:
+            new_pos = len(self._buf)
+        else:
+            new_pos = nl_pos + 1
+        if length is not None and self.pos + length < new_pos:
+            new_pos = self.pos + length
+        try:
+            return self._buf[self.pos : new_pos]
+        finally:
+            self.pos = min(new_pos, len(self._buf))
+
+    def readlines(self, sizehint=0):
+        total = 0
+        lines = []
+        line = self.readline()
+        while line:
+            lines.append(line)
+            total += len(line)
+            if 0 < sizehint <= total:
+                break
+            line = self.readline()
+        return lines
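Since utils/iterio.py is vendored wholesale, the quickest sanity check is the behaviour its own docstring promises: wrapping a generator yields a readable stream. Assuming the module path added above:

```python
from quarry.web.utils.iterio import IterIO


def produce():
    yield "something\n"
    yield "otherthings"


# IterIO dispatches to IterO for iterables; read() with no argument
# drains the generator and joins the chunks
stream = IterIO(produce())
print(stream.read())  # "something\notherthings"
```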
diff --git a/quarry/web/utils/pagination.py b/quarry/web/utils/pagination.py
index ab4a02e..bbf2bae 100644
--- a/quarry/web/utils/pagination.py
+++ b/quarry/web/utils/pagination.py
@@ -113,7 +113,6 @@ def get_pagination_links(self, page_items):
             self.direction == "next"
             or (self.direction == "prev" and page_items_count == self.limit)
         ):
-
             prev_link = self.get_page_link(
                 page_key=self.get_page_key_from_page_item(page_items[0]),
                 limit=-1 * self.limit,
diff --git a/quarry/web/webhelpers.py b/quarry/web/webhelpers.py
index 55ca19b..5c96069 100644
--- a/quarry/web/webhelpers.py
+++ b/quarry/web/webhelpers.py
@@ -27,7 +27,6 @@ def timesince(dt, default="just now"):
     )
 
     for period, singular, plural in periods:
-
         if period:
             return "%d %s ago" % (period, singular if period == 1 else plural)
 
diff --git a/requirements.txt b/requirements.txt
index 31378aa..83bf734 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,32 +1,13 @@
-billiard==3.6.4.0
-celery==5.1.2
-certifi==2021.5.30
-charset-normalizer==2.0.4
-click==7.1.2
-flask==1.1.4
-greenlet==1.1.0
-idna==3.2
-itsdangerous==1.1.0
-jinja2==2.11.3
-kombu==5.1.0
-markupsafe==2.0.1
-mwoauth==0.3.7
-oauthlib==3.1.1
-prometheus_client==0.12.0
-prometheus_flask_exporter==0.18.7
-pyjwt==1.7.1
-pymysql==1.0.2
-pytz==2021.1
-pyyaml==5.4.1
-redis==3.5.3
-requests==2.26.0
-requests-oauthlib==1.3.0
-six==1.16.0
-sqlalchemy==1.4.22
-urllib3==1.26.6
-vine==5.0.0
-werkzeug==0.16.0
-xlsxwriter==3.0.3
-importlib-metadata==4.6.3
-zipp==3.5.0
-typing-extensions==3.10.0.0
+flask==2.3.2
+PyMySQL==1.0.3
+requests==2.28.1
+Werkzeug==2.3.4
+SQLAlchemy==2.0.15
+PyYAML==6.0
+redis==4.5.5
+mwoauth==0.3.8
+XlsxWriter==3.1.1
+celery==5.2.7
+prometheus_client==0.16.0
+prometheus_flask_exporter==0.22.4
+greenlet==2.0.2
diff --git a/test-requirements.txt b/test-requirements.txt
index 7bca192..231b178 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -5,8 +5,8 @@ pytest-cov
 pytest-mock
 pytest-redis
 mock-alchemy
-black==20.8b0
-mypy==0.971
+black==23.3.0
+mypy==1.3.0
 types-redis
 types-requests
 types-PyYAML
diff --git a/tests/conftest.py b/tests/conftest.py
index 2f3b96f..669f511 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -22,10 +22,10 @@ def client(redisdb, mocker):
     # Make sure that we only ever use the redisdb function
     # from pytest-redis
     mocker.patch("redis.Redis", return_value=redisdb)
-    mocker.patch(
-        "celery.backends.redis.RedisBackend.client",
-        new_callable=mocker.PropertyMock(return_value=redisdb),
-    )
+    # mocker.patch(
+    #     "celery.backends.redis.RedisBackend.client",
+    #     new_callable=mocker.PropertyMock(return_value=redisdb),
+    # )
 
     # kill_context is going to try to close a bunch of things
     # that don't actually exist; just skip it.
diff --git a/tests/test_login.py b/tests/test_login.py
index d44d431..5b0f703 100644
--- a/tests/test_login.py
+++ b/tests/test_login.py
@@ -25,23 +25,23 @@ def setup_method(self, mocker, client):
             flask_sess["user_id"] = "MyUserID"
             flask_sess["request_token"] = "request token"
             flask_sess["preferences"] = {"breakfast": "waffles", "lunch": "tacos"}
-            flask_sess["return_to_url"] = "return/to/url"
+            flask_sess["return_to_url"] = "/return/to/url"
 
     def test_login(self, mocker):
         mocker.patch(
-            "mwoauth.Handshaker.initiate", return_value=("loginredir", "fake_token")
+            "mwoauth.Handshaker.initiate", return_value=("/loginredir", "fake_token")
         )
 
         response = self.client.get("/login")
 
         assert response.status_code == 302
-        assert response.headers["Location"] == "http://localhost/loginredir"
+        assert response.headers["Location"] == "/loginredir"
 
     def test_oauth_callback(self, mocker):
         print("first try")
         response = self.client.get("/oauth-callback?woopity=bloopity")
 
         assert response.status_code == 302
-        assert response.headers["Location"] == "http://localhost/"
+        assert response.headers["Location"] == "/"
 
         mocker.patch("mwoauth.Handshaker.complete", return_value=("fake_token"))
         mocker.patch(
@@ -52,10 +52,10 @@ def test_oauth_callback(self, mocker):
         response = self.client.get("/oauth-callback?woopity=bloopity")
 
         assert response.status_code == 302
-        assert response.headers["Location"] == "http://localhost/return/to/url"
+        assert response.headers["Location"] == "/return/to/url"
 
     def test_logout(self, mocker):
         response = self.client.get("/logout")
 
         assert response.status_code == 302
-        assert response.headers["Location"] == "http://localhost/"
+        assert response.headers["Location"] == "/"
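All of the `"http://localhost/..."` → `"/..."` edits in this and the following test files trace back to one upstream change: since Werkzeug 2.1, redirect `Location` headers are no longer rewritten into absolute URLs, so the test client sees the relative target as-is. A small reproduction under the pinned Flask/Werkzeug versions — the route here is illustrative:

```python
from flask import Flask, redirect

app = Flask(__name__)


@app.route("/go")
def go():
    return redirect("/query/new")


client = app.test_client()
response = client.get("/go")
# older Werkzeug would have reported "http://localhost/query/new"
print(response.status_code, response.headers["Location"])  # 302 /query/new
```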
diff --git a/tests/test_query.py b/tests/test_query.py
index 0cbccad..8a8f196 100644
--- a/tests/test_query.py
+++ b/tests/test_query.py
@@ -70,7 +70,7 @@ def test_new_query(self, mocker):
 
         assert response.status_code == 302
         assert (
-            response.headers["Location"] == "http://localhost/query/%d" % self.query_id
+            response.headers["Location"] == "/query/%d" % self.query_id
         )
         self.db_session.filter.assert_has_calls([mocker.call(User.id == "MyUserID")])
 
@@ -80,7 +80,7 @@ def test_new_query(self, mocker):
         response = self.client.get("/query/new")
 
         assert response.status_code == 302
-        assert response.headers["Location"] == "http://localhost/login?next=/query/new"
+        assert response.headers["Location"] == "/login?next=/query/new"
 
     def test_query_show(self, mocker):
         response = self.client.get("/query/%s" % self.query_id)
@@ -172,7 +172,7 @@ def test_fork_query(self, mocker):
         self.db_session.assert_has_calls([mocker.call.add(Query)])
 
         assert response.status_code == 302
-        assert response.headers["Location"] == "http://localhost/query/%d" % (
+        assert response.headers["Location"] == "/query/%d" % (
             self.query_id + 1,
         )
diff --git a/tests/test_results.py b/tests/test_results.py
index d1e479a..f43f4d3 100644
--- a/tests/test_results.py
+++ b/tests/test_results.py
@@ -100,8 +100,13 @@ def test_start_resultset(self, mocker):
         self.db_session.assert_has_calls(
             [
-                mocker.call(
-                    'CREATE TABLE resultset_555 (__id__ INTEGER PRIMARY KEY, "manny", "moe", '
-                    '"moe_2", "moe_3", """jack""", "nulltest")'
-                )
+                mocker.call("CREATE TABLE resultsets (id, headers, rowcount)"),
+                mocker.call(
+                    'CREATE TABLE resultset_555 (__id__ INTEGER PRIMARY KEY, "manny", "moe", "moe_2", "moe_3", '
+                    '"""jack""", "nulltest")'
+                ),
+                mocker.call(
+                    "INSERT INTO resultsets (id, headers, rowcount) VALUES (?, ?, ?)",
+                    (555, '["manny", "moe", "moe_2", "moe_3", "\\"jack\\"", "nulltest\\u0000"]', 7),
+                ),
             ]
         )
diff --git a/tests/test_user.py b/tests/test_user.py
index 7c8d123..fe5345e 100644
--- a/tests/test_user.py
+++ b/tests/test_user.py
@@ -93,7 +93,7 @@ def test_sudo(self, mocker):
                 )
             ]
         )
-        assert response.headers["Location"] == "http://localhost/"
+        assert response.headers["Location"] == "/"
         assert response.status_code == 302
 
     def test_user_page(self, mocker):
diff --git a/tests/test_worker.py b/tests/test_worker.py
index 21e83eb..3e82b23 100644
--- a/tests/test_worker.py
+++ b/tests/test_worker.py
@@ -109,7 +109,7 @@ def test_run_query(self, mocker):
         worker.run_query(self.run_id)
 
         # This isn't the full set of queries; comparing the actual session
-        # queries is messy. This should a least make sure that the DB
+        # queries is messy. This should at least make sure that the DB
         # is getting hit.
         self.db_session.assert_has_calls(
             [
diff --git a/tox.ini b/tox.ini
index 1d69ec3..b1d24c4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,10 +1,12 @@
 [tox]
-envlist = py37-{flake8,pytest,black,mypy}
+envlist = py311-{flake8,pytest,black,mypy}
 skipsdist = True
 
 [flake8]
 exclude = bin,lib,include,.venv,.tox,dist,doc,build,*.egg
 max-line-length = 120
+# E203 conflicts with black's formatting
+extend-ignore = E203
 
 [mypy]
 ignore_missing_imports = True
@@ -13,19 +15,19 @@ warn_unused_ignores = True
 warn_unreachable = True
 warn_unused_configs = True
 
-[testenv:py37-flake8]
-commands = flake8
+[testenv:py311-flake8]
+commands = flake8 --extend-ignore=E203
 deps = -rtest-requirements.txt
 
-[testenv:py37-pytest]
+[testenv:py311-pytest]
 commands = python -m pytest --cov=quarry -ra {posargs}
 deps =
     -rtest-requirements.txt
     -rrequirements.txt
 
-[testenv:py37-black]
-commands = black -l 80 -t py37 --check --diff quarry/web/
+[testenv:py311-black]
+commands = black -l 80 -t py311 --check --diff quarry/web/
 deps = black
 
-[testenv:py37-mypy]
+[testenv:py311-mypy]
 commands = mypy quarry --config-file tox.ini
 deps = -rtest-requirements.txt
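On the new `extend-ignore = E203` in tox.ini: flake8's E203 ("whitespace before ':'") fires on black's preferred slice formatting, so one of the two tools has to yield. For instance, black keeps the space that E203 would flag:

```python
# black formats slices with complex bounds as "items[mid + 1 :]"; flake8's
# E203 rejects that space before the colon, hence the ignore in tox.ini.
items = list(range(10))
mid = 5
print(items[mid + 1 :])  # [6, 7, 8, 9]
```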