Skip to content

Commit 5b63cf6

Browse files
committed
odbc core initial work
1 parent 1685e26 commit 5b63cf6

File tree

6 files changed

+253
-16
lines changed

6 files changed

+253
-16
lines changed

.github/workflows/ci-test.yml

Lines changed: 26 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -22,18 +22,23 @@ jobs:
2222
matrix:
2323
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
2424
os: ["ubuntu-latest", "macOS-latest", "windows-latest"]
25-
backend: ["local", "db"]
25+
backend: ["local", "mongodb", "mysql"]
2626
exclude:
2727
# ToDo: take if back when the connection become stable
2828
# or resolve using `InMemoryMongoClient`
29-
- { os: "macOS-latest", backend: "db" }
29+
- { os: "macOS-latest", backend: "mongodb" }
30+
- { os: "macOS-latest", backend: "mysql" }
3031
env:
31-
CACHIER_TEST_HOST: "localhost"
32-
CACHIER_TEST_PORT: "27017"
3332
#CACHIER_TEST_DB: "dummy_db"
3433
#CACHIER_TEST_USERNAME: "myuser"
3534
#CACHIER_TEST_PASSWORD: "yourpassword"
35+
# CACHIER_MONGODB_TEST_HOST: "localhost"
36+
CACHIER_MONGODB_TEST_PORT: "27017"
3637
CACHIER_TEST_VS_DOCKERIZED_MONGO: "true"
38+
CACHIER_MYSQL_TEST_PORT: "3306"
39+
CACHIER_TEST_VS_DOCKERIZED_MYSQL: "true"
40+
CACHIER_TEST_PYODBC_CONNECTION_STRING: "DRIVER={MySQL ODBC Driver};SERVER=localhost;PORT=3306;DATABASE=test;USER=root;PASSWORD=password;"
41+
3742

3843
steps:
3944
- uses: actions/checkout@v4
@@ -50,35 +55,45 @@ jobs:
5055
5156
- name: Unit tests (local)
5257
if: matrix.backend == 'local'
53-
run: pytest -m "not mongo"
58+
run: pytest -m "not mongo and not mysql and not odbc"
5459

5560
- name: Setup docker (missing on MacOS)
56-
if: runner.os == 'macOS' && matrix.backend == 'db'
61+
if: runner.os == 'macOS' && (matrix.backend == 'mongodb' || matrix.backend == 'mysql')
5762
run: |
5863
brew install docker
5964
colima start
6065
# For testcontainers to find the Colima socket
6166
sudo ln -sf $HOME/.colima/default/docker.sock /var/run/docker.sock
6267
# ToDo: find a way to cache docker images
6368
#- name: Cache Container Images
64-
# if: matrix.backend == 'db'
69+
# if: matrix.backend == 'mongodb'
6570
# uses: borda/cache-container-images-action@b32a5e804cb39af3c3d134fc03ab76eac0bfcfa9
6671
# with:
6772
# prefix-key: "mongo-db"
6873
# images: mongo:latest
6974
- name: Start MongoDB in docker
70-
if: matrix.backend == 'db'
75+
if: matrix.backend == 'mongodb'
7176
run: |
7277
# start MongoDB in a container
73-
docker run -d -p ${{ env.CACHIER_TEST_PORT }}:27017 --name mongodb mongo:latest
78+
docker run -d -p ${{ env.CACHIER_MONGODB_TEST_PORT }}:27017 --name mongodb mongo:latest
7479
# wait for MongoDB to start, which is in average 5 seconds
7580
sleep 5
7681
# show running containers
7782
docker ps -a
78-
- name: Unit tests (DB)
79-
if: matrix.backend == 'db'
83+
- name: Unit tests (MongoDB)
84+
if: matrix.backend == 'mongodb'
8085
run: pytest -m "mongo"
8186

87+
- name: Start MySQL in docker
88+
if: matrix.backend == 'mysql'
89+
run: |
90+
docker run -d -p ${{ env.CACHIER_MYSQL_TEST_PORT }}:3306 --name mysql -e MYSQL_ROOT_PASSWORD=password -e MYSQL_DATABASE=test mysql:latest
91+
sleep 10
92+
docker ps -a
93+
- name: Unit tests (MySQL)
94+
if: matrix.backend == 'mysql'
95+
run: pytest -m "odbc"
96+
8297
- name: "Upload coverage to Codecov"
8398
continue-on-error: true
8499
uses: codecov/codecov-action@v4

src/cachier/_types.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,4 +6,4 @@
66

77
HashFunc = Callable[..., str]
88
Mongetter = Callable[[], "pymongo.collection.Collection"]
9-
Backend = Literal["pickle", "mongo", "memory"]
9+
Backend = Literal["pickle", "mongo", "odbc", "memory"]

src/cachier/core.py

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
from .cores.base import RecalculationNeeded, _BaseCore
2828
from .cores.memory import _MemoryCore
2929
from .cores.mongo import _MongoCore
30+
from .cores.odbc import _OdbcCore
3031
from .cores.pickle import _PickleCore
3132

3233
MAX_WORKERS_ENVAR_NAME = "CACHIER_MAX_WORKERS"
@@ -110,6 +111,8 @@ def cachier(
110111
hash_params: Optional[HashFunc] = None,
111112
backend: Optional[Backend] = None,
112113
mongetter: Optional[Mongetter] = None,
114+
odbc_connection_string: Optional[str] = None,
115+
odbc_table_name: Optional[str] = None,
113116
stale_after: Optional[datetime.timedelta] = None,
114117
next_time: Optional[bool] = None,
115118
cache_dir: Optional[Union[str, os.PathLike]] = None,
@@ -137,13 +140,21 @@ def cachier(
137140
hash_params : callable, optional
138141
backend : str, optional
139142
The name of the backend to use. Valid options currently include
140-
'pickle', 'mongo' and 'memory'. If not provided, defaults to
143+
'pickle', 'mongo', 'odbc' and 'memory'. If not provided, defaults to
141144
'pickle' unless the 'mongetter' argument is passed, in which
142-
case the mongo backend is automatically selected.
145+
case the mongo backend is automatically selected, or the
146+
'odbc_connection_string' argument is passed, in which case the odbc
147+
backend is automatically selected.
143148
mongetter : callable, optional
144149
A callable that takes no arguments and returns a pymongo.Collection
145150
object with writing permissions. If unset a local pickle cache is used
146151
instead.
152+
odbc_connection_string : str, optional
153+
A connection string to an ODBC database. If provided, the ODBC core
154+
will be used.
155+
odbc_table_name : str, optional
156+
The name of the table to use in the ODBC database. If not provided,
157+
defaults to 'cachier'.
147158
stale_after : datetime.timedelta, optional
148159
The time delta after which a cached result is considered stale. Calls
149160
made after the result goes stale will trigger a recalculation of the
@@ -190,6 +201,8 @@ def cachier(
190201
# Override the backend parameter if a mongetter is provided.
191202
if callable(mongetter):
192203
backend = "mongo"
204+
if odbc_connection_string is not None:
205+
backend = "odbc"
193206
core: _BaseCore
194207
if backend == "pickle":
195208
core = _PickleCore(
@@ -205,6 +218,13 @@ def cachier(
205218
mongetter=mongetter,
206219
wait_for_calc_timeout=wait_for_calc_timeout,
207220
)
221+
elif backend == "odbc":
222+
core = _OdbcCore(
223+
hash_func=hash_func,
224+
wait_for_calc_timeout=wait_for_calc_timeout,
225+
connection_string=odbc_connection_string,
226+
table_name=odbc_table_name,
227+
)
208228
elif backend == "memory":
209229
core = _MemoryCore(
210230
hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout

src/cachier/cores/odbc.py

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
"""A pyodbc-based caching core for cachier."""

# This file is part of Cachier.
# https://github.com/python-cachier/cachier

# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2016, Shay Palachy <[email protected]>

# standard library imports
import datetime
import pickle
import sys
import time
import warnings
from contextlib import suppress

pyodbc = None
# third party imports
with suppress(ImportError):
    import pyodbc

# local imports
from .base import RecalculationNeeded, _BaseCore

# Used when the caller does not supply a table name (matches the default
# documented for `odbc_table_name` in `cachier.core.cachier`).
_DEFAULT_TABLE_NAME = "cachier"


class _OdbcCore(_BaseCore):
    """Caching core that stores cache entries in a database via pyodbc.

    Each cache entry is a single row: the argument-hash key, the pickled
    function result, the entry timestamp and a flag marking an in-progress
    calculation.

    NOTE(review): the statements below use T-SQL constructs
    (``IF NOT EXISTS`` / ``MERGE`` / ``GETDATE()`` / ``VARBINARY(MAX)``) —
    confirm the intended DBMS; the CI workflow starts a MySQL container,
    which rejects this dialect and treats ``key`` as a reserved word.
    """

    def __init__(
        self,
        hash_func,
        wait_for_calc_timeout,
        connection_string,
        table_name,
    ):
        """Initialize the ODBC core.

        Parameters
        ----------
        hash_func : callable
            Hashes the decorated function's arguments into a cache key.
        wait_for_calc_timeout : int
            Seconds to wait for another process's in-progress calculation
            before triggering a recalculation.
        connection_string : str
            ODBC connection string used for every operation.
        table_name : str, optional
            Name of the cache table; defaults to 'cachier' when falsy.
            It is interpolated directly into SQL text, so it must come
            from trusted configuration, never from user input.
        """
        if "pyodbc" not in sys.modules:
            warnings.warn(
                "`pyodbc` was not found. pyodbc cores will not function.",
                ImportWarning,
                stacklevel=2,
            )  # pragma: no cover
        super().__init__(hash_func, wait_for_calc_timeout)
        self.connection_string = connection_string
        # Bug fix: a None table name was previously interpolated verbatim
        # into the SQL below, producing broken statements; apply the
        # documented default instead.
        self.table_name = table_name if table_name else _DEFAULT_TABLE_NAME
        self.ensure_table_exists()

    def ensure_table_exists(self):
        """Create the cache table if it does not already exist."""
        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()
            cursor.execute(f"""
                IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = N'{self.table_name}')
                BEGIN
                CREATE TABLE {self.table_name} (
                    key NVARCHAR(255),
                    value VARBINARY(MAX),
                    time DATETIME,
                    being_calculated BIT,
                    PRIMARY KEY (key)
                );
                END
            """)
            conn.commit()

    def get_entry_by_key(self, key):
        """Return the cache entry dict for ``key``, or None when absent."""
        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()
            cursor.execute(
                f"SELECT value, time, being_calculated "
                f"FROM {self.table_name} WHERE key = ?",
                key,
            )
            row = cursor.fetchone()
            if row:
                return {
                    "value": pickle.loads(row.value),
                    "time": row.time,
                    "being_calculated": row.being_calculated,
                }
            return None

    def set_entry(self, key, func_res):
        """Upsert the entry for ``key`` with the pickled function result."""
        # Pickle once instead of twice; the payload is bound twice because
        # it appears in both MERGE branches.
        payload = pickle.dumps(func_res)
        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()
            cursor.execute(
                f"""
                MERGE INTO {self.table_name} USING (SELECT 1 AS dummy) AS src ON (key = ?)
                WHEN MATCHED THEN
                    UPDATE SET value = ?, time = GETDATE(), being_calculated = 0
                WHEN NOT MATCHED THEN
                    INSERT (key, value, time, being_calculated) VALUES (?, ?, GETDATE(), 0);
                """,
                key,
                payload,
                key,
                payload,
            )
            conn.commit()

    def mark_entry_being_calculated(self, key):
        """Flag the entry for ``key`` as currently being calculated.

        NOTE(review): this only UPDATEs an existing row — on the very
        first calculation no row exists yet, so concurrent callers cannot
        observe the flag; confirm whether an upsert is intended here.
        """
        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()
            cursor.execute(
                f"UPDATE {self.table_name} "
                f"SET being_calculated = 1 WHERE key = ?",
                key,
            )
            conn.commit()

    def mark_entry_not_calculated(self, key):
        """Clear the being-calculated flag for ``key``."""
        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()
            cursor.execute(
                f"UPDATE {self.table_name} "
                f"SET being_calculated = 0 WHERE key = ?",
                key,
            )
            conn.commit()

    def wait_on_entry_calc(self, key):
        """Poll until the entry for ``key`` finishes calculating.

        Returns the unpickled cached value once the flag clears.

        Raises
        ------
        RecalculationNeeded
            If ``wait_for_calc_timeout`` seconds elapse first.
        """
        start_time = datetime.datetime.now()
        while True:
            entry = self.get_entry_by_key(key)
            if entry and not entry["being_calculated"]:
                return entry["value"]
            elapsed = (datetime.datetime.now() - start_time).total_seconds()
            if elapsed > self.wait_for_calc_timeout:
                raise RecalculationNeeded()
            # 1-second polling granularity; each poll opens a connection.
            time.sleep(1)

    def clear_cache(self):
        """Delete all entries from the cache table."""
        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()
            cursor.execute(f"DELETE FROM {self.table_name}")
            conn.commit()

    def clear_being_calculated(self):
        """Clear the being-calculated flag on every entry."""
        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()
            cursor.execute(
                f"UPDATE {self.table_name} SET being_calculated = 0"
            )
            conn.commit()

tests/test_mongo_core.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@
2626

2727

2828
class CfgKey:
29-
HOST = "TEST_HOST"
30-
PORT = "TEST_PORT"
29+
HOST = "MONGODB_TEST_HOST"
30+
PORT = "MONGODB_TEST_PORT"
3131
# UNAME = "TEST_USERNAME"
3232
# PWD = "TEST_PASSWORD"
3333
# DB = "TEST_DB"

tests/test_odbc_core.py

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
"""Testing the ODBC core of cachier."""

# standard library imports
import datetime
from time import sleep

# third party imports
import pytest
from birch import Birch  # type: ignore[import-not-found]

# local imports
from cachier import cachier

# from cachier.cores.base import RecalculationNeeded
# from cachier.cores.odbc import _OdbcCore


class CfgKey:
    """Configuration keys for testing."""

    TEST_VS_DOCKERIZED_MYSQL = "TEST_VS_DOCKERIZED_MYSQL"
    TEST_PYODBC_CONNECTION_STRING = "TEST_PYODBC_CONNECTION_STRING"


CFG = Birch(
    namespace="cachier",
    defaults={CfgKey.TEST_VS_DOCKERIZED_MYSQL: False},
)

# Configuration for ODBC connection for tests
CONCT_STR = CFG.mget(CfgKey.TEST_PYODBC_CONNECTION_STRING)
# TABLE_NAME = "test_cache_table"
31+
32+
33+
@pytest.mark.odbc
def test_odbc_entry_creation_and_retrieval(odbc_core):
    """Insert an entry into the ODBC cache and read it back."""

    @cachier(backend='odbc', odbc_connection_string=CONCT_STR)
    def sample_function(arg_1, arg_2):
        return arg_1 + arg_2

    sample_function.clear_cache()
    # First call is a cache miss that computes and stores the result;
    # the second call must be served from the cache table.
    miss_result = sample_function(1, 2)
    hit_result = sample_function(1, 2)
    assert miss_result == 3
    assert hit_result == 3
44+
45+
46+
@pytest.mark.odbc
def test_odbc_stale_after(odbc_core):
    """Verify that entries older than `stale_after` are recomputed."""
    max_age = datetime.timedelta(seconds=1)

    @cachier(backend='odbc', odbc_connection_string=CONCT_STR, stale_after=max_age)
    def stale_test_function(arg_1, arg_2):
        # The timestamp makes each fresh computation yield a unique value.
        return arg_1 + arg_2 + datetime.datetime.now().timestamp()

    before = stale_test_function(5, 10)
    sleep(2)  # let the cached entry cross the staleness threshold
    after = stale_test_function(5, 10)
    # A stale entry must trigger recomputation, hence a different value.
    assert after != before
58+
59+
60+
@pytest.mark.odbc
def test_odbc_clear_cache(odbc_core):
    """Exercise clear_cache on the ODBC backend."""

    @cachier(backend='odbc', odbc_connection_string=CONCT_STR)
    def clearable_function(arg):
        return arg

    # Start from a known-empty cache, populate it, then wipe it again.
    clearable_function.clear_cache()
    populated = clearable_function(3)
    assert populated == 3
    clearable_function.clear_cache()
    # Recomputation after the wipe must still produce the same result.
    assert clearable_function(3) == 3
72+
73+
74+
@pytest.mark.odbc
def test_odbc_being_calculated_flag(odbc_core):
    """Test handling of 'being_calculated' flag in ODBC core."""
    # NOTE(review): both calls below run sequentially in one thread, so the
    # 'being_calculated' flag is never actually observed in the set state;
    # a concurrent caller (thread/process) would be needed to exercise
    # wait_on_entry_calc — TODO strengthen this test.
    @cachier(backend='odbc', odbc_connection_string=CONCT_STR)
    def slow_function(arg):
        sleep(2)  # Simulate long computation
        return arg * 2

    slow_function.clear_cache()
    # First call computes (and takes ~2s); second call should be a plain
    # cache hit and return immediately with the same value.
    result1 = slow_function(4)
    result2 = slow_function(4)  # Should hit cache, not wait for recalculation
    assert result1 == result2

0 commit comments

Comments
 (0)