-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdatabase.py
More file actions
246 lines (214 loc) · 7.79 KB
/
database.py
File metadata and controls
246 lines (214 loc) · 7.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
import logging
import os
from urllib.parse import quote, urlparse

import psycopg2
from flask import g
from psycopg2.pool import SimpleConnectionPool
# Module-level logger; handlers/level are configured by the application.
logger = logging.getLogger(__name__)
# Lazily-initialized global connection pool (see connect_db / _reset_pool).
_POOL = None
# Number of attempts connect_db makes before giving up.
_MAX_RETRIES = 3
def _normalize_url(url: str | None) -> str | None:
if not url:
return url
# psycopg2 accepts both, but normalize for consistency
if url.startswith("postgres://"):
return url.replace("postgres://", "postgresql://", 1)
return url
def get_database_url() -> tuple[str | None, str | None]:
"""Resolve a usable Postgres URL and indicate its source key.
Priority:
- DATABASE_URL
- DATABASE_URL_UNPOOLED
- POSTGRES_URL
- POSTGRES_URL_NON_POOLING
- Compose from PG*/POSTGRES* parts
"""
candidates = [
("DATABASE_URL", os.getenv("DATABASE_URL")),
("DATABASE_URL_UNPOOLED", os.getenv("DATABASE_URL_UNPOOLED")),
("POSTGRES_URL", os.getenv("POSTGRES_URL")),
("POSTGRES_URL_NON_POOLING", os.getenv("POSTGRES_URL_NON_POOLING")),
]
for source, url in candidates:
if url:
return _normalize_url(url), source
# Compose from parts
pg_host = os.getenv("PGHOST") or os.getenv("POSTGRES_HOST")
pg_user = os.getenv("PGUSER") or os.getenv("POSTGRES_USER")
pg_pass = os.getenv("PGPASSWORD") or os.getenv("POSTGRES_PASSWORD")
pg_db = os.getenv("PGDATABASE") or os.getenv("POSTGRES_DATABASE")
if pg_host and pg_user and pg_pass and pg_db:
url = f"postgresql://{pg_user}:{pg_pass}@{pg_host}/{pg_db}?sslmode=require"
return url, "PG_*_COMPOSED"
return None, None
def _safe_dsn_summary(url: str | None, source: str | None) -> str:
if not url:
return "No database URL configured"
parsed = urlparse(url)
dbname = (parsed.path or "").lstrip("/")
return (
f"host={parsed.hostname} db={dbname} user={parsed.username} "
f"scheme={parsed.scheme} source={source}"
)
def _is_connection_alive(conn) -> bool:
"""Check if a database connection is still usable."""
if conn is None:
return False
try:
if conn.closed:
return False
# Rollback any pending transaction before testing
conn.rollback()
cur = conn.cursor()
cur.execute("SELECT 1")
cur.fetchone()
cur.close()
return True
except Exception:
return False
def _reset_pool():
    """Dispose of the global pool so the next connect builds a fresh one."""
    global _POOL
    pool, _POOL = _POOL, None
    if pool is None:
        return
    try:
        pool.closeall()
    except Exception as exc:
        # Best-effort teardown; a failure here is only worth a debug note.
        logger.debug("Error closing pool: %s", exc)
def connect_db():
    """Create or reuse a global connection pool and fetch a connection with retry logic.

    Returns a live psycopg2 connection with autocommit enabled, or None
    when no URL is configured or every retry attempt fails.
    """
    global _POOL
    url, source = get_database_url()
    if not url:
        logger.error("Database URL not found in environment.")
        return None
    for attempt in range(_MAX_RETRIES):
        try:
            # Lazily build the pool on first use (or after a reset).
            if _POOL is None:
                _POOL = SimpleConnectionPool(minconn=1, maxconn=10, dsn=url)
                logger.info("Initialized DB pool (%s)", _safe_dsn_summary(url, source))
            conn = _POOL.getconn()
            # Pooled connections can go stale (server restart, idle
            # timeout); probe before handing one to the caller.
            if not _is_connection_alive(conn):
                logger.warning(
                    "Got stale connection from pool, resetting pool (attempt %d)",
                    attempt + 1,
                )
                try:
                    # close=True discards the dead socket instead of
                    # recycling it back into the pool.
                    _POOL.putconn(conn, close=True)
                except Exception as e:
                    logger.debug("Failed to close stale connection: %s", e)
                # One stale connection implies the rest are suspect too;
                # rebuild the whole pool and retry.
                _reset_pool()
                continue
            # Rollback any pending transaction before setting autocommit
            try:
                conn.rollback()
            except Exception as e:
                logger.debug("Failed to rollback transaction: %s", e)
            conn.autocommit = True
            return conn
        except psycopg2.OperationalError as e:
            # Transient connectivity failure: drop the pool and retry.
            logger.warning(
                "DB connection error (attempt %d/%d): %s", attempt + 1, _MAX_RETRIES, e
            )
            _reset_pool()
            if attempt == _MAX_RETRIES - 1:
                logger.error("Failed to connect after %d attempts", _MAX_RETRIES)
                return None
        except psycopg2.Error as e:
            # Non-transient database error: give up immediately.
            logger.error(
                "Error obtaining DB connection (%s): %s",
                _safe_dsn_summary(url, source),
                e,
            )
            return None
    # Reached only if every attempt ended in the stale-connection `continue`.
    return None
def get_db():
    """Return the app-context database connection, creating it on first use."""
    if "db" not in g:
        g.db = connect_db()
        # Record whether this connection came from the pool so close_db()
        # knows how to release it.
        g._db_from_pool = g.db is not None
    return g.db
def close_db(e=None):
    """Release the app-context DB connection: return it to the pool, or close it."""
    global _POOL
    db = g.pop("db", None)
    from_pool = g.pop("_db_from_pool", False)
    if db is None:
        return
    if from_pool and _POOL is not None:
        try:
            if db.closed:
                logger.debug("Connection already closed, not returning to pool")
            else:
                _POOL.putconn(db)
        except Exception as exc:
            # putconn failed; fall back to closing the connection outright.
            logger.debug("Failed to return connection to pool: %s", exc)
            try:
                if not db.closed:
                    db.close()
            except Exception as exc2:
                logger.debug("Failed to close connection: %s", exc2)
    else:
        # No pool to return to; just close directly.
        try:
            if not db.closed:
                db.close()
        except Exception as exc:
            logger.debug("Failed to close connection: %s", exc)
    logger.debug("Database connection released.")
def init_db():
    """Create the application's tables and index if they don't already exist.

    Uses the app-context connection from get_db(); the connection itself
    is released later by close_db(), but the cursor opened here is always
    closed before returning (the original leaked it).

    Returns:
        True on success, False when no connection is available or a
        database error occurs.
    """
    conn = get_db()  # Use get_db instead of connect_db
    if conn is None:
        logger.error("Failed to connect to the database.")
        return False
    cur = None
    try:
        cur = conn.cursor()
        cur.execute("""
            CREATE TABLE IF NOT EXISTS articles (
                id SERIAL PRIMARY KEY,
                title TEXT NOT NULL,
                content TEXT NOT NULL,
                date_published TIMESTAMP NOT NULL,
                is_published BOOLEAN NOT NULL DEFAULT FALSE,
                slug TEXT UNIQUE NOT NULL
            )
        """)
        cur.execute("""
            CREATE TABLE IF NOT EXISTS projects (
                id SERIAL PRIMARY KEY,
                title TEXT NOT NULL,
                description TEXT NOT NULL,
                image_url TEXT,
                technologies TEXT, -- Comma-separated or JSON
                github_link TEXT,
                live_demo_link TEXT,
                date_added TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
            )
        """)
        cur.execute("""
            CREATE TABLE IF NOT EXISTS article_views (
                id SERIAL PRIMARY KEY,
                article_slug TEXT NOT NULL,
                ip_address TEXT NOT NULL,
                user_agent TEXT,
                viewed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                UNIQUE(article_slug, ip_address)
            )
        """)
        # Helpful index for blog listing performance
        cur.execute(
            "CREATE INDEX IF NOT EXISTS idx_articles_published_date ON articles (is_published, date_published DESC)"
        )
        # Using autocommit, but safe to call commit in case autocommit was disabled
        try:
            conn.commit()
        except Exception as exc:
            logger.debug("Commit failed (likely autocommit on): %s", exc)
        logger.info("Database initialized or already exists.")
        return True
    except psycopg2.Error as e:
        # Lazy %-args instead of an f-string: no formatting cost when the
        # level is disabled, and exceptions render the same way.
        logger.error("Error initializing database: %s", e)
        return False
    finally:
        # Close the cursor; the connection stays open for the app context
        # and is released by close_db().
        if cur is not None:
            try:
                cur.close()
            except Exception as exc:
                logger.debug("Failed to close cursor: %s", exc)