-
Notifications
You must be signed in to change notification settings - Fork 98
Expand file tree
/
Copy pathdatabases.py
More file actions
403 lines (315 loc) · 14.3 KB
/
databases.py
File metadata and controls
403 lines (315 loc) · 14.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
from __future__ import annotations
import contextlib
from collections.abc import AsyncIterator, Awaitable, Callable, Iterator, Sequence
from contextlib import AbstractAsyncContextManager
from typing import Any
import peewee
from playhouse import postgres_ext as ext
from peewee_async.result_wrappers import fetch_models
from .connection import ConnectionContextManager, connection_context
from .pool import MysqlPoolBackend, PoolBackend, PostgresqlPoolBackend, PsycopgPoolBackend
from .transactions import Transaction
from .utils import CursorProtocol, __log__
FetchResults = Callable[["AioDatabase", CursorProtocol], Awaitable[Any]]
def fetchmany(count: int | None) -> FetchResults:
    """Build a fetch strategy for :meth:`AioDatabase.aio_execute_sql`.

    ``count == 1`` fetches a single row via ``fetchone()``; any other
    integer fetches at most ``count`` rows; ``None`` fetches everything.
    """

    async def _fetch(db: AioDatabase, cursor: CursorProtocol) -> Sequence[Any]:
        if count is None:
            return await cursor.fetchall()
        if count == 1:
            # Single-row requests use fetchone(), returning one row, not a list.
            return await cursor.fetchone()
        return await cursor.fetchmany(count)

    return _fetch


# Ready-made strategies for the two most common cases.
fetchone = fetchmany(1)
fetchall = fetchmany(None)
class AioDatabase(peewee.Database):
    """Base async database driver providing a **single drop-in sync**
    connection and an **async connection pool** interface.

    :param pool_params: parameters that are passed through to the pool backend

    Example::

        database = Psycopg3Database(
            'postgres',
            host='127.0.0.1',
            port=5432,
            password='postgres',
            user='postgres',
            pool_params={
                'min_size': 0,
                'max_size': 5,
                'max_lifetime': 15,
            },
        )

    See also:
    https://peewee.readthedocs.io/en/latest/peewee/api.html#Database
    """

    _allow_sync = False  # whether sync queries are allowed

    pool_backend_cls: type[PoolBackend]  # set by each concrete driver subclass
    pool_backend: PoolBackend  # constructed in init()

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # pool_params must exist before super().__init__() runs, because
        # peewee's constructor may call init(), which populates it.
        self.pool_params: dict[str, Any] = {}
        super().__init__(*args, **kwargs)

    def init_pool_params_defaults(self) -> None:
        """Hook for subclasses to pre-seed driver-specific pool defaults."""
        pass

    def init_pool_params(self) -> None:
        """Assemble the final pool parameters.

        Merges, in order of increasing precedence: subclass defaults,
        the user-supplied ``pool_params`` dict (popped from connect
        params), then the remaining connect params themselves.
        """
        self.init_pool_params_defaults()
        pool_params = self.connect_params.pop("pool_params", {})
        self.pool_params.update(pool_params)
        self.pool_params.update(self.connect_params)

    def init(self, database: str | None, **kwargs: Any) -> None:
        """Initialize the database and (re)create the async pool backend."""
        super().init(database, **kwargs)
        self.init_pool_params()
        self.pool_backend = self.pool_backend_cls(database=self.database, **self.pool_params)

    def _ensure_initialized(self) -> None:
        """Raise if the database is still deferred (i.e. init() not called)."""
        if self.deferred:
            raise Exception("Error, database must be initialized before creating a connection pool")

    async def aio_connect(self) -> None:
        """Creates a connection pool"""
        self._ensure_initialized()
        await self.pool_backend.connect()

    @property
    def is_connected(self) -> bool:
        """Checks if pool is connected"""
        return self.pool_backend.is_connected

    async def aio_close(self) -> None:
        """Close pool backend. The pool stays closed until you run aio_connect manually."""
        self._ensure_initialized()
        await self.pool_backend.close()

    async def _aio_begin(self, use_savepoint: bool = False) -> Transaction:
        """Begin a transaction (or savepoint) on the current task's connection.

        Requires an active :meth:`aio_connection` context; otherwise the
        connection context var is unset and we cannot attach a transaction.
        """
        _connection_context = connection_context.get()
        if _connection_context is None:
            raise peewee.OperationalError("This method can only be called within the aio_connection context manager")
        tr = Transaction(_connection_context.connection, is_savepoint=use_savepoint)
        await tr.begin()
        return tr

    async def aio_begin(self) -> Transaction:
        """
        Start a new database transaction.

        This method executes the SQL `BEGIN` statement and returns a
        `Transaction` object representing the started transaction.

        Notes:
        - This method must be called within an active :meth:`aio_connection` context manager.
        - The returned :class:`Transaction` object should be used to manage commit or rollback operations.

        Returns:
            Transaction: An instance representing the active transaction.
        """
        return await self._aio_begin()

    async def aio_savepoint(self) -> Transaction:
        """
        Start a new transaction savepoint.

        This method executes the SQL `SAVEPOINT` statement and returns
        a `Transaction` object representing the created savepoint.

        Notes:
        - This method must be called within an active :meth:`aio_connection` context manager.
        - The returned :class:`Transaction` object should be used to manage commit or rollback operations.

        Returns:
            Transaction: An instance representing the active savepoint.
        """
        return await self._aio_begin(use_savepoint=True)

    def aio_atomic(self) -> AbstractAsyncContextManager[None]:
        """Create an async context-manager which runs any queries in the wrapped block
        in a transaction (or save-point if blocks are nested).

        Calls to :meth:`.aio_atomic()` can be nested.
        """
        return self._aio_atomic(use_savepoint=True)

    def aio_transaction(self) -> AbstractAsyncContextManager[None]:
        """Create an async context-manager that runs all queries in the wrapped block in a transaction.

        Calls to :meth:`.aio_transaction()` cannot be nested. If so OperationalError will be raised.
        """
        return self._aio_atomic(use_savepoint=False)

    @contextlib.asynccontextmanager
    async def _aio_atomic(self, use_savepoint: bool = False) -> AsyncIterator[None]:
        """Shared implementation for aio_atomic/aio_transaction.

        The outermost (root) call opens a real transaction and flags the
        connection context; nested calls open savepoints when allowed.
        """
        async with self.aio_connection() as connection:
            _connection_context = connection_context.get()
            assert _connection_context is not None
            _is_root = not _connection_context.transaction_is_opened
            _is_nested = _connection_context.transaction_is_opened
            if _is_nested and not use_savepoint:
                # aio_transaction() forbids nesting outright.
                raise peewee.OperationalError("Transaction already opened")
            try:
                async with Transaction(connection, is_savepoint=_is_nested):
                    if _is_root:
                        _connection_context.transaction_is_opened = True
                    yield
            finally:
                # Only the root call resets the flag, so nested savepoints
                # don't prematurely mark the transaction as closed.
                if _is_root:
                    _connection_context.transaction_is_opened = False

    def set_allow_sync(self, value: bool) -> None:
        """Allow or forbid sync queries for the database. See also
        the :meth:`.allow_sync()` context manager.
        """
        self._allow_sync = value

    @contextlib.contextmanager
    def allow_sync(self) -> Iterator[None]:
        """Allow sync queries within context. Close sync
        connection on exit if connected.

        Example::

            with database.allow_sync():
                PageBlock.create_table(True)
        """
        old_allow_sync = self._allow_sync
        self._allow_sync = True
        try:
            yield
        finally:
            # Restore the previous flag and drop the sync connection even
            # if the block raised.
            self._allow_sync = old_allow_sync
            self.close()

    def execute_sql(self, *args: Any, **kwargs: Any) -> Any:
        """Sync execute SQL query, `allow_sync` must be set to True."""
        # NOTE: assert is stripped under `python -O`; kept as assert for
        # backward compatibility (callers may expect AssertionError).
        assert self._allow_sync, (
            "Error, sync query is not allowed! Call the `.set_allow_sync()` or use the `.allow_sync()` context manager."
        )
        return super().execute_sql(*args, **kwargs)

    def aio_connection(self) -> ConnectionContextManager:
        """Acquire an async connection from the pool as a context manager."""
        self._ensure_initialized()
        return ConnectionContextManager(self.pool_backend)

    async def aio_execute_sql(
        self, sql: str, params: Sequence[Any] | None = None, fetch_results: FetchResults | None = None
    ) -> Any:
        """Execute raw SQL asynchronously, optionally fetching results.

        :param sql: SQL string with driver-style placeholders.
        :param params: query parameters, defaults to an empty tuple.
        :param fetch_results: strategy (e.g. :func:`fetchone`/:func:`fetchall`)
            applied to the open cursor; when None, nothing is fetched.
        """
        __log__.debug((sql, params))
        with peewee.__exception_wrapper__:
            async with self.aio_connection() as connection:
                async with connection.cursor() as cursor:
                    await cursor.execute(sql, params or ())
                    if fetch_results is not None:
                        return await fetch_results(self, cursor)

    async def aio_execute(self, query: Any, fetch_results: FetchResults | None = None) -> Any:
        """Execute a *SELECT*, *INSERT*, *UPDATE* or *DELETE* query asynchronously.

        :param query: peewee query instance created with ``Model.select()``,
            ``Model.update()`` etc.
        :param fetch_results: function taking a cursor param, letting you read
            the data manually; the cursor is closed automatically afterwards.
        :return: result depends on query type; it's the same as for sync `query.execute()`
        """
        ctx = self.get_sql_context()
        sql, params = ctx.sql(query).query()
        fetch_results = fetch_results or getattr(query, "fetch_results", None)
        return await self.aio_execute_sql(sql, params, fetch_results=fetch_results)

    async def aio_last_insert_id(self, cursor: CursorProtocol, query: peewee.Insert) -> int:
        """Return the last inserted row id; drivers may override per dialect."""
        return cursor.lastrowid

    async def aio_rows_affected(self, cursor: CursorProtocol) -> int:
        """Return the number of rows affected by the last statement."""
        return cursor.rowcount

    async def aio_sequence_exists(self, seq: str) -> bool:
        raise NotImplementedError

    async def aio_get_tables(self, schema: str | None = None) -> list[str]:
        raise NotImplementedError

    async def aio_create_tables(self, models: list[Any], **options: Any) -> None:
        """
        Async version of **peewee.Database.create_tables**

        https://docs.peewee-orm.com/en/4.0.0/peewee/api.html#Database.create_tables
        """
        for model in peewee.sort_models(models):
            await model.aio_create_table(**options)

    async def aio_drop_tables(self, models: list[Any], **kwargs: Any) -> None:
        """
        Async version of **peewee.Database.drop_tables**

        https://docs.peewee-orm.com/en/4.0.0/peewee/api.html#Database.drop_tables
        """
        # Drop in reverse dependency order so FK constraints don't break.
        for model in reversed(peewee.sort_models(models)):
            await model.aio_drop_table(**kwargs)

    async def aio_table_exists(self, table_name: Any, schema: str | None = None) -> bool:
        """Check table existence; accepts either a table name or a model class."""
        if peewee.is_model(table_name):
            model = table_name
            table_name = model._meta.table_name
            schema = model._meta.schema
        return table_name in await self.aio_get_tables(schema=schema)
class AioPostgresDatabase(AioDatabase):
    """Async behavior shared by the PostgreSQL-backed drivers."""

    async def aio_last_insert_id(self, cursor: CursorProtocol, query: peewee.Insert) -> Any:
        """Return the inserted primary key for simple inserts, or model rows otherwise."""
        if query._query_type != peewee.Insert.SIMPLE:
            # Non-simple inserts (RETURNING etc.) materialize model instances.
            return await fetch_models(cursor, query)
        try:
            rows = await cursor.fetchmany(1)
            return rows[0][0]
        except (IndexError, KeyError, TypeError):
            # No row came back (e.g. no RETURNING clause produced output).
            return None

    async def aio_sequence_exists(self, sequence: str) -> bool:
        """Check whether a sequence named ``sequence`` exists in pg_class."""
        sql = """
        SELECT COUNT(*) FROM pg_class, pg_namespace
        WHERE relkind='S'
            AND pg_class.relnamespace = pg_namespace.oid
            AND relname=%s"""
        row = await self.aio_execute_sql(sql, [sequence], fetch_results=fetchone)
        return bool(row[0])

    async def aio_get_tables(self, schema: str | None = None) -> list[str]:
        """List table names in ``schema`` (defaults to ``public``)."""
        sql = "SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname = %s ORDER BY tablename"
        rows = await self.aio_execute_sql(sql, (schema or "public",), fetch_results=fetchall)
        return [name for (name,) in rows]
class Psycopg3Database(AioPostgresDatabase, ext.Psycopg3Database):
    """Extension for `playhouse.Psycopg3Database` providing extra methods
    for managing async connection based on psycopg3 pool backend.

    Example::

        database = Psycopg3Database(
            'postgres',
            host='127.0.0.1',
            port=5432,
            password='postgres',
            user='postgres',
            pool_params={
                'min_size': 0,
                'max_size': 5,
                'max_lifetime': 15,
            },
        )

    See also:
    https://docs.peewee-orm.com/en/4.0.0/peewee/api.html#PostgresqlDatabase
    https://www.psycopg.org/psycopg3/docs/advanced/pool.html
    """

    # Async connections are served from a psycopg3 async pool.
    pool_backend_cls = PsycopgPoolBackend
class PostgresqlDatabase(AioPostgresDatabase, ext.PostgresqlExtDatabase):
    """Extension for `playhouse.PostgresqlDatabase` providing extra methods
    for managing async connection based on aiopg pool backend.

    Example::

        database = PostgresqlDatabase(
            'postgres',
            host='127.0.0.1',
            port=5432,
            password='postgres',
            user='postgres',
            pool_params={
                'minsize': 0,
                'maxsize': 5,
                'timeout': 30,
                'pool_recycle': 1.5,
            },
        )

    See also:
    https://docs.peewee-orm.com/en/4.0.0/peewee/api.html#PostgresqlDatabase
    https://aiopg.readthedocs.io/en/stable/
    """

    # Async connections are served from an aiopg pool.
    pool_backend_cls = PostgresqlPoolBackend

    def init_pool_params_defaults(self) -> None:
        # Mirror playhouse's extension flags on the async pool; hstore
        # registration follows whatever was configured on the sync side.
        self.pool_params.update({"enable_json": True, "enable_hstore": self._register_hstore})
class MySQLDatabase(AioDatabase, peewee.MySQLDatabase):
    """MySQL database driver providing **single drop-in sync**
    connection and **async connections pool** interface.

    Example::

        database = MySQLDatabase(
            'mysql',
            host='127.0.0.1',
            port=3306,
            user='root',
            password='mysql',
            connect_timeout=30,
            pool_params={
                'minsize': 0,
                'maxsize': 5,
                'pool_recycle': 2,
            },
        )

    See also:
    https://docs.peewee-orm.com/en/4.0.0/peewee/api.html#MySQLDatabase
    https://aiomysql.readthedocs.io/en/stable/
    """

    # Async connections are served from an aiomysql pool.
    pool_backend_cls = MysqlPoolBackend

    def init_pool_params_defaults(self) -> None:
        # Default to autocommit so plain queries don't open implicit
        # transactions on pooled connections.
        self.pool_params.update({"autocommit": True})

    async def aio_get_tables(self, schema: str | None = None) -> list[str]:
        """List base-table names (views excluded).

        ``schema`` is accepted for interface parity with other drivers but
        is ignored: the query always targets the current ``DATABASE()``.
        """
        query = (
            "SELECT table_name FROM information_schema.tables "
            "WHERE table_schema = DATABASE() AND table_type != %s "
            "ORDER BY table_name"
        )
        return [row for (row,) in await self.aio_execute_sql(query, ("VIEW",), fetch_results=fetchall)]