Script 'mail_helper' called by obssrc
Hello community,
Here is the log from the commit of package python-databases for
openSUSE:Factory, checked in at 2021-10-21 23:55:39.
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-databases (Old)
and /work/SRC/openSUSE:Factory/.python-databases.new.1890 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-databases"
Thu Oct 21 23:55:39 2021 rev:5 rq:926785 version:0.5.3
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-databases/python-databases.changes
2021-06-01 10:38:02.764872737 +0200
+++
/work/SRC/openSUSE:Factory/.python-databases.new.1890/python-databases.changes
2021-10-21 23:55:59.840040024 +0200
@@ -1,0 +2,19 @@
+Sun Oct 17 14:40:53 UTC 2021 - Ben Greiner <[email protected]>
+
+- Update to version 0.5.3
+ * Support dialect+driver for default database drivers like
+ postgresql+asyncpg (#396)
+ * Documentation of low-level transaction (#390)
+- Release 0.5.2
+ * Reset counter for failed connections (#385)
+ * Avoid dangling task-local connections after
+ Database.disconnect() (#211)
+- Release 0.5.1
+ * Make database connect and disconnect calls idempotent (#379)
+ * Fix in_ and notin_ queries in SQLAlchemy 1.4 (#378)
+- Release 0.5.0 (August 26th, 2021)
+ * Support SQLAlchemy 1.4 (#299)
+ * Fix concurrent transactions (#328)
+- Drop sqlalchemy-14.patch fixed upstream
+
+-------------------------------------------------------------------
Old:
----
databases-0.4.3.tar.gz
sqlalchemy-14.patch
New:
----
databases-0.5.3.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-databases.spec ++++++
--- /var/tmp/diff_new_pack.Qd6vWq/_old 2021-10-21 23:56:00.200040208 +0200
+++ /var/tmp/diff_new_pack.Qd6vWq/_new 2021-10-21 23:56:00.204040211 +0200
@@ -19,20 +19,19 @@
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
%define skip_python2 1
Name: python-databases
-Version: 0.4.3
+Version: 0.5.3
Release: 0
Summary: Async database support for Python
License: BSD-3-Clause
URL: https://github.com/encode/databases
Source:
https://github.com/encode/databases/archive/%{version}.tar.gz#/databases-%{version}.tar.gz
-# PATCH-FIX-UPSTREAM sqlalchemy-14.patch gh#encode/databases#299 [email protected]
-# Upgrade used API of SQLAlchemy to 1.4
-Patch0: sqlalchemy-14.patch
BuildRequires: %{python_module setuptools}
BuildRequires: fdupes
BuildRequires: python-rpm-macros
-Requires: python-sqlalchemy >= 1.3
-Suggests: python-aiocontextvars
+Requires: python-sqlalchemy >= 1.4
+%if 0%{?python_version_nodots} < 37
+Requires: python-aiocontextvars
+%endif
Suggests: python-aiomysql
Suggests: python-aiopg
Suggests: python-aiosqlite
@@ -40,12 +39,11 @@
BuildArch: noarch
# SECTION test requirements
BuildRequires: %{python_module aiosqlite}
+BuildRequires: %{python_module aiocontextvars if %python-base < 3.7}
BuildRequires: %{python_module asyncpg}
BuildRequires: %{python_module pytest}
BuildRequires: %{python_module requests}
-BuildRequires: %{python_module sqlalchemy >= 1.3}
-BuildRequires: (python3-aiocontextvars if python3-base < 3.7)
-BuildRequires: (python36-aiocontextvars if python36-base)
+BuildRequires: %{python_module sqlalchemy >= 1.4}
# /SECTION
%python_subpackages
@@ -77,6 +75,7 @@
%files %{python_files}
%doc README.md
%license LICENSE.md
-%{python_sitelib}/*
+%{python_sitelib}/databases
+%{python_sitelib}/databases-%{version}*-info
%changelog
++++++ databases-0.4.3.tar.gz -> databases-0.5.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/.github/workflows/publish.yml
new/databases-0.5.3/.github/workflows/publish.yml
--- old/databases-0.4.3/.github/workflows/publish.yml 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/.github/workflows/publish.yml 2021-10-10
12:42:34.000000000 +0200
@@ -16,7 +16,11 @@
- uses: "actions/setup-python@v1"
with:
python-version: 3.7
- - name: "Publish"
+ - name: "Install dependencies"
+ run: "scripts/install"
+ - name: "Build package & docs"
+ run: "scripts/build"
+ - name: "Publish to PyPI & deploy docs"
run: "scripts/publish"
env:
TWINE_USERNAME: __token__
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/.github/workflows/test-suite.yml
new/databases-0.5.3/.github/workflows/test-suite.yml
--- old/databases-0.4.3/.github/workflows/test-suite.yml 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/.github/workflows/test-suite.yml 2021-10-10
12:42:34.000000000 +0200
@@ -47,5 +47,5 @@
run: "scripts/install"
- name: "Run tests"
env:
- TEST_DATABASE_URLS: "sqlite:///testsuite,
mysql://username:password@localhost:3306/testsuite,
postgresql://username:password@localhost:5432/testsuite,
postgresql+aiopg://username:[email protected]:5432/testsuite"
+ TEST_DATABASE_URLS: "sqlite:///testsuite,
sqlite+aiosqlite:///testsuite,
mysql://username:password@localhost:3306/testsuite,
mysql+aiomysql://username:password@localhost:3306/testsuite,
postgresql://username:password@localhost:5432/testsuite,
postgresql+aiopg://username:[email protected]:5432/testsuite,
postgresql+asyncpg://username:password@localhost:5432/testsuite"
run: "scripts/test"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/CHANGELOG.md
new/databases-0.5.3/CHANGELOG.md
--- old/databases-0.4.3/CHANGELOG.md 2021-03-26 16:17:54.000000000 +0100
+++ new/databases-0.5.3/CHANGELOG.md 2021-10-10 12:42:34.000000000 +0200
@@ -4,11 +4,47 @@
The format is based on [Keep a
Changelog](https://keepachangelog.com/en/1.0.0/).
+## 0.5.3 (October 10th, 2021)
+
+### Added
+
+* Support `dialect+driver` for default database drivers like
`postgresql+asyncpg` (#396)
+
+### Fixed
+
+* Documentation of low-level transaction (#390)
+
+## 0.5.2 (September 10th, 2021)
+
+### Fixed
+
+* Reset counter for failed connections (#385)
+* Avoid dangling task-local connections after Database.disconnect() (#211)
+
+## 0.5.1 (September 2nd, 2021)
+
+### Added
+
+* Make database `connect` and `disconnect` calls idempotent (#379)
+
+### Fixed
+
+* Fix `in_` and `notin_` queries in SQLAlchemy 1.4 (#378)
+
+## 0.5.0 (August 26th, 2021)
+
+### Added
+* Support SQLAlchemy 1.4 (#299)
+
+### Fixed
+
+* Fix concurrent transactions (#328)
+
## 0.4.3 (March 26th, 2021)
### Fixed
-* Pin SQLAlchemy to 1.13.* (#314)
+* Pin SQLAlchemy to <1.4 (#314)
## 0.4.2 (March 14th, 2021)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/README.md
new/databases-0.5.3/README.md
--- old/databases-0.4.3/README.md 2021-03-26 16:17:54.000000000 +0100
+++ new/databases-0.5.3/README.md 2021-10-10 12:42:34.000000000 +0200
@@ -19,8 +19,6 @@
**Documentation**:
[https://www.encode.io/databases/](https://www.encode.io/databases/)
-**Community**:
[https://discuss.encode.io/c/databases](https://discuss.encode.io/c/databases)
-
**Requirements**: Python 3.6+
---
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/databases/__init__.py
new/databases-0.5.3/databases/__init__.py
--- old/databases-0.4.3/databases/__init__.py 2021-03-26 16:17:54.000000000
+0100
+++ new/databases-0.5.3/databases/__init__.py 2021-10-10 12:42:34.000000000
+0200
@@ -1,4 +1,4 @@
from databases.core import Database, DatabaseURL
-__version__ = "0.4.3"
+__version__ = "0.5.3"
__all__ = ["Database", "DatabaseURL"]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/databases/backends/aiopg.py
new/databases-0.5.3/databases/backends/aiopg.py
--- old/databases-0.4.3/databases/backends/aiopg.py 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/databases/backends/aiopg.py 2021-10-10
12:42:34.000000000 +0200
@@ -7,11 +7,11 @@
import aiopg
from aiopg.sa.engine import APGCompiler_psycopg2
from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
+from sqlalchemy.engine.cursor import CursorResultMetaData
from sqlalchemy.engine.interfaces import Dialect, ExecutionContext
-from sqlalchemy.engine.result import ResultMetaData, RowProxy
+from sqlalchemy.engine.row import Row
from sqlalchemy.sql import ClauseElement
from sqlalchemy.sql.ddl import DDLElement
-from sqlalchemy.types import TypeEngine
from databases.core import DatabaseURL
from databases.interfaces import ConnectionBackend, DatabaseBackend,
TransactionBackend
@@ -114,14 +114,20 @@
async def fetch_all(self, query: ClauseElement) ->
typing.List[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
+ await cursor.execute(query_str, args)
rows = await cursor.fetchall()
- metadata = ResultMetaData(context, cursor.description)
+ metadata = CursorResultMetaData(context, cursor.description)
return [
- RowProxy(metadata, row, metadata._processors, metadata._keymap)
+ Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
for row in rows
]
finally:
@@ -129,24 +135,30 @@
async def fetch_one(self, query: ClauseElement) ->
typing.Optional[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
+ await cursor.execute(query_str, args)
row = await cursor.fetchone()
if row is None:
return None
- metadata = ResultMetaData(context, cursor.description)
- return RowProxy(metadata, row, metadata._processors,
metadata._keymap)
+ metadata = CursorResultMetaData(context, cursor.description)
+ return Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
finally:
cursor.close()
async def execute(self, query: ClauseElement) -> typing.Any:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
+ await cursor.execute(query_str, args)
return cursor.lastrowid
finally:
cursor.close()
@@ -165,13 +177,19 @@
self, query: ClauseElement
) -> typing.AsyncGenerator[typing.Any, None]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
- metadata = ResultMetaData(context, cursor.description)
+ await cursor.execute(query_str, args)
+ metadata = CursorResultMetaData(context, cursor.description)
async for row in cursor:
- yield RowProxy(metadata, row, metadata._processors,
metadata._keymap)
+ yield Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
finally:
cursor.close()
@@ -181,7 +199,9 @@
def _compile(
self, query: ClauseElement
) -> typing.Tuple[str, dict, CompilationContext]:
- compiled = query.compile(dialect=self._dialect)
+ compiled = query.compile(
+ dialect=self._dialect, compile_kwargs={"render_postcompile": True}
+ )
execution_context = self._dialect.execution_ctx_cls()
execution_context.dialect = self._dialect
@@ -196,6 +216,7 @@
compiled._result_columns,
compiled._ordered_columns,
compiled._textual_ordered_columns,
+ compiled._loose_column_name_matching,
)
else:
args = {}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/databases/backends/mysql.py
new/databases-0.5.3/databases/backends/mysql.py
--- old/databases-0.4.3/databases/backends/mysql.py 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/databases/backends/mysql.py 2021-10-10
12:42:34.000000000 +0200
@@ -5,11 +5,11 @@
import aiomysql
from sqlalchemy.dialects.mysql import pymysql
+from sqlalchemy.engine.cursor import CursorResultMetaData
from sqlalchemy.engine.interfaces import Dialect, ExecutionContext
-from sqlalchemy.engine.result import ResultMetaData, RowProxy
+from sqlalchemy.engine.row import Row
from sqlalchemy.sql import ClauseElement
from sqlalchemy.sql.ddl import DDLElement
-from sqlalchemy.types import TypeEngine
from databases.core import LOG_EXTRA, DatabaseURL
from databases.interfaces import ConnectionBackend, DatabaseBackend,
TransactionBackend
@@ -102,14 +102,20 @@
async def fetch_all(self, query: ClauseElement) ->
typing.List[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
+ await cursor.execute(query_str, args)
rows = await cursor.fetchall()
- metadata = ResultMetaData(context, cursor.description)
+ metadata = CursorResultMetaData(context, cursor.description)
return [
- RowProxy(metadata, row, metadata._processors, metadata._keymap)
+ Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
for row in rows
]
finally:
@@ -117,24 +123,30 @@
async def fetch_one(self, query: ClauseElement) ->
typing.Optional[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
+ await cursor.execute(query_str, args)
row = await cursor.fetchone()
if row is None:
return None
- metadata = ResultMetaData(context, cursor.description)
- return RowProxy(metadata, row, metadata._processors,
metadata._keymap)
+ metadata = CursorResultMetaData(context, cursor.description)
+ return Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
finally:
await cursor.close()
async def execute(self, query: ClauseElement) -> typing.Any:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
+ await cursor.execute(query_str, args)
if cursor.lastrowid == 0:
return cursor.rowcount
return cursor.lastrowid
@@ -155,13 +167,19 @@
self, query: ClauseElement
) -> typing.AsyncGenerator[typing.Any, None]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
cursor = await self._connection.cursor()
try:
- await cursor.execute(query, args)
- metadata = ResultMetaData(context, cursor.description)
+ await cursor.execute(query_str, args)
+ metadata = CursorResultMetaData(context, cursor.description)
async for row in cursor:
- yield RowProxy(metadata, row, metadata._processors,
metadata._keymap)
+ yield Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
finally:
await cursor.close()
@@ -171,7 +189,9 @@
def _compile(
self, query: ClauseElement
) -> typing.Tuple[str, dict, CompilationContext]:
- compiled = query.compile(dialect=self._dialect)
+ compiled = query.compile(
+ dialect=self._dialect, compile_kwargs={"render_postcompile": True}
+ )
execution_context = self._dialect.execution_ctx_cls()
execution_context.dialect = self._dialect
@@ -186,6 +206,7 @@
compiled._result_columns,
compiled._ordered_columns,
compiled._textual_ordered_columns,
+ compiled._loose_column_name_matching,
)
else:
args = {}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/databases/backends/postgres.py
new/databases-0.5.3/databases/backends/postgres.py
--- old/databases-0.4.3/databases/backends/postgres.py 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/databases/backends/postgres.py 2021-10-10
12:42:34.000000000 +0200
@@ -104,8 +104,29 @@
self._dialect = dialect
self._column_map, self._column_map_int, self._column_map_full =
column_maps
+ @property
+ def _mapping(self) -> asyncpg.Record:
+ return self._row
+
+ def keys(self) -> typing.KeysView:
+ import warnings
+
+ warnings.warn(
+ "The `Row.keys()` method is deprecated to mimic SQLAlchemy
behaviour, "
+ "use `Row._mapping.keys()` instead.",
+ DeprecationWarning,
+ )
+ return self._mapping.keys()
+
def values(self) -> typing.ValuesView:
- return self._row.values()
+ import warnings
+
+ warnings.warn(
+ "The `Row.values()` method is deprecated to mimic SQLAlchemy
behaviour, "
+ "use `Row._mapping.values()` instead.",
+ DeprecationWarning,
+ )
+ return self._mapping.values()
def __getitem__(self, key: typing.Any) -> typing.Any:
if len(self._column_map) == 0: # raw query
@@ -149,16 +170,16 @@
async def fetch_all(self, query: ClauseElement) ->
typing.List[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, result_columns = self._compile(query)
- rows = await self._connection.fetch(query, *args)
+ query_str, args, result_columns = self._compile(query)
+ rows = await self._connection.fetch(query_str, *args)
dialect = self._dialect
column_maps = self._create_column_maps(result_columns)
return [Record(row, result_columns, dialect, column_maps) for row in
rows]
async def fetch_one(self, query: ClauseElement) ->
typing.Optional[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, result_columns = self._compile(query)
- row = await self._connection.fetchrow(query, *args)
+ query_str, args, result_columns = self._compile(query)
+ row = await self._connection.fetchrow(query_str, *args)
if row is None:
return None
return Record(
@@ -185,8 +206,8 @@
async def execute(self, query: ClauseElement) -> typing.Any:
assert self._connection is not None, "Connection is not acquired"
- query, args, result_columns = self._compile(query)
- return await self._connection.fetchval(query, *args)
+ query_str, args, result_columns = self._compile(query)
+ return await self._connection.fetchval(query_str, *args)
async def execute_many(self, queries: typing.List[ClauseElement]) -> None:
assert self._connection is not None, "Connection is not acquired"
@@ -201,16 +222,18 @@
self, query: ClauseElement
) -> typing.AsyncGenerator[typing.Any, None]:
assert self._connection is not None, "Connection is not acquired"
- query, args, result_columns = self._compile(query)
+ query_str, args, result_columns = self._compile(query)
column_maps = self._create_column_maps(result_columns)
- async for row in self._connection.cursor(query, *args):
+ async for row in self._connection.cursor(query_str, *args):
yield Record(row, result_columns, self._dialect, column_maps)
def transaction(self) -> TransactionBackend:
return PostgresTransaction(connection=self)
def _compile(self, query: ClauseElement) -> typing.Tuple[str, list, tuple]:
- compiled = query.compile(dialect=self._dialect)
+ compiled = query.compile(
+ dialect=self._dialect, compile_kwargs={"render_postcompile": True}
+ )
if not isinstance(query, DDLElement):
compiled_params = sorted(compiled.params.items())
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/databases/backends/sqlite.py
new/databases-0.5.3/databases/backends/sqlite.py
--- old/databases-0.4.3/databases/backends/sqlite.py 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/databases/backends/sqlite.py 2021-10-10
12:42:34.000000000 +0200
@@ -4,11 +4,11 @@
import aiosqlite
from sqlalchemy.dialects.sqlite import pysqlite
+from sqlalchemy.engine.cursor import CursorResultMetaData
from sqlalchemy.engine.interfaces import Dialect, ExecutionContext
-from sqlalchemy.engine.result import ResultMetaData, RowProxy
+from sqlalchemy.engine.row import Row
from sqlalchemy.sql import ClauseElement
from sqlalchemy.sql.ddl import DDLElement
-from sqlalchemy.types import TypeEngine
from databases.core import LOG_EXTRA, DatabaseURL
from databases.interfaces import ConnectionBackend, DatabaseBackend,
TransactionBackend
@@ -88,32 +88,44 @@
async def fetch_all(self, query: ClauseElement) ->
typing.List[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
- async with self._connection.execute(query, args) as cursor:
+ async with self._connection.execute(query_str, args) as cursor:
rows = await cursor.fetchall()
- metadata = ResultMetaData(context, cursor.description)
+ metadata = CursorResultMetaData(context, cursor.description)
return [
- RowProxy(metadata, row, metadata._processors, metadata._keymap)
+ Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
for row in rows
]
async def fetch_one(self, query: ClauseElement) ->
typing.Optional[typing.Mapping]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
- async with self._connection.execute(query, args) as cursor:
+ async with self._connection.execute(query_str, args) as cursor:
row = await cursor.fetchone()
if row is None:
return None
- metadata = ResultMetaData(context, cursor.description)
- return RowProxy(metadata, row, metadata._processors,
metadata._keymap)
+ metadata = CursorResultMetaData(context, cursor.description)
+ return Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
async def execute(self, query: ClauseElement) -> typing.Any:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
+ query_str, args, context = self._compile(query)
async with self._connection.cursor() as cursor:
- await cursor.execute(query, args)
+ await cursor.execute(query_str, args)
if cursor.lastrowid == 0:
return cursor.rowcount
return cursor.lastrowid
@@ -127,11 +139,17 @@
self, query: ClauseElement
) -> typing.AsyncGenerator[typing.Any, None]:
assert self._connection is not None, "Connection is not acquired"
- query, args, context = self._compile(query)
- async with self._connection.execute(query, args) as cursor:
- metadata = ResultMetaData(context, cursor.description)
+ query_str, args, context = self._compile(query)
+ async with self._connection.execute(query_str, args) as cursor:
+ metadata = CursorResultMetaData(context, cursor.description)
async for row in cursor:
- yield RowProxy(metadata, row, metadata._processors,
metadata._keymap)
+ yield Row(
+ metadata,
+ metadata._processors,
+ metadata._keymap,
+ Row._default_key_style,
+ row,
+ )
def transaction(self) -> TransactionBackend:
return SQLiteTransaction(self)
@@ -139,7 +157,9 @@
def _compile(
self, query: ClauseElement
) -> typing.Tuple[str, list, CompilationContext]:
- compiled = query.compile(dialect=self._dialect)
+ compiled = query.compile(
+ dialect=self._dialect, compile_kwargs={"render_postcompile": True}
+ )
execution_context = self._dialect.execution_ctx_cls()
execution_context.dialect = self._dialect
@@ -158,6 +178,7 @@
compiled._result_columns,
compiled._ordered_columns,
compiled._textual_ordered_columns,
+ compiled._loose_column_name_matching,
)
query_message = compiled.string.replace(" \n", " ").replace("\n", " ")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/databases/core.py
new/databases-0.5.3/databases/core.py
--- old/databases-0.4.3/databases/core.py 2021-03-26 16:17:54.000000000
+0100
+++ new/databases-0.5.3/databases/core.py 2021-10-10 12:42:34.000000000
+0200
@@ -5,7 +5,7 @@
import sys
import typing
from types import TracebackType
-from urllib.parse import SplitResult, parse_qsl, urlsplit, unquote
+from urllib.parse import SplitResult, parse_qsl, unquote, urlsplit
from sqlalchemy import text
from sqlalchemy.sql import ClauseElement
@@ -14,9 +14,9 @@
from databases.interfaces import ConnectionBackend, DatabaseBackend,
TransactionBackend
if sys.version_info >= (3, 7): # pragma: no cover
- from contextvars import ContextVar
+ import contextvars as contextvars
else: # pragma: no cover
- from aiocontextvars import ContextVar
+ import aiocontextvars as contextvars
try: # pragma: no cover
import click
@@ -62,13 +62,15 @@
self._force_rollback = force_rollback
- backend_str = self.SUPPORTED_BACKENDS[self.url.scheme]
+ backend_str = self._get_backend()
backend_cls = import_from_string(backend_str)
assert issubclass(backend_cls, DatabaseBackend)
self._backend = backend_cls(self.url, **self.options)
# Connections are stored as task-local state.
- self._connection_context = ContextVar("connection_context") # type:
ContextVar
+ self._connection_context = contextvars.ContextVar(
+ "connection_context"
+ ) # type: contextvars.ContextVar
# When `force_rollback=True` is used, we use a single global
# connection, within a transaction that always rolls back.
@@ -79,7 +81,9 @@
"""
Establish the connection pool.
"""
- assert not self.is_connected, "Already connected."
+ if self.is_connected:
+ logger.debug("Already connected, skipping connection")
+ return None
await self._backend.connect()
logger.info(
@@ -102,7 +106,9 @@
"""
Close all connections in the connection pool.
"""
- assert self.is_connected, "Already disconnected."
+ if not self.is_connected:
+ logger.debug("Already disconnected, skipping disconnection")
+ return None
if self._force_rollback:
assert self._global_connection is not None
@@ -112,6 +118,8 @@
self._global_transaction = None
self._global_connection = None
+ else:
+ self._connection_context =
contextvars.ContextVar("connection_context")
await self._backend.disconnect()
logger.info(
@@ -173,6 +181,11 @@
async for record in connection.iterate(query, values):
yield record
+ def _new_connection(self) -> "Connection":
+ connection = Connection(self._backend)
+ self._connection_context.set(connection)
+ return connection
+
def connection(self) -> "Connection":
if self._global_connection is not None:
return self._global_connection
@@ -180,14 +193,23 @@
try:
return self._connection_context.get()
except LookupError:
- connection = Connection(self._backend)
- self._connection_context.set(connection)
- return connection
+ return self._new_connection()
def transaction(
self, *, force_rollback: bool = False, **kwargs: typing.Any
) -> "Transaction":
- return Transaction(self.connection, force_rollback=force_rollback,
**kwargs)
+ try:
+ connection = self._connection_context.get()
+ is_root = not connection._transaction_stack
+ if is_root:
+ newcontext = contextvars.copy_context()
+ get_conn = lambda: newcontext.run(self._new_connection)
+ else:
+ get_conn = self.connection
+ except LookupError:
+ get_conn = self.connection
+
+ return Transaction(get_conn, force_rollback=force_rollback, **kwargs)
@contextlib.contextmanager
def force_rollback(self) -> typing.Iterator[None]:
@@ -198,6 +220,12 @@
finally:
self._force_rollback = initial
+ def _get_backend(self) -> str:
+ try:
+ return self.SUPPORTED_BACKENDS[self.url.scheme]
+ except KeyError:
+ return self.SUPPORTED_BACKENDS[self.url.dialect]
+
class Connection:
def __init__(self, backend: DatabaseBackend) -> None:
@@ -215,8 +243,12 @@
async def __aenter__(self) -> "Connection":
async with self._connection_lock:
self._connection_counter += 1
- if self._connection_counter == 1:
- await self._connection.acquire()
+ try:
+ if self._connection_counter == 1:
+ await self._connection.acquire()
+ except Exception as e:
+ self._connection_counter -= 1
+ raise e
return self
async def __aexit__(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/docs/connections_and_transactions.md
new/databases-0.5.3/docs/connections_and_transactions.md
--- old/databases-0.4.3/docs/connections_and_transactions.md 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/docs/connections_and_transactions.md 2021-10-10
12:42:34.000000000 +0200
@@ -59,19 +59,27 @@
## Transactions
-Transactions are managed by async context blocks:
+Transactions are managed by async context blocks.
+
+A transaction can be acquired from the database connection pool:
```python
async with database.transaction():
...
```
+It can also be acquired from a specific database connection:
+
+```python
+async with database.connection() as connection:
+ async with connection.transaction():
+ ...
+```
For a lower-level transaction API:
```python
transaction = await database.transaction()
try:
- await transaction.start()
...
except:
await transaction.rollback()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/docs/index.md
new/databases-0.5.3/docs/index.md
--- old/databases-0.4.3/docs/index.md 2021-03-26 16:17:54.000000000 +0100
+++ new/databases-0.5.3/docs/index.md 2021-10-10 12:42:34.000000000 +0200
@@ -37,7 +37,8 @@
$ pip install databases[sqlite]
```
-Driver support is providing using one of [asyncpg][asyncpg],
[aiomysql][aiomysql], or [aiosqlite][aiosqlite].
+Driver support is provided using one of [asyncpg][asyncpg],
[aiomysql][aiomysql], or [aiosqlite][aiosqlite].
+Note that if you are using any synchronous SQLAlchemy functions such as
`engine.create_all()` or [alembic][alembic] migrations then you still have to
install a synchronous DB driver: [psycopg2][psycopg2] for PostgreSQL and
[pymysql][pymysql] for MySQL.
---
@@ -91,6 +92,8 @@
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[sqlalchemy-core-tutorial]:
https://docs.sqlalchemy.org/en/latest/core/tutorial.html
[alembic]: https://alembic.sqlalchemy.org/en/latest/
+[psycopg2]: https://www.psycopg.org/
+[pymysql]: https://github.com/PyMySQL/PyMySQL
[asyncpg]: https://github.com/MagicStack/asyncpg
[aiomysql]: https://github.com/aio-libs/aiomysql
[aiosqlite]: https://github.com/jreese/aiosqlite
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/mkdocs.yml
new/databases-0.5.3/mkdocs.yml
--- old/databases-0.4.3/mkdocs.yml 2021-03-26 16:17:54.000000000 +0100
+++ new/databases-0.5.3/mkdocs.yml 2021-10-10 12:42:34.000000000 +0200
@@ -15,5 +15,7 @@
- Tests & Migrations: 'tests_and_migrations.md'
markdown_extensions:
+ - mkautodoc
- admonition
- - codehilite
+ - pymdownx.highlight
+ - pymdownx.superfences
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/requirements.txt
new/databases-0.5.3/requirements.txt
--- old/databases-0.4.3/requirements.txt 2021-03-26 16:17:54.000000000
+0100
+++ new/databases-0.5.3/requirements.txt 2021-10-10 12:42:34.000000000
+0200
@@ -23,3 +23,12 @@
pytest-cov
starlette
requests
+
+# Documentation
+mkdocs
+mkdocs-material
+mkautodoc
+
+# Packaging
+twine
+wheel
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/scripts/README.md
new/databases-0.5.3/scripts/README.md
--- old/databases-0.4.3/scripts/README.md 2021-03-26 16:17:54.000000000
+0100
+++ new/databases-0.5.3/scripts/README.md 2021-10-10 12:42:34.000000000
+0200
@@ -1,9 +1,11 @@
# Development Scripts
+* `scripts/build` - Build package and documentation.
* `scripts/clean` - Delete any build artifacts.
+* `scripts/docs` - Run documentation server locally.
* `scripts/install` - Install dependencies in a virtual environment.
-* `scripts/test` - Run the test suite.
* `scripts/lint` - Run the code linting.
* `scripts/publish` - Publish the latest version to PyPI.
+* `scripts/test` - Run the test suite.
Styled after GitHub's ["Scripts to Rule Them
All"](https://github.com/github/scripts-to-rule-them-all).
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/scripts/build
new/databases-0.5.3/scripts/build
--- old/databases-0.4.3/scripts/build 1970-01-01 01:00:00.000000000 +0100
+++ new/databases-0.5.3/scripts/build 2021-10-10 12:42:34.000000000 +0200
@@ -0,0 +1,13 @@
+#!/bin/sh -e
+
+if [ -d 'venv' ] ; then
+ PREFIX="venv/bin/"
+else
+ PREFIX=""
+fi
+
+set -x
+
+${PREFIX}python setup.py sdist bdist_wheel
+${PREFIX}twine check dist/*
+${PREFIX}mkdocs build
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/scripts/docs
new/databases-0.5.3/scripts/docs
--- old/databases-0.4.3/scripts/docs 1970-01-01 01:00:00.000000000 +0100
+++ new/databases-0.5.3/scripts/docs 2021-10-10 12:42:34.000000000 +0200
@@ -0,0 +1,10 @@
+#!/bin/sh -e
+
+export PREFIX=""
+if [ -d 'venv' ] ; then
+ export PREFIX="venv/bin/"
+fi
+
+set -x
+
+${PREFIX}mkdocs serve
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/scripts/publish
new/databases-0.5.3/scripts/publish
--- old/databases-0.4.3/scripts/publish 2021-03-26 16:17:54.000000000 +0100
+++ new/databases-0.5.3/scripts/publish 2021-10-10 12:42:34.000000000 +0200
@@ -1,7 +1,6 @@
#!/bin/sh -e
VERSION_FILE="databases/__init__.py"
-PYTHONPATH=.
if [ -d 'venv' ] ; then
PREFIX="venv/bin/"
@@ -23,7 +22,5 @@
set -x
-${PREFIX}pip install twine wheel mkdocs mkdocs-material mkautodoc
-${PREFIX}python setup.py sdist bdist_wheel
${PREFIX}twine upload dist/*
${PREFIX}mkdocs gh-deploy --force
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/setup.py new/databases-0.5.3/setup.py
--- old/databases-0.4.3/setup.py 2021-03-26 16:17:54.000000000 +0100
+++ new/databases-0.5.3/setup.py 2021-10-10 12:42:34.000000000 +0200
@@ -48,7 +48,7 @@
packages=get_packages("databases"),
package_data={"databases": ["py.typed"]},
data_files=[("", ["LICENSE.md"])],
- install_requires=['sqlalchemy<1.4', 'aiocontextvars;python_version<"3.7"'],
+ install_requires=['sqlalchemy>=1.4,<1.5',
'aiocontextvars;python_version<"3.7"'],
extras_require={
"postgresql": ["asyncpg"],
"mysql": ["aiomysql"],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/tests/test_database_url.py
new/databases-0.5.3/tests/test_database_url.py
--- old/databases-0.4.3/tests/test_database_url.py 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/tests/test_database_url.py 2021-10-10
12:42:34.000000000 +0200
@@ -1,7 +1,9 @@
-from databases import DatabaseURL
from urllib.parse import quote
+
import pytest
+from databases import DatabaseURL
+
def test_database_url_repr():
u = DatabaseURL("postgresql://localhost/name")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/tests/test_databases.py
new/databases-0.5.3/tests/test_databases.py
--- old/databases-0.4.3/tests/test_databases.py 2021-03-26 16:17:54.000000000
+0100
+++ new/databases-0.5.3/tests/test_databases.py 2021-10-10 12:42:34.000000000
+0200
@@ -3,6 +3,8 @@
import decimal
import functools
import os
+import re
+from unittest.mock import MagicMock, patch
import pytest
import sqlalchemy
@@ -14,6 +16,11 @@
DATABASE_URLS = [url.strip() for url in
os.environ["TEST_DATABASE_URLS"].split(",")]
+class AsyncMock(MagicMock):
+ async def __call__(self, *args, **kwargs):
+ return super(AsyncMock, self).__call__(*args, **kwargs)
+
+
class MyEpochType(sqlalchemy.types.TypeDecorator):
impl = sqlalchemy.Integer
@@ -71,14 +78,18 @@
)
[email protected](autouse=True, scope="module")
[email protected](autouse=True, scope="function")
def create_test_database():
# Create test databases with tables creation
for url in DATABASE_URLS:
database_url = DatabaseURL(url)
- if database_url.scheme == "mysql":
+ if database_url.scheme in ["mysql", "mysql+aiomysql"]:
url = str(database_url.replace(driver="pymysql"))
- elif database_url.scheme == "postgresql+aiopg":
+ elif database_url.scheme in [
+ "postgresql+aiopg",
+ "sqlite+aiosqlite",
+ "postgresql+asyncpg",
+ ]:
url = str(database_url.replace(driver=None))
engine = sqlalchemy.create_engine(url)
metadata.create_all(engine)
@@ -89,9 +100,13 @@
# Drop test databases
for url in DATABASE_URLS:
database_url = DatabaseURL(url)
- if database_url.scheme == "mysql":
+ if database_url.scheme in ["mysql", "mysql+aiomysql"]:
url = str(database_url.replace(driver="pymysql"))
- elif database_url.scheme == "postgresql+aiopg":
+ elif database_url.scheme in [
+ "postgresql+aiopg",
+ "sqlite+aiosqlite",
+ "postgresql+asyncpg",
+ ]:
url = str(database_url.replace(driver=None))
engine = sqlalchemy.create_engine(url)
metadata.drop_all(engine)
@@ -268,6 +283,30 @@
@pytest.mark.parametrize("database_url", DATABASE_URLS)
@async_adapter
+async def test_queries_after_error(database_url):
+ """
+ Test that the basic `execute()` works after a previous error.
+ """
+
+ class DBException(Exception):
+ pass
+
+ async with Database(database_url) as database:
+ with patch.object(
+ database.connection()._connection,
+ "acquire",
+ new=AsyncMock(side_effect=DBException),
+ ):
+ with pytest.raises(DBException):
+ query = notes.select()
+ await database.fetch_all(query)
+
+ query = notes.select()
+ await database.fetch_all(query)
+
+
[email protected]("database_url", DATABASE_URLS)
+@async_adapter
async def test_results_support_mapping_interface(database_url):
"""
Casting results to a dict should work, since the interface defines them
@@ -336,8 +375,8 @@
query = "SELECT 1 AS id, 2 AS id"
row = await database.fetch_one(query=query)
- assert list(row.keys()) == ["id", "id"]
- assert list(row.values()) == [1, 2]
+ assert list(row._mapping.keys()) == ["id", "id"]
+ assert list(row._mapping.values()) == [1, 2]
@pytest.mark.parametrize("database_url", DATABASE_URLS)
@@ -447,9 +486,12 @@
database_url = DatabaseURL(database_url)
- if database_url.scheme != "postgresql":
+ if database_url.scheme not in ["postgresql", "postgresql+asyncpg"]:
pytest.skip("Test (currently) only supports asyncpg")
+ if database_url.scheme == "postgresql+asyncpg":
+ database_url = database_url.replace(driver=None)
+
def insert_independently():
engine = sqlalchemy.create_engine(str(database_url))
conn = engine.connect()
@@ -736,6 +778,14 @@
await database.disconnect()
assert not database.is_connected
+ # connect and disconnect idempotence
+ await database.connect()
+ await database.connect()
+ assert database.is_connected
+ await database.disconnect()
+ await database.disconnect()
+ assert not database.is_connected
+
@pytest.mark.parametrize("database_url", DATABASE_URLS)
@async_adapter
@@ -800,12 +850,16 @@
"""
async with Database(database_url) as database:
async with database.connection() as connection:
- async with database.transaction(force_rollback=True):
+ async with connection.transaction(force_rollback=True):
# Get the raw connection
raw_connection = connection.raw_connection
# Insert query
- if database.url.scheme in ["mysql", "postgresql+aiopg"]:
+ if database.url.scheme in [
+ "mysql",
+ "mysql+aiomysql",
+ "postgresql+aiopg",
+ ]:
insert_query = "INSERT INTO notes (text, completed) VALUES
(%s, %s)"
else:
insert_query = "INSERT INTO notes (text, completed) VALUES
($1, $2)"
@@ -813,18 +867,22 @@
# execute()
values = ("example1", True)
- if database.url.scheme in ["mysql", "postgresql+aiopg"]:
+ if database.url.scheme in [
+ "mysql",
+ "mysql+aiomysql",
+ "postgresql+aiopg",
+ ]:
cursor = await raw_connection.cursor()
await cursor.execute(insert_query, values)
- elif database.url.scheme == "postgresql":
+ elif database.url.scheme in ["postgresql",
"postgresql+asyncpg"]:
await raw_connection.execute(insert_query, *values)
- elif database.url.scheme == "sqlite":
+ elif database.url.scheme in ["sqlite", "sqlite+aiosqlite"]:
await raw_connection.execute(insert_query, values)
# execute_many()
values = [("example2", False), ("example3", True)]
- if database.url.scheme == "mysql":
+ if database.url.scheme in ["mysql", "mysql+aiomysql"]:
cursor = await raw_connection.cursor()
await cursor.executemany(insert_query, values)
elif database.url.scheme == "postgresql+aiopg":
@@ -839,13 +897,17 @@
select_query = "SELECT notes.id, notes.text, notes.completed
FROM notes"
# fetch_all()
- if database.url.scheme in ["mysql", "postgresql+aiopg"]:
+ if database.url.scheme in [
+ "mysql",
+ "mysql+aiomysql",
+ "postgresql+aiopg",
+ ]:
cursor = await raw_connection.cursor()
await cursor.execute(select_query)
results = await cursor.fetchall()
- elif database.url.scheme == "postgresql":
+ elif database.url.scheme in ["postgresql",
"postgresql+asyncpg"]:
results = await raw_connection.fetch(select_query)
- elif database.url.scheme == "sqlite":
+ elif database.url.scheme in ["sqlite", "sqlite+aiosqlite"]:
results = await
raw_connection.execute_fetchall(select_query)
assert len(results) == 3
@@ -858,7 +920,7 @@
assert results[2][2] == True
# fetch_one()
- if database.url.scheme == "postgresql":
+ if database.url.scheme in ["postgresql", "postgresql+asyncpg"]:
result = await raw_connection.fetchrow(select_query)
else:
cursor = await raw_connection.cursor()
@@ -981,7 +1043,7 @@
@async_adapter
async def test_column_names(database_url, select_query):
"""
- Test that column names are exposed correctly through `.keys()` on each row.
+ Test that column names are exposed correctly through `._mapping.keys()` on
each row.
"""
async with Database(database_url) as database:
async with database.transaction(force_rollback=True):
@@ -993,6 +1055,87 @@
results = await database.fetch_all(query=select_query)
assert len(results) == 1
- assert sorted(results[0].keys()) == ["completed", "id", "text"]
+ assert sorted(results[0]._mapping.keys()) == ["completed", "id",
"text"]
assert results[0]["text"] == "example1"
assert results[0]["completed"] == True
+
+
[email protected]("database_url", DATABASE_URLS)
+@async_adapter
+async def test_parallel_transactions(database_url):
+ """
+ Test parallel transaction execution.
+ """
+
+ async def test_task(db):
+ async with db.transaction():
+ await db.fetch_one("SELECT 1")
+
+ async with Database(database_url) as database:
+ await database.fetch_one("SELECT 1")
+
+ tasks = [test_task(database) for i in range(4)]
+ await asyncio.gather(*tasks)
+
+
[email protected]("database_url", DATABASE_URLS)
+@async_adapter
+async def test_posgres_interface(database_url):
+ """
+ Since SQLAlchemy 1.4, `Row.values()` is removed and `Row.keys()` is
deprecated.
+ Custom postgres interface mimics more or less this behaviour by
deprecating those
+ two methods
+ """
+ database_url = DatabaseURL(database_url)
+
+ if database_url.scheme not in ["postgresql", "postgresql+asyncpg"]:
+ pytest.skip("Test is only for asyncpg")
+
+ async with Database(database_url) as database:
+ async with database.transaction(force_rollback=True):
+ query = notes.insert()
+ values = {"text": "example1", "completed": True}
+ await database.execute(query, values)
+
+ query = notes.select()
+ result = await database.fetch_one(query=query)
+
+ with pytest.warns(
+ DeprecationWarning,
+ match=re.escape(
+ "The `Row.keys()` method is deprecated to mimic SQLAlchemy
behaviour, "
+ "use `Row._mapping.keys()` instead."
+ ),
+ ):
+ assert (
+ list(result.keys())
+ == [k for k in result]
+ == ["id", "text", "completed"]
+ )
+
+ with pytest.warns(
+ DeprecationWarning,
+ match=re.escape(
+ "The `Row.values()` method is deprecated to mimic
SQLAlchemy behaviour, "
+ "use `Row._mapping.values()` instead."
+ ),
+ ):
+ # avoid checking `id` at index 0 since it may change depending
on the launched tests
+ assert list(result.values())[1:] == ["example1", True]
+
+
[email protected]("database_url", DATABASE_URLS)
+@async_adapter
+async def test_postcompile_queries(database_url):
+ """
+ Since SQLAlchemy 1.4, IN operators needs to do render_postcompile
+ """
+ async with Database(database_url) as database:
+ query = notes.insert()
+ values = {"text": "example1", "completed": True}
+ await database.execute(query, values)
+
+ query = notes.select().where(notes.c.id.in_([2, 3]))
+ results = await database.fetch_all(query=query)
+
+ assert len(results) == 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/databases-0.4.3/tests/test_integration.py
new/databases-0.5.3/tests/test_integration.py
--- old/databases-0.4.3/tests/test_integration.py 2021-03-26
16:17:54.000000000 +0100
+++ new/databases-0.5.3/tests/test_integration.py 2021-10-10
12:42:34.000000000 +0200
@@ -28,9 +28,13 @@
# Create test databases
for url in DATABASE_URLS:
database_url = DatabaseURL(url)
- if database_url.scheme == "mysql":
+ if database_url.scheme in ["mysql", "mysql+aiomysql"]:
url = str(database_url.replace(driver="pymysql"))
- elif database_url.scheme == "postgresql+aiopg":
+ elif database_url.scheme in [
+ "postgresql+aiopg",
+ "sqlite+aiosqlite",
+ "postgresql+asyncpg",
+ ]:
url = str(database_url.replace(driver=None))
engine = sqlalchemy.create_engine(url)
metadata.create_all(engine)
@@ -41,9 +45,13 @@
# Drop test databases
for url in DATABASE_URLS:
database_url = DatabaseURL(url)
- if database_url.scheme == "mysql":
+ if database_url.scheme in ["mysql", "mysql+aiomysql"]:
url = str(database_url.replace(driver="pymysql"))
- elif database_url.scheme == "postgresql+aiopg":
+ elif database_url.scheme in [
+ "postgresql+aiopg",
+ "sqlite+aiosqlite",
+ "postgresql+asyncpg",
+ ]:
url = str(database_url.replace(driver=None))
engine = sqlalchemy.create_engine(url)
metadata.drop_all(engine)