diff --git a/.flake8 b/.flake8
index 9697fc96..3a8b87a8 100644
--- a/.flake8
+++ b/.flake8
@@ -1,3 +1,3 @@
[flake8]
ignore = E402,E731,W503,W504,E252
-exclude = .git,__pycache__,build,dist,.eggs,.github,.local
+exclude = .git,__pycache__,build,dist,.eggs,.github,.local,.venv
diff --git a/.github/workflows/install-postgres.sh b/.github/workflows/install-postgres.sh
index c3f27186..4ffbb4d6 100755
--- a/.github/workflows/install-postgres.sh
+++ b/.github/workflows/install-postgres.sh
@@ -27,11 +27,16 @@ if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then
apt-get install -y --no-install-recommends \
"postgresql-${PGVERSION}" \
"postgresql-contrib-${PGVERSION}"
+elif [ "${ID}" = "almalinux" ]; then
+ yum install -y \
+ "postgresql-server" \
+ "postgresql-devel" \
+ "postgresql-contrib"
elif [ "${ID}" = "centos" ]; then
- el="EL-${VERSION_ID}-$(arch)"
+ el="EL-${VERSION_ID%.*}-$(arch)"
baseurl="https://download.postgresql.org/pub/repos/yum/reporpms"
yum install -y "${baseurl}/${el}/pgdg-redhat-repo-latest.noarch.rpm"
- if [ ${VERSION_ID} -ge 8 ]; then
+ if [ ${VERSION_ID%.*} -ge 8 ]; then
dnf -qy module disable postgresql
fi
yum install -y \
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index e984a351..1eba94a5 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -37,7 +37,7 @@ jobs:
mkdir -p dist/
echo "${VERSION}" > dist/VERSION
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: dist
path: dist/
@@ -50,37 +50,56 @@ jobs:
PIP_DISABLE_PIP_VERSION_CHECK: 1
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
fetch-depth: 50
submodules: true
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.x"
- name: Build source distribution
run: |
pip install -U setuptools wheel pip
python setup.py sdist
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: dist
path: dist/*.tar.*
- build-wheels:
+ build-wheels-matrix:
needs: validate-release-request
+ runs-on: ubuntu-latest
+ outputs:
+ include: ${{ steps.set-matrix.outputs.include }}
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
+ with:
+ python-version: "3.x"
+ - run: pip install cibuildwheel==2.10.2
+ - id: set-matrix
+ run: |
+ MATRIX_INCLUDE=$(
+ {
+ cibuildwheel --print-build-identifiers --platform linux --arch x86_64,aarch64 | grep cp | jq -nRc '{"only": inputs, "os": "ubuntu-latest"}' \
+ && cibuildwheel --print-build-identifiers --platform macos --arch x86_64,arm64 | grep cp | jq -nRc '{"only": inputs, "os": "macos-latest"}' \
+ && cibuildwheel --print-build-identifiers --platform windows --arch x86,AMD64 | grep cp | jq -nRc '{"only": inputs, "os": "windows-latest"}'
+ } | jq -sc
+ )
+ echo "include=$MATRIX_INCLUDE" >> $GITHUB_OUTPUT
+
+ build-wheels:
+ needs: build-wheels-matrix
runs-on: ${{ matrix.os }}
+ name: Build ${{ matrix.only }}
+
strategy:
matrix:
- os: [ubuntu-latest, macos-latest, windows-latest]
- cibw_python: ["cp36-*", "cp37-*", "cp38-*", "cp39-*", "cp310-*"]
- cibw_arch: ["auto64", "auto32"]
- exclude:
- - os: macos-latest
- cibw_arch: "auto32"
- - os: ubuntu-latest
- cibw_arch: "auto32"
+ include: ${{ fromJson(needs.build-wheels-matrix.outputs.include) }}
defaults:
run:
@@ -90,24 +109,30 @@ jobs:
PIP_DISABLE_PIP_VERSION_CHECK: 1
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
fetch-depth: 50
submodules: true
- - uses: pypa/cibuildwheel@v2.8.0
+ - name: Set up QEMU
+ if: runner.os == 'Linux'
+ uses: docker/setup-qemu-action@v2
+
+ - uses: pypa/cibuildwheel@v2.10.2
+ with:
+ only: ${{ matrix.only }}
env:
CIBW_BUILD_VERBOSITY: 1
- CIBW_BUILD: ${{ matrix.cibw_python }}
- CIBW_ARCHS: ${{ matrix.cibw_arch }}
+ CIBW_MANYLINUX_X86_64_IMAGE: manylinux_2_28
+ CIBW_MANYLINUX_AARCH64_IMAGE: manylinux_2_28
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: dist
path: wheelhouse/*.whl
publish-docs:
- needs: validate-release-request
+ needs: [build-sdist, build-wheels]
runs-on: ubuntu-latest
env:
@@ -115,15 +140,15 @@ jobs:
steps:
- name: Checkout source
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
fetch-depth: 5
submodules: true
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
with:
- python-version: 3.8
+ python-version: "3.x"
- name: Build docs
run: |
@@ -131,7 +156,7 @@ jobs:
make htmldocs
- name: Checkout gh-pages
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
fetch-depth: 5
ref: gh-pages
@@ -157,12 +182,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
fetch-depth: 5
submodules: false
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v3
with:
name: dist
path: dist/
@@ -171,7 +196,7 @@ jobs:
id: relver
run: |
set -e
- echo ::set-output name=version::$(cat dist/VERSION)
+ echo "version=$(cat dist/VERSION)" >> $GITHUB_OUTPUT
rm dist/VERSION
- name: Merge and tag the PR
@@ -197,7 +222,7 @@ jobs:
ls -al dist/
- name: Upload to PyPI
- uses: pypa/gh-action-pypi-publish@master
+ uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index d61573db..a120e9a6 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -17,13 +17,10 @@ jobs:
# job.
strategy:
matrix:
- python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
+ python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
os: [ubuntu-latest, macos-latest, windows-latest]
loop: [asyncio, uvloop]
exclude:
- # uvloop does not support Python 3.6
- - loop: uvloop
- python-version: "3.6"
# uvloop does not support windows
- loop: uvloop
os: windows-latest
@@ -38,7 +35,7 @@ jobs:
PIP_DISABLE_PIP_VERSION_CHECK: 1
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
fetch-depth: 50
submodules: true
@@ -54,7 +51,7 @@ jobs:
__version__\s*=\s*(?:['"])([[:PEP440:]])(?:['"])
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
if: steps.release.outputs.version == 0
with:
python-version: ${{ matrix.python-version }}
@@ -79,7 +76,7 @@ jobs:
test-postgres:
strategy:
matrix:
- postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14"]
+ postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15"]
runs-on: ubuntu-latest
@@ -87,7 +84,7 @@ jobs:
PIP_DISABLE_PIP_VERSION_CHECK: 1
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
fetch-depth: 50
submodules: true
@@ -114,8 +111,10 @@ jobs:
>> "${GITHUB_ENV}"
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
if: steps.release.outputs.version == 0
+ with:
+ python-version: "3.x"
- name: Install Python Deps
if: steps.release.outputs.version == 0
diff --git a/README.rst b/README.rst
index 2ed14726..e5212156 100644
--- a/README.rst
+++ b/README.rst
@@ -13,8 +13,8 @@ of PostgreSQL server binary protocol for use with Python's ``asyncio``
framework. You can read more about asyncpg in an introductory
`blog post `_.
-asyncpg requires Python 3.6 or later and is supported for PostgreSQL
-versions 9.5 to 14. Older PostgreSQL versions or other databases implementing
+asyncpg requires Python 3.7 or later and is supported for PostgreSQL
+versions 9.5 to 15. Older PostgreSQL versions or other databases implementing
the PostgreSQL protocol *may* work, but are not being actively tested.
diff --git a/asyncpg/_testbase/__init__.py b/asyncpg/_testbase/__init__.py
index 3dd8a314..7aca834f 100644
--- a/asyncpg/_testbase/__init__.py
+++ b/asyncpg/_testbase/__init__.py
@@ -438,6 +438,7 @@ def tearDown(self):
class HotStandbyTestCase(ClusterTestCase):
+
@classmethod
def setup_cluster(cls):
cls.master_cluster = cls.new_cluster(pg_cluster.TempCluster)
diff --git a/asyncpg/_version.py b/asyncpg/_version.py
index 7e897c90..693e3bed 100644
--- a/asyncpg/_version.py
+++ b/asyncpg/_version.py
@@ -10,4 +10,4 @@
# supported platforms, publish the packages on PyPI, merge the PR
# to the target branch, create a Git tag pointing to the commit.
-__version__ = '0.26.0'
+__version__ = '0.28.0.dev0'
diff --git a/asyncpg/cluster.py b/asyncpg/cluster.py
index 0999e41c..4467cc2a 100644
--- a/asyncpg/cluster.py
+++ b/asyncpg/cluster.py
@@ -626,7 +626,7 @@ def init(self, **settings):
'pg_basebackup init exited with status {:d}:\n{}'.format(
process.returncode, output.decode()))
- if self._pg_version <= (11, 0):
+ if self._pg_version < (12, 0):
with open(os.path.join(self._data_dir, 'recovery.conf'), 'w') as f:
f.write(textwrap.dedent("""\
standby_mode = 'on'
diff --git a/asyncpg/compat.py b/asyncpg/compat.py
index 348b8caa..b9b13fa5 100644
--- a/asyncpg/compat.py
+++ b/asyncpg/compat.py
@@ -8,10 +8,9 @@
import asyncio
import pathlib
import platform
-import sys
+import typing
-PY_37 = sys.version_info >= (3, 7)
SYSTEM = platform.uname().system
@@ -20,7 +19,7 @@
CSIDL_APPDATA = 0x001a
- def get_pg_home_directory() -> pathlib.Path:
+ def get_pg_home_directory() -> typing.Optional[pathlib.Path]:
# We cannot simply use expanduser() as that returns the user's
# home directory, whereas Postgres stores its config in
# %AppData% on Windows.
@@ -32,16 +31,11 @@ def get_pg_home_directory() -> pathlib.Path:
return pathlib.Path(buf.value) / 'postgresql'
else:
- def get_pg_home_directory() -> pathlib.Path:
- return pathlib.Path.home()
-
-
-if PY_37:
- def current_asyncio_task(loop):
- return asyncio.current_task(loop)
-else:
- def current_asyncio_task(loop):
- return asyncio.Task.current_task(loop)
+ def get_pg_home_directory() -> typing.Optional[pathlib.Path]:
+ try:
+ return pathlib.Path.home()
+ except (RuntimeError, KeyError):
+ return None
async def wait_closed(stream):
diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py
index a51eb789..8b29c0fc 100644
--- a/asyncpg/connect_utils.py
+++ b/asyncpg/connect_utils.py
@@ -57,7 +57,7 @@ def parse(cls, sslmode):
'direct_tls',
'connect_timeout',
'server_settings',
- 'target_session_attribute',
+ 'target_session_attrs',
])
@@ -239,10 +239,6 @@ def _parse_hostlist(hostlist, port, *, unquote=False):
def _parse_tls_version(tls_version):
- if not hasattr(ssl_module, 'TLSVersion'):
- raise ValueError(
- "TLSVersion is not supported in this version of Python"
- )
if tls_version.startswith('SSL'):
raise ValueError(
f"Unsupported TLS version: {tls_version}"
@@ -255,14 +251,19 @@ def _parse_tls_version(tls_version):
)
-def _dot_postgresql_path(filename) -> pathlib.Path:
- return (pathlib.Path.home() / '.postgresql' / filename).resolve()
+def _dot_postgresql_path(filename) -> typing.Optional[pathlib.Path]:
+ try:
+ homedir = pathlib.Path.home()
+ except (RuntimeError, KeyError):
+ return None
+
+ return (homedir / '.postgresql' / filename).resolve()
def _parse_connect_dsn_and_args(*, dsn, host, port, user,
password, passfile, database, ssl,
direct_tls, connect_timeout, server_settings,
- target_session_attribute):
+ target_session_attrs):
# `auth_hosts` is the version of host information for the purposes
# of reading the pgpass file.
auth_hosts = None
@@ -508,11 +509,16 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user,
ssl.load_verify_locations(cafile=sslrootcert)
ssl.verify_mode = ssl_module.CERT_REQUIRED
else:
- sslrootcert = _dot_postgresql_path('root.crt')
try:
+ sslrootcert = _dot_postgresql_path('root.crt')
+ assert sslrootcert is not None
ssl.load_verify_locations(cafile=sslrootcert)
- except FileNotFoundError:
+ except (AssertionError, FileNotFoundError):
if sslmode > SSLMode.require:
+ if sslrootcert is None:
+ raise RuntimeError(
+ 'Cannot determine home directory'
+ )
raise ValueError(
f'root certificate file "{sslrootcert}" does '
f'not exist\nEither provide the file or '
@@ -533,18 +539,20 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user,
ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN
else:
sslcrl = _dot_postgresql_path('root.crl')
- try:
- ssl.load_verify_locations(cafile=sslcrl)
- except FileNotFoundError:
- pass
- else:
- ssl.verify_flags |= ssl_module.VERIFY_CRL_CHECK_CHAIN
+ if sslcrl is not None:
+ try:
+ ssl.load_verify_locations(cafile=sslcrl)
+ except FileNotFoundError:
+ pass
+ else:
+ ssl.verify_flags |= \
+ ssl_module.VERIFY_CRL_CHECK_CHAIN
if sslkey is None:
sslkey = os.getenv('PGSSLKEY')
if not sslkey:
sslkey = _dot_postgresql_path('postgresql.key')
- if not sslkey.exists():
+ if sslkey is not None and not sslkey.exists():
sslkey = None
if not sslpassword:
sslpassword = ''
@@ -556,12 +564,15 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user,
)
else:
sslcert = _dot_postgresql_path('postgresql.crt')
- try:
- ssl.load_cert_chain(
- sslcert, keyfile=sslkey, password=lambda: sslpassword
- )
- except FileNotFoundError:
- pass
+ if sslcert is not None:
+ try:
+ ssl.load_cert_chain(
+ sslcert,
+ keyfile=sslkey,
+ password=lambda: sslpassword
+ )
+ except FileNotFoundError:
+ pass
# OpenSSL 1.1.1 keylog file, copied from create_default_context()
if hasattr(ssl, 'keylog_filename'):
@@ -576,11 +587,7 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user,
ssl_min_protocol_version
)
else:
- try:
- ssl.minimum_version = _parse_tls_version('TLSv1.2')
- except ValueError:
- # Python 3.6 does not have ssl.TLSVersion
- pass
+ ssl.minimum_version = _parse_tls_version('TLSv1.2')
if ssl_max_protocol_version is None:
ssl_max_protocol_version = os.getenv('PGSSLMAXPROTOCOLVERSION')
@@ -603,11 +610,28 @@ def _parse_connect_dsn_and_args(*, dsn, host, port, user,
'server_settings is expected to be None or '
'a Dict[str, str]')
+ if target_session_attrs is None:
+
+ target_session_attrs = os.getenv(
+ "PGTARGETSESSIONATTRS", SessionAttribute.any
+ )
+ try:
+
+ target_session_attrs = SessionAttribute(target_session_attrs)
+ except ValueError as exc:
+ raise exceptions.InterfaceError(
+ "target_session_attrs is expected to be one of "
+ "{!r}"
+ ", got {!r}".format(
+ SessionAttribute.__members__.values(), target_session_attrs
+ )
+ ) from exc
+
params = _ConnectionParameters(
user=user, password=password, database=database, ssl=ssl,
sslmode=sslmode, direct_tls=direct_tls,
connect_timeout=connect_timeout, server_settings=server_settings,
- target_session_attribute=target_session_attribute)
+ target_session_attrs=target_session_attrs)
return addrs, params
@@ -618,7 +642,7 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile,
max_cached_statement_lifetime,
max_cacheable_statement_size,
ssl, direct_tls, server_settings,
- target_session_attribute):
+ target_session_attrs):
local_vars = locals()
for var_name in {'max_cacheable_statement_size',
'max_cached_statement_lifetime',
@@ -647,7 +671,7 @@ def _parse_connect_arguments(*, dsn, host, port, user, password, passfile,
password=password, passfile=passfile, ssl=ssl,
direct_tls=direct_tls, database=database,
connect_timeout=timeout, server_settings=server_settings,
- target_session_attribute=target_session_attribute)
+ target_session_attrs=target_session_attrs)
config = _ClientConfiguration(
command_timeout=command_timeout,
@@ -885,12 +909,16 @@ class SessionAttribute(str, enum.Enum):
primary = 'primary'
standby = 'standby'
prefer_standby = 'prefer-standby'
+ read_write = "read-write"
+ read_only = "read-only"
def _accept_in_hot_standby(should_be_in_hot_standby: bool):
"""
If the server didn't report "in_hot_standby" at startup, we must determine
the state by checking "SELECT pg_catalog.pg_is_in_recovery()".
+ If the server allows a connection and states it is in recovery it must
+ be a replica/standby server.
"""
async def can_be_used(connection):
settings = connection.get_settings()
@@ -901,12 +929,26 @@ async def can_be_used(connection):
is_in_hot_standby = await connection.fetchval(
"SELECT pg_catalog.pg_is_in_recovery()"
)
-
return is_in_hot_standby == should_be_in_hot_standby
return can_be_used
+def _accept_read_only(should_be_read_only: bool):
+ """
+ Verify that the server's default_transaction_read_only setting matches the expected read-only state
+ """
+ async def can_be_used(connection):
+ settings = connection.get_settings()
+ is_readonly = getattr(settings, 'default_transaction_read_only', 'off')
+
+ if is_readonly == "on":
+ return should_be_read_only
+
+ return await _accept_in_hot_standby(should_be_read_only)(connection)
+ return can_be_used
+
+
async def _accept_any(_):
return True
@@ -916,6 +958,8 @@ async def _accept_any(_):
SessionAttribute.primary: _accept_in_hot_standby(False),
SessionAttribute.standby: _accept_in_hot_standby(True),
SessionAttribute.prefer_standby: _accept_in_hot_standby(True),
+ SessionAttribute.read_write: _accept_read_only(False),
+ SessionAttribute.read_only: _accept_read_only(True),
}
@@ -929,7 +973,7 @@ async def _connect(*, loop, timeout, connection_class, record_class, **kwargs):
loop = asyncio.get_event_loop()
addrs, params, config = _parse_connect_arguments(timeout=timeout, **kwargs)
- target_attr = params.target_session_attribute
+ target_attr = params.target_session_attrs
candidates = []
chosen_connection = None
diff --git a/asyncpg/connection.py b/asyncpg/connection.py
index 6797c54e..432fcef6 100644
--- a/asyncpg/connection.py
+++ b/asyncpg/connection.py
@@ -20,7 +20,6 @@
import warnings
import weakref
-from . import compat
from . import connect_utils
from . import cursor
from . import exceptions
@@ -30,7 +29,6 @@
from . import serverversion
from . import transaction
from . import utils
-from .connect_utils import SessionAttribute
class ConnectionMeta(type):
@@ -1418,6 +1416,7 @@ def _mark_stmts_as_closed(self):
def _maybe_gc_stmt(self, stmt):
if (
stmt.refs == 0
+ and stmt.name
and not self._stmt_cache.has(
(stmt.query, stmt.record_class, stmt.ignore_custom_codec)
)
@@ -1469,7 +1468,7 @@ async def _cancel(self, waiter):
waiter.set_exception(ex)
finally:
self._cancellations.discard(
- compat.current_asyncio_task(self._loop))
+ asyncio.current_task(self._loop))
if not waiter.done():
waiter.set_result(None)
@@ -1794,7 +1793,7 @@ async def connect(dsn=None, *,
connection_class=Connection,
record_class=protocol.Record,
server_settings=None,
- target_session_attribute=SessionAttribute.any):
+ target_session_attrs=None):
r"""A coroutine to establish a connection to a PostgreSQL server.
The connection parameters may be specified either as a connection
@@ -2005,16 +2004,22 @@ async def connect(dsn=None, *,
this connection object. Must be a subclass of
:class:`~asyncpg.Record`.
- :param SessionAttribute target_session_attribute:
+ :param SessionAttribute target_session_attrs:
If specified, check that the host has the correct attribute.
Can be one of:
"any": the first successfully connected host
"primary": the host must NOT be in hot standby mode
"standby": the host must be in hot standby mode
+ "read-write": the host must allow writes
+ "read-only": the host must NOT allow writes
"prefer-standby": first try to find a standby host, but if
none of the listed hosts is a standby server,
return any of them.
+ If not specified, will try to use PGTARGETSESSIONATTRS
+ from the environment.
+ Defaults to "any" if no value is set.
+
:return: A :class:`~asyncpg.connection.Connection` instance.
Example:
@@ -2099,15 +2104,6 @@ async def connect(dsn=None, *,
if record_class is not protocol.Record:
_check_record_class(record_class)
- try:
- target_session_attribute = SessionAttribute(target_session_attribute)
- except ValueError as exc:
- raise exceptions.InterfaceError(
- "target_session_attribute is expected to be one of "
- "'any', 'primary', 'standby' or 'prefer-standby'"
- ", got {!r}".format(target_session_attribute)
- ) from exc
-
if loop is None:
loop = asyncio.get_event_loop()
@@ -2130,7 +2126,7 @@ async def connect(dsn=None, *,
statement_cache_size=statement_cache_size,
max_cached_statement_lifetime=max_cached_statement_lifetime,
max_cacheable_statement_size=max_cacheable_statement_size,
- target_session_attribute=target_session_attribute
+ target_session_attrs=target_session_attrs
)
diff --git a/asyncpg/pool.py b/asyncpg/pool.py
index 14e4be7e..eaf501f4 100644
--- a/asyncpg/pool.py
+++ b/asyncpg/pool.py
@@ -43,10 +43,6 @@ def __new__(mcls, name, bases, dct, *, wrap=False):
return super().__new__(mcls, name, bases, dct)
- def __init__(cls, name, bases, dct, *, wrap=False):
- # Needed for Python 3.5 to handle `wrap` class keyword argument.
- super().__init__(name, bases, dct)
-
@staticmethod
def _wrap_connection_method(meth_name):
def call_con_method(self, *args, **kwargs):
@@ -450,6 +446,13 @@ async def _initialize(self):
await asyncio.gather(*connect_tasks)
+ def is_closing(self):
+ """Return ``True`` if the pool is closing or is closed.
+
+ .. versionadded:: 0.28.0
+ """
+ return self._closed or self._closing
+
def get_size(self):
"""Return the current number of connections in this pool.
diff --git a/asyncpg/protocol/record/recordobj.c b/asyncpg/protocol/record/recordobj.c
index 4bf34c8a..c0049217 100644
--- a/asyncpg/protocol/record/recordobj.c
+++ b/asyncpg/protocol/record/recordobj.c
@@ -451,16 +451,31 @@ record_subscript(ApgRecordObject* o, PyObject* item)
}
+static const char *
+get_typename(PyTypeObject *type)
+{
+ assert(type->tp_name != NULL);
+ const char *s = strrchr(type->tp_name, '.');
+ if (s == NULL) {
+ s = type->tp_name;
+ }
+ else {
+ s++;
+ }
+ return s;
+}
+
+
static PyObject *
record_repr(ApgRecordObject *v)
{
Py_ssize_t i, n;
- PyObject *keys_iter;
+ PyObject *keys_iter, *type_prefix;
_PyUnicodeWriter writer;
n = Py_SIZE(v);
if (n == 0) {
- return PyUnicode_FromString("<Record>");
+ return PyUnicode_FromFormat("<%s>", get_typename(Py_TYPE(v)));
}
keys_iter = PyObject_GetIter(v->desc->keys);
@@ -471,16 +486,22 @@ record_repr(ApgRecordObject *v)
i = Py_ReprEnter((PyObject *)v);
if (i != 0) {
Py_DECREF(keys_iter);
- return i > 0 ? PyUnicode_FromString("<Record ...>") : NULL;
+ if (i > 0) {
+ return PyUnicode_FromFormat("<%s ...>", get_typename(Py_TYPE(v)));
+ }
+ return NULL;
}
_PyUnicodeWriter_Init(&writer);
writer.overallocate = 1;
writer.min_length = 12; /* <Record a=1> */
- if (_PyUnicodeWriter_WriteASCIIString(&writer, "` or any of the Record-returning
methods.
+.. code-block:: python
+
+ class MyRecord(asyncpg.Record):
+ def __getattr__(self, name):
+ return self[name]
+
Why can't I use a :ref:`cursor ` outside of a transaction?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/docs/index.rst b/docs/index.rst
index 87c43aa8..93671abc 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -14,8 +14,8 @@ PostgreSQL and Python/asyncio. asyncpg is an efficient, clean implementation
of PostgreSQL server binary protocol for use with Python's ``asyncio``
framework.
-**asyncpg** requires Python 3.6 or later and is supported for PostgreSQL
-versions 9.5 to 14. Older PostgreSQL versions or other databases implementing
+**asyncpg** requires Python 3.7 or later and is supported for PostgreSQL
+versions 9.5 to 15. Older PostgreSQL versions or other databases implementing
the PostgreSQL protocol *may* work, but are not being actively tested.
Contents
diff --git a/setup.py b/setup.py
index 332bad3f..af0bcdc3 100644
--- a/setup.py
+++ b/setup.py
@@ -7,8 +7,8 @@
import sys
-if sys.version_info < (3, 6):
- raise RuntimeError('asyncpg requires Python 3.6 or greater')
+if sys.version_info < (3, 7):
+ raise RuntimeError('asyncpg requires Python 3.7 or greater')
import os
import os.path
@@ -29,12 +29,8 @@
# Minimal dependencies required to test asyncpg.
TEST_DEPENDENCIES = [
- # pycodestyle is a dependency of flake8, but it must be frozen because
- # their combination breaks too often
- # (example breakage: https://gitlab.com/pycqa/flake8/issues/427)
- 'pycodestyle~=2.7.0',
- 'flake8~=3.9.2',
- 'uvloop>=0.15.3; platform_system != "Windows" and python_version >= "3.7"',
+ 'flake8~=5.0.4',
+ 'uvloop>=0.15.3; platform_system != "Windows"',
]
# Dependencies required to build documentation.
@@ -259,7 +255,6 @@ def finalize_options(self):
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3 :: Only',
- 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
@@ -268,7 +263,7 @@ def finalize_options(self):
'Topic :: Database :: Front-Ends',
],
platforms=['macOS', 'POSIX', 'Windows'],
- python_requires='>=3.6.0',
+ python_requires='>=3.7.0',
zip_safe=False,
author='MagicStack Inc',
author_email='hello@magic.io',
diff --git a/tests/test_connect.py b/tests/test_connect.py
index f905e3cd..e3cfb372 100644
--- a/tests/test_connect.py
+++ b/tests/test_connect.py
@@ -14,7 +14,6 @@
import shutil
import ssl
import stat
-import sys
import tempfile
import textwrap
import unittest
@@ -25,7 +24,7 @@
import asyncpg
from asyncpg import _testbase as tb
-from asyncpg import connection
+from asyncpg import connection as pg_connection
from asyncpg import connect_utils
from asyncpg import cluster as pg_cluster
from asyncpg import exceptions
@@ -72,6 +71,14 @@ def mock_dot_postgresql(*, ca=True, crl=False, client=False, protected=False):
yield
+@contextlib.contextmanager
+def mock_no_home_dir():
+ with unittest.mock.patch(
+ 'pathlib.Path.home', unittest.mock.Mock(side_effect=RuntimeError)
+ ):
+ yield
+
+
class TestSettings(tb.ConnectedTestCase):
async def test_get_settings_01(self):
@@ -385,7 +392,8 @@ class TestConnectParams(tb.TestCase):
'password': 'passw',
'database': 'testdb',
'ssl': True,
- 'sslmode': SSLMode.prefer})
+ 'sslmode': SSLMode.prefer,
+ 'target_session_attrs': 'any'})
},
{
@@ -407,7 +415,8 @@ class TestConnectParams(tb.TestCase):
'result': ([('host2', 456)], {
'user': 'user2',
'password': 'passw2',
- 'database': 'db2'})
+ 'database': 'db2',
+ 'target_session_attrs': 'any'})
},
{
@@ -435,7 +444,8 @@ class TestConnectParams(tb.TestCase):
'password': 'passw2',
'database': 'db2',
'sslmode': SSLMode.disable,
- 'ssl': False})
+ 'ssl': False,
+ 'target_session_attrs': 'any'})
},
{
@@ -456,7 +466,8 @@ class TestConnectParams(tb.TestCase):
'password': '123123',
'database': 'abcdef',
'ssl': True,
- 'sslmode': SSLMode.allow})
+ 'sslmode': SSLMode.allow,
+ 'target_session_attrs': 'any'})
},
{
@@ -484,7 +495,8 @@ class TestConnectParams(tb.TestCase):
'password': 'passw2',
'database': 'db2',
'sslmode': SSLMode.disable,
- 'ssl': False})
+ 'ssl': False,
+ 'target_session_attrs': 'any'})
},
{
@@ -505,7 +517,8 @@ class TestConnectParams(tb.TestCase):
'password': '123123',
'database': 'abcdef',
'ssl': True,
- 'sslmode': SSLMode.prefer})
+ 'sslmode': SSLMode.prefer,
+ 'target_session_attrs': 'any'})
},
{
@@ -514,7 +527,8 @@ class TestConnectParams(tb.TestCase):
'result': ([('localhost', 5555)], {
'user': 'user3',
'password': '123123',
- 'database': 'abcdef'})
+ 'database': 'abcdef',
+ 'target_session_attrs': 'any'})
},
{
@@ -523,6 +537,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('host1', 5432), ('host2', 5432)], {
'database': 'db',
'user': 'user',
+ 'target_session_attrs': 'any',
})
},
@@ -532,6 +547,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('host1', 1111), ('host2', 2222)], {
'database': 'db',
'user': 'user',
+ 'target_session_attrs': 'any',
})
},
@@ -541,6 +557,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('2001:db8::1234%eth0', 5432), ('::1', 5432)], {
'database': 'db',
'user': 'user',
+ 'target_session_attrs': 'any',
})
},
@@ -550,6 +567,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('2001:db8::1234', 1111), ('::1', 2222)], {
'database': 'db',
'user': 'user',
+ 'target_session_attrs': 'any',
})
},
@@ -559,6 +577,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('2001:db8::1234', 5432), ('::1', 5432)], {
'database': 'db',
'user': 'user',
+ 'target_session_attrs': 'any',
})
},
@@ -573,6 +592,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('host1', 1111), ('host2', 2222)], {
'database': 'db',
'user': 'foo',
+ 'target_session_attrs': 'any',
})
},
@@ -585,6 +605,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('host1', 1111), ('host2', 2222)], {
'database': 'db',
'user': 'foo',
+ 'target_session_attrs': 'any',
})
},
@@ -598,6 +619,7 @@ class TestConnectParams(tb.TestCase):
'result': ([('host1', 5432), ('host2', 5432)], {
'database': 'db',
'user': 'foo',
+ 'target_session_attrs': 'any',
})
},
@@ -617,7 +639,8 @@ class TestConnectParams(tb.TestCase):
'password': 'ask',
'database': 'db',
'ssl': True,
- 'sslmode': SSLMode.require})
+ 'sslmode': SSLMode.require,
+ 'target_session_attrs': 'any'})
},
{
@@ -638,7 +661,8 @@ class TestConnectParams(tb.TestCase):
'password': 'ask',
'database': 'db',
'sslmode': SSLMode.verify_full,
- 'ssl': True})
+ 'ssl': True,
+ 'target_session_attrs': 'any'})
},
{
@@ -646,7 +670,8 @@ class TestConnectParams(tb.TestCase):
'dsn': 'postgresql:///dbname?host=/unix_sock/test&user=spam',
'result': ([os.path.join('/unix_sock/test', '.s.PGSQL.5432')], {
'user': 'spam',
- 'database': 'dbname'})
+ 'database': 'dbname',
+ 'target_session_attrs': 'any'})
},
{
@@ -658,6 +683,7 @@ class TestConnectParams(tb.TestCase):
'user': 'us@r',
'password': 'p@ss',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
},
@@ -671,6 +697,7 @@ class TestConnectParams(tb.TestCase):
'user': 'user',
'password': 'p',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
},
@@ -683,6 +710,7 @@ class TestConnectParams(tb.TestCase):
{
'user': 'us@r',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
},
@@ -710,7 +738,8 @@ class TestConnectParams(tb.TestCase):
'user': 'user',
'database': 'user',
'sslmode': SSLMode.disable,
- 'ssl': None
+ 'ssl': None,
+ 'target_session_attrs': 'any',
}
)
},
@@ -724,7 +753,8 @@ class TestConnectParams(tb.TestCase):
'.s.PGSQL.5432'
)], {
'user': 'spam',
- 'database': 'db'
+ 'database': 'db',
+ 'target_session_attrs': 'any',
}
)
},
@@ -745,6 +775,7 @@ class TestConnectParams(tb.TestCase):
'database': 'db',
'ssl': True,
'sslmode': SSLMode.prefer,
+ 'target_session_attrs': 'any',
}
)
},
@@ -789,7 +820,7 @@ def run_testcase(self, testcase):
database = testcase.get('database')
sslmode = testcase.get('ssl')
server_settings = testcase.get('server_settings')
- target_session_attribute = testcase.get('target_session_attribute')
+ target_session_attrs = testcase.get('target_session_attrs')
expected = testcase.get('result')
expected_error = testcase.get('error')
@@ -814,7 +845,7 @@ def run_testcase(self, testcase):
passfile=passfile, database=database, ssl=sslmode,
direct_tls=False, connect_timeout=None,
server_settings=server_settings,
- target_session_attribute=target_session_attribute)
+ target_session_attrs=target_session_attrs)
params = {
k: v for k, v in params._asdict().items()
@@ -875,7 +906,9 @@ def test_test_connect_params_run_testcase(self):
'host': 'abc',
'result': (
[('abc', 5432)],
- {'user': '__test__', 'database': '__test__'}
+ {'user': '__test__',
+ 'database': '__test__',
+ 'target_session_attrs': 'any'}
)
})
@@ -913,6 +946,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for user@abc',
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -929,6 +963,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for user@abc',
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -943,6 +978,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for user@abc',
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -958,6 +994,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for localhost',
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -975,6 +1012,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for localhost',
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -992,6 +1030,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for cde:5433',
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1008,6 +1047,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for testuser',
'user': 'testuser',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1024,6 +1064,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass for testdb',
'user': 'user',
'database': 'testdb',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1040,6 +1081,7 @@ def test_connect_pgpass_regular(self):
'password': 'password from pgpass with escapes',
'user': R'test\\',
'database': R'test\:db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1067,6 +1109,7 @@ def test_connect_pgpass_badness_mode(self):
{
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1087,6 +1130,7 @@ def test_connect_pgpass_badness_non_file(self):
{
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1103,6 +1147,7 @@ def test_connect_pgpass_nonexistent(self):
{
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1123,6 +1168,7 @@ def test_connect_pgpass_inaccessible_file(self):
{
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1145,6 +1191,7 @@ def test_connect_pgpass_inaccessible_directory(self):
{
'user': 'user',
'database': 'db',
+ 'target_session_attrs': 'any',
}
)
})
@@ -1167,7 +1214,7 @@ async def test_connect_args_validation(self):
class TestConnection(tb.ConnectedTestCase):
async def test_connection_isinstance(self):
- self.assertTrue(isinstance(self.con, connection.Connection))
+ self.assertTrue(isinstance(self.con, pg_connection.Connection))
self.assertTrue(isinstance(self.con, object))
self.assertFalse(isinstance(self.con, list))
@@ -1260,6 +1307,27 @@ async def test_connection_implicit_host(self):
user=conn_spec.get('user'))
await con.close()
+ @unittest.skipIf(os.environ.get('PGHOST'), 'unmanaged cluster')
+ async def test_connection_no_home_dir(self):
+ with mock_no_home_dir():
+ con = await self.connect(
+ dsn='postgresql://foo/',
+ user='postgres',
+ database='postgres',
+ host='localhost')
+ await con.fetchval('SELECT 42')
+ await con.close()
+
+ with self.assertRaisesRegex(
+ RuntimeError,
+ 'Cannot determine home directory'
+ ):
+ with mock_no_home_dir():
+ await self.connect(
+ host='localhost',
+ user='ssl_user',
+ ssl='verify-full')
+
class BaseTestSSLConnection(tb.ConnectedTestCase):
@classmethod
@@ -1290,6 +1358,7 @@ def setUp(self):
create_script = []
create_script.append('CREATE ROLE ssl_user WITH LOGIN;')
+ create_script.append('GRANT ALL ON SCHEMA public TO ssl_user;')
self._add_hba_entry()
@@ -1304,6 +1373,7 @@ def tearDown(self):
self.cluster.trust_local_connections()
drop_script = []
+ drop_script.append('REVOKE ALL ON SCHEMA public FROM ssl_user;')
drop_script.append('DROP ROLE ssl_user;')
drop_script = '\n'.join(drop_script)
self.loop.run_until_complete(self.con.execute(drop_script))
@@ -1464,14 +1534,10 @@ async def test_executemany_uvloop_ssl_issue_700(self):
)
finally:
try:
- await con.execute('DROP TABLE test_many')
+ await con.execute('DROP TABLE IF EXISTS test_many')
finally:
await con.close()
- @unittest.skipIf(
- sys.version_info < (3, 7),
- "Python < 3.7 doesn't have ssl.TLSVersion"
- )
async def test_tls_version(self):
if self.cluster.get_pg_version() < (12, 0):
self.skipTest("PostgreSQL < 12 cannot set ssl protocol version")
@@ -1502,13 +1568,14 @@ async def test_tls_version(self):
'&ssl_min_protocol_version=TLSv1.1'
'&ssl_max_protocol_version=TLSv1.1'
)
- with self.assertRaisesRegex(ssl.SSLError, 'no protocols'):
- await self.connect(
- dsn='postgresql://ssl_user@localhost/postgres'
- '?sslmode=require'
- '&ssl_min_protocol_version=TLSv1.2'
- '&ssl_max_protocol_version=TLSv1.1'
- )
+ if not ssl.OPENSSL_VERSION.startswith('LibreSSL'):
+ with self.assertRaisesRegex(ssl.SSLError, 'no protocols'):
+ await self.connect(
+ dsn='postgresql://ssl_user@localhost/postgres'
+ '?sslmode=require'
+ '&ssl_min_protocol_version=TLSv1.2'
+ '&ssl_max_protocol_version=TLSv1.1'
+ )
con = await self.connect(
dsn='postgresql://ssl_user@localhost/postgres'
'?sslmode=require'
@@ -1740,7 +1807,7 @@ async def test_no_explicit_close_with_debug(self):
r'unclosed connection') as rw:
await self._run_no_explicit_close_test()
- msg = rw.warning.args[0]
+ msg = " ".join(rw.warning.args)
self.assertIn(' created at:\n', msg)
self.assertIn('in test_no_explicit_close_with_debug', msg)
finally:
@@ -1750,23 +1817,34 @@ async def test_no_explicit_close_with_debug(self):
class TestConnectionAttributes(tb.HotStandbyTestCase):
async def _run_connection_test(
- self, connect, target_attribute, expected_host
+ self, connect, target_attribute, expected_port
):
- conn = await connect(target_session_attribute=target_attribute)
- self.assertTrue(_get_connected_host(conn).startswith(expected_host))
+ conn = await connect(target_session_attrs=target_attribute)
+ self.assertTrue(_get_connected_host(conn).endswith(expected_port))
await conn.close()
- async def test_target_server_attribute_host(self):
- master_host = self.master_cluster.get_connection_spec()['host']
- standby_host = self.standby_cluster.get_connection_spec()['host']
+ async def test_target_server_attribute_port(self):
+ master_port = self.master_cluster.get_connection_spec()['port']
+ standby_port = self.standby_cluster.get_connection_spec()['port']
tests = [
- (self.connect_primary, 'primary', master_host),
- (self.connect_standby, 'standby', standby_host),
+ (self.connect_primary, 'primary', master_port),
+ (self.connect_standby, 'standby', standby_port),
]
- for connect, target_attr, expected_host in tests:
+ for connect, target_attr, expected_port in tests:
await self._run_connection_test(
- connect, target_attr, expected_host
+ connect, target_attr, expected_port
+ )
+ if self.master_cluster.get_pg_version()[0] < 14:
+ self.skipTest("PostgreSQL<14 does not support these features")
+ tests = [
+ (self.connect_primary, 'read-write', master_port),
+ (self.connect_standby, 'read-only', standby_port),
+ ]
+
+ for connect, target_attr, expected_port in tests:
+ await self._run_connection_test(
+ connect, target_attr, expected_port
)
async def test_target_attribute_not_matched(self):
@@ -1777,13 +1855,24 @@ async def test_target_attribute_not_matched(self):
for connect, target_attr in tests:
with self.assertRaises(exceptions.TargetServerAttributeNotMatched):
- await connect(target_session_attribute=target_attr)
+ await connect(target_session_attrs=target_attr)
+
+ if self.master_cluster.get_pg_version()[0] < 14:
+ self.skipTest("PostgreSQL<14 does not support these features")
+ tests = [
+ (self.connect_standby, 'read-write'),
+ (self.connect_primary, 'read-only'),
+ ]
+
+ for connect, target_attr in tests:
+ with self.assertRaises(exceptions.TargetServerAttributeNotMatched):
+ await connect(target_session_attrs=target_attr)
async def test_prefer_standby_when_standby_is_up(self):
- con = await self.connect(target_session_attribute='prefer-standby')
- standby_host = self.standby_cluster.get_connection_spec()['host']
+ con = await self.connect(target_session_attrs='prefer-standby')
+ standby_port = self.standby_cluster.get_connection_spec()['port']
connected_host = _get_connected_host(con)
- self.assertTrue(connected_host.startswith(standby_host))
+ self.assertTrue(connected_host.endswith(standby_port))
await con.close()
async def test_prefer_standby_picks_master_when_standby_is_down(self):
@@ -1791,20 +1880,23 @@ async def test_prefer_standby_picks_master_when_standby_is_down(self):
connection_spec = {
'host': [
primary_spec['host'],
- '/var/test/a/cluster/that/does/not/exist',
+ 'unlocalhost',
],
- 'port': [primary_spec['port'], 12345],
+ 'port': [primary_spec['port'], 15345],
'database': primary_spec['database'],
'user': primary_spec['user'],
- 'target_session_attribute': 'prefer-standby'
+ 'target_session_attrs': 'prefer-standby'
}
- con = await connection.connect(**connection_spec, loop=self.loop)
- master_host = self.master_cluster.get_connection_spec()['host']
+ con = await self.connect(**connection_spec)
+ master_port = self.master_cluster.get_connection_spec()['port']
connected_host = _get_connected_host(con)
- self.assertTrue(connected_host.startswith(master_host))
+ self.assertTrue(connected_host.endswith(master_port))
await con.close()
def _get_connected_host(con):
- return con._transport.get_extra_info('peername')
+ peername = con._transport.get_extra_info('peername')
+ if isinstance(peername, tuple):
+ peername = "".join((str(s) for s in peername if s))
+ return peername
diff --git a/tests/test_introspection.py b/tests/test_introspection.py
index 56f1d7a3..78561dd0 100644
--- a/tests/test_introspection.py
+++ b/tests/test_introspection.py
@@ -12,7 +12,7 @@
from asyncpg import connection as apg_con
-MAX_RUNTIME = 0.1
+MAX_RUNTIME = 0.25
class SlowIntrospectionConnection(apg_con.Connection):
diff --git a/tests/test_pool.py b/tests/test_pool.py
index f96cd2a6..540efb08 100644
--- a/tests/test_pool.py
+++ b/tests/test_pool.py
@@ -10,7 +10,6 @@
import os
import platform
import random
-import sys
import textwrap
import time
import unittest
@@ -740,7 +739,17 @@ async def test_pool_size_and_capacity(self):
self.assertEqual(pool.get_size(), 3)
self.assertEqual(pool.get_idle_size(), 0)
- @unittest.skipIf(sys.version_info[:2] < (3, 6), 'no asyncgen support')
+ async def test_pool_closing(self):
+ async with self.create_pool() as pool:
+ self.assertFalse(pool.is_closing())
+ await pool.close()
+ self.assertTrue(pool.is_closing())
+
+ async with self.create_pool() as pool:
+ self.assertFalse(pool.is_closing())
+ pool.terminate()
+ self.assertTrue(pool.is_closing())
+
async def test_pool_handles_transaction_exit_in_asyncgen_1(self):
pool = await self.create_pool(database='postgres',
min_size=1, max_size=1)
@@ -762,7 +771,6 @@ class MyException(Exception):
async for _ in iterate(con): # noqa
raise MyException()
- @unittest.skipIf(sys.version_info[:2] < (3, 6), 'no asyncgen support')
async def test_pool_handles_transaction_exit_in_asyncgen_2(self):
pool = await self.create_pool(database='postgres',
min_size=1, max_size=1)
@@ -787,7 +795,6 @@ class MyException(Exception):
del iterator
- @unittest.skipIf(sys.version_info[:2] < (3, 6), 'no asyncgen support')
async def test_pool_handles_asyncgen_finalization(self):
pool = await self.create_pool(database='postgres',
min_size=1, max_size=1)
diff --git a/tests/test_record.py b/tests/test_record.py
index 5b85fb4d..ef9388bb 100644
--- a/tests/test_record.py
+++ b/tests/test_record.py
@@ -523,6 +523,7 @@ class MyRecord(asyncpg.Record):
record_class=MyRecord,
)
self.assertIsInstance(r, MyRecord)
+ self.assertEqual(repr(r), "")
self.assertEqual(list(r.items()), [('a', 1), ('b', '2')])
self.assertEqual(list(r.keys()), ['a', 'b'])