diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..988c986a --- /dev/null +++ b/.editorconfig @@ -0,0 +1,6 @@ +root = true + +[*.{py,c}] +indent_style = tab +indent_size = tab +tab_width = 4 diff --git a/README b/README deleted file mode 100644 index bfb53949..00000000 --- a/README +++ /dev/null @@ -1,65 +0,0 @@ -About -===== - -py-postgresql is a Python 3 package providing modules for working with PostgreSQL. -This includes a high-level driver, and many other tools that support a developer -working with PostgreSQL databases. - -For a high performance async interface, MagicStack's asyncpg -http://github.com/MagicStack/asyncpg should be considered. - -py-postgresql, currently, does not have direct support for high-level async -interfaces provided by recent versions of Python. Future versions may change this. - -Errata ------- - -.. warning:: - In v1.3, `postgresql.driver.dbapi20.connect` will now raise `ClientCannotConnectError` directly. - Exception traps around connect should still function, but the `__context__` attribute - on the error instance will be `None` in the usual failure case as it is no longer - incorrectly chained. Trapping `ClientCannotConnectError` ahead of `Error` should - allow both cases to co-exist in the event that data is being extracted from - the `ClientCannotConnectError`. - -Installation ------------- - -Installation *should* be as simple as:: - - $ python3 ./setup.py install - -More information about installation is available via:: - - python -m postgresql.documentation.admin - -Basic Driver Usage ------------------- - -Using PG-API:: - - >>> import postgresql - >>> db = postgresql.open('pq://user:password@host:port/database') - >>> get_table = db.prepare("select * from information_schema.tables where table_name = $1") - >>> for x in get_table("tables"): - >>> print(x) - >>> print(get_table.first("tables")) - -However, a DB-API 2.0 driver is provided as well: `postgresql.driver.dbapi20`. - -Further Information -------------------- - -Online documentation can be retrieved from: - - http://py-postgresql.readthedocs.io - -Or, you can read them in your pager: python -m postgresql.documentation.index - -For information about PostgreSQL: - - http://postgresql.org - -For information about Python: - - http://python.org diff --git a/README.md b/README.md new file mode 100644 index 00000000..db9fffc7 --- /dev/null +++ b/README.md @@ -0,0 +1,73 @@ +### About + +py-postgresql is a Python 3 package providing modules for working with PostgreSQL. +Primarily, a high-level driver for querying databases. + +While py-postgresql is still usable for many purposes, asyncpg and PostgREST are +likely more suitable for most applications: + +- https://github.com/MagicStack/asyncpg +- https://postgrest.org + +py-postgresql, currently, does not have direct support for high-level async +interfaces provided by recent versions of Python. Future versions may change this. + +- [Project Future](https://github.com/python-postgres/fe/issues/124) + +### Advisory + +In v2.0, many, potentially breaking, changes are planned. +If you have automated installations using PyPI, make sure that they specify a major version. + +- Support for older versions of PostgreSQL and Python will be removed. This will allow the driver +to defer version parsing fixing (https://github.com/python-postgres/fe/issues/109), and better prepare for future versions. +- The connection establishment strategy will be simplified to only performing one attempt. 
`sslmode` +parameter should be considered deprecated. v1.4 will provide a new security parameter implying `sslmode=require`. See (https://github.com/python-postgres/fe/issues/122) and (https://github.com/python-postgres/fe/issues/75). +- StoredProcedure will be removed. See (https://github.com/python-postgres/fe/issues/80). + +### Installation + +From [PyPI](https://PyPI.org) using `pip`: + +```bash +python3 -m pip install py-postgresql +``` + +From [GitHub](https://github.com) using a full clone: + +```bash +git clone https://github.com/python-postgres/fe.git +cd fe +python3 ./setup.py install +``` + +### Basic Usage + +```python +import postgresql +db = postgresql.open('pq://user:password@host:port/database') + +get_table = db.prepare("SELECT * from information_schema.tables WHERE table_name = $1") +print(get_table("tables")) + +# Streaming, in a transaction. +with db.xact(): + for x in get_table.rows("tables"): + print(x) +``` + +REPL with connection bound to `db` builtin: + +```bash +python3 -m postgresql.bin.pg_python -I 'pq://postgres@localhost:5423/postgres' +``` + +### Documentation + +- https://py-postgresql.readthedocs.io +- https://github.com/python-postgres/fe/issues?q=label%3Ahowto + +### Related + +- https://postgresql.org +- https://python.org diff --git a/postgresql/api.py b/postgresql/api.py index 1aa87966..858c3f2b 100644 --- a/postgresql/api.py +++ b/postgresql/api.py @@ -11,7 +11,7 @@ This module is used to define "PG-API". It creates a set of ABCs that makes up the basic interfaces used to work with a PostgreSQL server. """ -import collections +import collections.abc import abc from .python.element import Element @@ -125,7 +125,8 @@ class Result(Element): @abc.abstractmethod def close(self) -> None: """ - Close the Result handle. + Close the Result discarding any supporting resources and causing + future read operations to emit empty record sets. """ @property @@ -202,18 +203,12 @@ def statement(self) -> ("Statement", None): `postgresql.api.Database.cursor_from_id`. """ -class Chunks( - Result, - collections.Iterator, - collections.Iterable, -): +@collections.abc.Iterator.register +class Chunks(Result): pass -class Cursor( - Result, - collections.Iterator, - collections.Iterable, -): +@collections.abc.Iterator.register +class Cursor(Result): """ A `Cursor` object is an interface to a sequence of tuples(rows). A result set. Cursors publish a file-like interface for reading tuples from a cursor @@ -259,10 +254,7 @@ def direction(self) -> bool: """ @abc.abstractmethod - def read(self, - quantity : "Number of rows to read" = None, - direction : "Direction to fetch in, defaults to `self.direction`" = None, - ) -> ["Row"]: + def read(self, quantity = None, direction = None) -> ["Row"]: """ Read, fetch, the specified number of rows and return them in a list. If quantity is `None`, all records will be fetched. @@ -312,7 +304,7 @@ class Execution(metaclass = abc.ABCMeta): """ @abc.abstractmethod - def __call__(self, *parameters : "Positional Parameters") -> ["Row"]: + def __call__(self, *parameters) -> ["Row"]: """ Execute the prepared statement with the given arguments as parameters. 
@@ -324,7 +316,7 @@ def __call__(self, *parameters : "Positional Parameters") -> ["Row"]: """ @abc.abstractmethod - def column(self, *parameters) -> collections.Iterable: + def column(self, *parameters) -> collections.abc.Iterable: """ Return an iterator producing the values of first column of the rows produced by the cursor created from the statement bound with the @@ -345,7 +337,7 @@ def column(self, *parameters) -> collections.Iterable: """ @abc.abstractmethod - def chunks(self, *parameters) -> collections.Iterable: + def chunks(self, *parameters) -> collections.abc.Iterable: """ Return an iterator producing sequences of rows produced by the cursor created from the statement bound with the given parameters. @@ -359,12 +351,12 @@ def chunks(self, *parameters) -> collections.Iterable: Each iteration returns sequences of rows *normally* of length(seq) == chunksize. If chunksize is unspecified, a default, positive integer will be filled in. The rows contained in the sequences are only required to - support the basic `collections.Sequence` interfaces; simple and quick + support the basic `collections.abc.Sequence` interfaces; simple and quick sequence types should be used. """ @abc.abstractmethod - def rows(self, *parameters) -> collections.Iterable: + def rows(self, *parameters) -> collections.abc.Iterable: """ Return an iterator producing rows produced by the cursor created from the statement bound with the given parameters. @@ -382,7 +374,7 @@ def rows(self, *parameters) -> collections.Iterable: """ @abc.abstractmethod - def column(self, *parameters) -> collections.Iterable: + def column(self, *parameters) -> collections.abc.Iterable: """ Return an iterator producing the values of the first column in the cursor created from the statement bound with the given parameters. @@ -407,7 +399,7 @@ def declare(self, *parameters) -> Cursor: """ @abc.abstractmethod - def first(self, *parameters) -> "'First' object that is returned by the query": + def first(self, *parameters): """ Execute the prepared statement with the given arguments as parameters. If the statement returns rows with multiple columns, return the first @@ -426,9 +418,7 @@ def first(self, *parameters) -> "'First' object that is returned by the query": """ @abc.abstractmethod - def load_rows(self, - iterable : "A iterable of tuples to execute the statement with" - ): + def load_rows(self, iterable): """ Given an iterable, `iterable`, feed the produced parameters to the query. This is a bulk-loading interface for parameterized queries. @@ -445,9 +435,7 @@ def load_rows(self, """ @abc.abstractmethod - def load_chunks(self, - iterable : "A iterable of chunks of tuples to execute the statement with" - ): + def load_chunks(self, iterable): """ Given an iterable, `iterable`, feed the produced parameters of the chunks produced by the iterable to the query. This is a bulk-loading interface @@ -465,11 +453,10 @@ def load_chunks(self, that the operation can be optimized. """ -class Statement( - Element, - collections.Callable, - collections.Iterable, -): +@collections.abc.Iterator.register +@collections.abc.Callable.register +@Execution.register +class Statement(Element): """ Instances of `Statement` are returned by the `prepare` method of `Database` instances. @@ -595,13 +582,10 @@ def close(self) -> None: """ Close the prepared statement releasing resources associated with it. 
""" -Execution.register(Statement) PreparedStatement = Statement -class StoredProcedure( - Element, - collections.Callable, -): +@collections.abc.Callable.register +class StoredProcedure(Element): """ A function stored on the database. """ @@ -609,7 +593,7 @@ class StoredProcedure( _e_factors = ('database',) @abc.abstractmethod - def __call__(self, *args, **kw) -> (object, Cursor, collections.Iterable): + def __call__(self, *args, **kw) -> (object, Cursor, collections.abc.Iterable): """ Execute the procedure with the given arguments. If keyword arguments are passed they must be mapped to the argument whose name matches the key. @@ -759,10 +743,8 @@ def __exit__(self, typ, obj, tb): block's exit. """ -class Settings( - Element, - collections.MutableMapping -): +@collections.abc.MutableMapping.register +class Settings(Element): """ A mapping interface to the session's settings. This provides a direct interface to ``SHOW`` or ``SET`` commands. Identifiers and values need @@ -881,10 +863,7 @@ def client_port(self) -> (int, None): @property @abc.abstractmethod - def xact(self, - isolation : "ISOLATION LEVEL to use with the transaction" = None, - mode : "Mode of the transaction, READ ONLY or READ WRITE" = None, - ) -> Transaction: + def xact(self, isolation = None, mode = None) -> Transaction: """ Create a `Transaction` object using the given keyword arguments as its configuration. @@ -926,9 +905,14 @@ def prepare(self, sql : str) -> Statement: """ @abc.abstractmethod - def statement_from_id(self, - statement_id : "The statement's identification string.", - ) -> Statement: + def query(self, sql : str, *args) -> Execution: + """ + Prepare and execute the statement, `sql`, with the given arguments. + Equivalent to ``db.prepare(sql)(*args)``. + """ + + @abc.abstractmethod + def statement_from_id(self, statement_id) -> Statement: """ Create a `Statement` object that was already prepared on the server. The distinction between this and a regular query is that it @@ -938,9 +922,7 @@ def statement_from_id(self, """ @abc.abstractmethod - def cursor_from_id(self, - cursor_id : "The cursor's identification string." - ) -> Cursor: + def cursor_from_id(self, cursor_id) -> Cursor: """ Create a `Cursor` object from the given `cursor_id` that was already declared on the server. @@ -953,10 +935,7 @@ def cursor_from_id(self, """ @abc.abstractmethod - def proc(self, - procedure_id : \ - "The procedure identifier; a valid ``regprocedure`` or Oid." - ) -> StoredProcedure: + def proc(self, procedure_id) -> StoredProcedure: """ Create a `StoredProcedure` instance using the given identifier. @@ -1030,7 +1009,7 @@ def listening_channels(self) -> ["channel name", ...]: """ @abc.abstractmethod - def iternotifies(self, timeout = None) -> collections.Iterator: + def iternotifies(self, timeout = None) -> collections.abc.Iterator: """ Return an iterator to the notifications received by the connection. The iterator *must* produce triples in the form ``(channel, payload, pid)``. @@ -1096,7 +1075,7 @@ def fatal_exception_message(self, err : Exception) -> (str, None): """ @abc.abstractmethod - def socket_secure(self, socket : "socket object") -> "secured socket": + def socket_secure(self, socket): """ Return a reference to the secured socket using the given parameters. 
@@ -1106,7 +1085,7 @@ def socket_secure(self, socket : "socket object") -> "secured socket": """ @abc.abstractmethod - def socket_factory_sequence(self) -> [collections.Callable]: + def socket_factory_sequence(self) -> [collections.abc.Callable]: """ Return a sequence of `SocketCreator`s that `Connection` objects will use to create the socket object. @@ -1145,11 +1124,12 @@ def __call__(self, *args, **kw): return self.driver.connection(self, *args, **kw) def __init__(self, - user : "required keyword specifying the user name(str)" = None, + user : str = None, password : str = None, database : str = None, settings : (dict, [(str,str)]) = None, category : Category = None, + **kw, ): if user is None: # sure, it's a "required" keyword, makes for better documentation @@ -1179,15 +1159,6 @@ def connector(self) -> Connector: communication and initialization. """ - @property - @abc.abstractmethod - def query(self) -> Execution: - """ - The :py:class:`Execution` instance providing a one-shot query interface:: - - connection.query.(sql, *parameters) == connection.prepare(sql).(*parameters) - """ - @property @abc.abstractmethod def closed(self) -> bool: @@ -1317,18 +1288,13 @@ def data_directory(self) -> str: @abc.abstractmethod def init(self, - initdb : "path to the initdb to use" = None, - user : "name of the cluster's superuser" = None, - password : "superuser's password" = None, - encoding : "the encoding to use for the cluster" = None, - locale : "the locale to use for the cluster" = None, - collate : "the collation to use for the cluster" = None, - ctype : "the ctype to use for the cluster" = None, - monetary : "the monetary to use for the cluster" = None, - numeric : "the numeric to use for the cluster" = None, - time : "the time to use for the cluster" = None, - text_search_config : "default text search configuration" = None, - xlogdir : "location for the transaction log directory" = None, + initdb = None, + user = None, password = None, + encoding = None, locale = None, + collate = None, ctype = None, + monetary = None, numeric = None, time = None, + text_search_config = None, + xlogdir = None, ): """ Create the cluster at the `data_directory` associated with the Cluster @@ -1366,9 +1332,7 @@ def restart(self): """ @abc.abstractmethod - def wait_until_started(self, - timeout : "maximum time to wait" = 10 - ): + def wait_until_started(self, timeout = 10): """ After the start() method is ran, the database may not be ready for use. This method provides a mechanism to block until the cluster is ready for @@ -1379,9 +1343,7 @@ def wait_until_started(self, """ @abc.abstractmethod - def wait_until_stopped(self, - timeout : "maximum time to wait" = 10 - ): + def wait_until_stopped(self, timeout = 10): """ After the stop() method is ran, the database may still be running. 
This method provides a mechanism to block until the cluster is completely @@ -1416,5 +1378,3 @@ def __exit__(self, exc, val, tb): __docformat__ = 'reStructuredText' if __name__ == '__main__': help(__package__ + '.api') -## -# vim: ts=3:sw=3:noet: diff --git a/postgresql/bin/pg_python.py b/postgresql/bin/pg_python.py index 28d993ec..a97aa97b 100644 --- a/postgresql/bin/pg_python.py +++ b/postgresql/bin/pg_python.py @@ -134,5 +134,3 @@ def command(argv = sys.argv): if __name__ == '__main__': sys.exit(command(sys.argv)) -## -# vim: ts=3:sw=3:noet: diff --git a/postgresql/clientparameters.py b/postgresql/clientparameters.py index 59ed876b..55409767 100644 --- a/postgresql/clientparameters.py +++ b/postgresql/clientparameters.py @@ -20,7 +20,7 @@ to support sub-dictionaries like settings:: >>> normal_params = { - 'user' : 'jwp', + 'user' : 'dbusername', 'host' : 'localhost', 'settings' : {'default_statistics_target' : 200, 'search_path' : 'home,public'} } @@ -505,7 +505,7 @@ def x_pg_service(service_name, config): ) def x_pg_ldap(ldap_url, config): - raise NotImplementedError("cannot resolve ldap URLs: " + str(ldap_url)) + raise Exception("cannot resolve ldap URLs") default_x_callbacks = { 'settings' : x_settings, @@ -580,15 +580,15 @@ def resolve_pg_service_file( return None def collect( - parsed_options = None, - no_defaults = False, - environ = os.environ, - environ_prefix = 'PG', - default_pg_sysconfdir = None, - pg_service_file = None, - prompt_title = '', - parameters = (), - ): + parsed_options = None, + no_defaults = False, + environ = os.environ, + environ_prefix = 'PG', + default_pg_sysconfdir = None, + pg_service_file = None, + prompt_title = '', + parameters = (), +): """ Build a normalized client parameters dictionary for use with a connection construction interface. diff --git a/postgresql/cluster.py b/postgresql/cluster.py index 2b3f8ac3..122103eb 100644 --- a/postgresql/cluster.py +++ b/postgresql/cluster.py @@ -40,23 +40,35 @@ class ClusterError(pg_exc.Error): code = '-C000' source = 'CLUSTER' class ClusterInitializationError(ClusterError): - "General cluster initialization failure" + """ + General cluster initialization failure. + """ code = '-Cini' class InitDBError(ClusterInitializationError): - "A non-zero result was returned by the initdb command" + """ + A non-zero result was returned by the initdb command. + """ code = '-Cidb' class ClusterStartupError(ClusterError): - "Cluster startup failed" + """ + Cluster startup failed. + """ code = '-Cbot' class ClusterNotRunningError(ClusterError): - "Cluster is not running" + """ + Cluster is not running. + """ code = '-Cdwn' class ClusterTimeoutError(ClusterError): - "Cluster operation timed out" + """ + Cluster operation timed out. + """ code = '-Cout' class ClusterWarning(pg_exc.Warning): - "Warning issued by cluster operations" + """ + Warning issued by cluster operations. 
+ """ code = '-Cwrn' source = 'CLUSTER' @@ -154,10 +166,7 @@ def hba_file(self, join = os.path.join): join(self.data_directory, self.DEFAULT_HBA_FILENAME) ) - def __init__(self, - installation, - data_directory, - ): + def __init__(self, installation, data_directory): self.installation = installation self.data_directory = os.path.abspath(data_directory) self.pgsql_dot_conf = os.path.join( @@ -190,11 +199,7 @@ def __exit__(self, typ, val, tb): self.stop() self.wait_until_stopped() - def init(self, - password = None, - timeout = None, - **kw - ): + def init(self, password = None, timeout = None, **kw): """ Create the cluster at the given `data_directory` using the provided keyword parameters as options to the command. @@ -323,10 +328,7 @@ def drop(self): os.rmdir(os.path.join(root, name)) os.rmdir(self.data_directory) - def start(self, - logfile = None, - settings = None - ): + def start(self, logfile = None, settings = None): """ Start the cluster. """ @@ -562,10 +564,7 @@ def ready_for_connections(self): # credentials... strange, but true.. return e if e is not None else True - def wait_until_started(self, - timeout = 10, - delay = 0.05, - ): + def wait_until_started(self, timeout = 10, delay = 0.05): """ After the `start` method is used, this can be ran in order to block until the cluster is ready for use. @@ -614,10 +613,7 @@ def wait_until_started(self, raise e time.sleep(delay) - def wait_until_stopped(self, - timeout = 10, - delay = 0.05 - ): + def wait_until_stopped(self, timeout = 10, delay = 0.05): """ After the `stop` method is used, this can be ran in order to block until the cluster is shutdown. @@ -639,5 +635,3 @@ def wait_until_stopped(self, creator = self, ) time.sleep(delay) -## -# vim: ts=3:sw=3:noet: diff --git a/postgresql/configfile.py b/postgresql/configfile.py index f20a04b0..2f94f0e1 100644 --- a/postgresql/configfile.py +++ b/postgresql/configfile.py @@ -1,7 +1,9 @@ ## # .configfile ## -'PostgreSQL configuration file parser and editor functions.' +""" +PostgreSQL configuration file parser and editor functions. +""" import sys import os from . import string as pg_str @@ -76,18 +78,18 @@ def unquote(s, quote = quote): return s[1:-1].replace(quote*2, quote) def write_config(map, writer, keys = None): - 'A configuration writer that will trample & merely write the settings' + """ + A configuration writer that will trample & merely write the settings. + """ if keys is None: keys = map for k in keys: writer('='.join((k, map[k])) + os.linesep) -def alter_config( - map : "the configuration changes to make", - fo : "file object containing configuration lines(Iterable)", - keys : "the keys to change; defaults to map.keys()" = None -): - 'Alters a configuration file without trampling on the existing structure' +def alter_config(map, fo, keys = None): + """ + Alters a configuration file without trampling on the existing structure. + """ if keys is None: keys = list(map.keys()) # Normalize keys and map them back to @@ -212,7 +214,7 @@ class ConfigFile(pg_api.Settings): """ Provides a mapping interface to a configuration file. - Every action will cause the file to be wholly read, so using `update` to make + Every operation will cause the file to be wholly read, so using `update` to make multiple changes is desirable. 
""" _e_factors = ('path',) @@ -315,5 +317,3 @@ def getset(self, keys): for x in (keys - set(cfg.keys())): cfg[x] = None return cfg -## -# vim: ts=3:sw=3:noet: diff --git a/postgresql/copyman.py b/postgresql/copyman.py index 70ef7e51..43229822 100644 --- a/postgresql/copyman.py +++ b/postgresql/copyman.py @@ -9,7 +9,7 @@ """ import sys from abc import abstractmethod, abstractproperty -from collections import Iterator +from collections.abc import Iterator from .python.element import Element, ElementSet from .python.structlib import ulong_unpack, ulong_pack from .protocol.buffer import pq_message_stream @@ -443,7 +443,7 @@ def __next__(self): return self.nextchunk() def __init__(self, - recv_into : "callable taking writable buffer and size", + recv_into, buffer_size = default_buffer_size ): super().__init__() diff --git a/postgresql/documentation/bin.rst b/postgresql/documentation/bin.rst index 31de6680..43e7a76e 100644 --- a/postgresql/documentation/bin.rst +++ b/postgresql/documentation/bin.rst @@ -86,17 +86,17 @@ pg_python Examples Module execution taking advantage of the new built-ins:: $ python3 -m postgresql.bin.pg_python -h localhost -W -m timeit "prepare('SELECT 1').first()" - Password for pg_python[pq://jwp@localhost:5432]: + Password for pg_python[pq://dbusername@localhost:5432]: 1000 loops, best of 3: 1.35 msec per loop $ python3 -m postgresql.bin.pg_python -h localhost -W -m timeit -s "ps=prepare('SELECT 1')" "ps.first()" - Password for pg_python[pq://jwp@localhost:5432]: + Password for pg_python[pq://dbusername@localhost:5432]: 1000 loops, best of 3: 442 usec per loop Simple interactive usage:: $ python3 -m postgresql.bin.pg_python -h localhost -W - Password for pg_python[pq://jwp@localhost:5432]: + Password for pg_python[pq://dbusername@localhost:5432]: >>> ps = prepare('select 1') >>> ps.first() 1 @@ -142,22 +142,22 @@ Examples Modifying a simple configuration file:: $ echo "setting = value" >pg.conf - + # change 'setting' $ python3 -m postgresql.bin.pg_dotconf pg.conf setting=newvalue - + $ cat pg.conf setting = 'newvalue' - + # new settings are appended to the file $ python3 -m postgresql.bin.pg_dotconf pg.conf another_setting=value $ cat pg.conf setting = 'newvalue' another_setting = 'value' - + # comment a setting $ python3 -m postgresql.bin.pg_dotconf pg.conf another_setting - + $ cat pg.conf setting = 'newvalue' #another_setting = 'value' diff --git a/postgresql/documentation/changes-v1.3.rst b/postgresql/documentation/changes-v1.3.rst new file mode 100644 index 00000000..8b8686c3 --- /dev/null +++ b/postgresql/documentation/changes-v1.3.rst @@ -0,0 +1,14 @@ +Changes in v1.3 +=============== + +1.3.0 +----- + + * Commit DB-API 2.0 ClientCannotConnect exception correction. + * Eliminate types-as-documentation annotations. + * Add Connection.transaction alias for asyncpg consistency. + * Eliminate multiple inheritance in `postgresql.api` in favor of ABC registration. + * Add support for PGTEST environment variable (pq-IRI) to improve test performance + and to aid in cases where the target fixture is already available. + This should help for testing the driver against servers that are not actually + postgresql. diff --git a/postgresql/documentation/clientparameters.rst b/postgresql/documentation/clientparameters.rst index e85bd675..8c8441cf 100644 --- a/postgresql/documentation/clientparameters.rst +++ b/postgresql/documentation/clientparameters.rst @@ -68,7 +68,7 @@ accept: ``environ`` Environment variables to extract client parameter variables from. 
- Defaults to `os.environ` and expects a `collections.Mapping` interface. + Defaults to `os.environ` and expects a `collections.abc.Mapping` interface. ``environ_prefix`` Environment variable prefix to use. Defaults to "PG". This allows the @@ -100,16 +100,16 @@ instructed to do by the ``prompt_password`` key in the parameters:: >>> import postgresql.clientparameters as pg_param >>> p = pg_param.collect(prompt_title = 'my_prompt!', parameters = {'prompt_password':True}) - Password for my_prompt![pq://jwp@localhost:5432]: + Password for my_prompt![pq://dbusername@localhost:5432]: >>> p - {'host': 'localhost', 'user': 'jwp', 'password': 'secret', 'port': 5432} + {'host': 'localhost', 'user': 'dbusername', 'password': 'secret', 'port': 5432} If `None`, it will leave the necessary password resolution information in the parameters dictionary for ``resolve_password``:: >>> p = pg_param.collect(prompt_title = None, parameters = {'prompt_password':True}) >>> p - {'pgpassfile': '/Users/jwp/.pgpass', 'prompt_password': True, 'host': 'localhost', 'user': 'jwp', 'port': 5432} + {'pgpassfile': '/home/{USER}/.pgpass', 'prompt_password': True, 'host': 'localhost', 'user': 'dbusername', 'port': 5432} Of course, ``'prompt_password'`` is normally specified when ``parsed_options`` received a ``-W`` option from the command line:: @@ -118,9 +118,9 @@ received a ``-W`` option from the command line:: >>> co, ca = op.parse_args(['-W']) >>> p = pg_param.collect(parsed_options = co) >>> p=pg_param.collect(parsed_options = co) - Password for [pq://jwp@localhost:5432]: + Password for [pq://dbusername@localhost:5432]: >>> p - {'host': 'localhost', 'user': 'jwp', 'password': 'secret', 'port': 5432} + {'host': 'localhost', 'user': 'dbusername', 'password': 'secret', 'port': 5432} >>> @@ -166,10 +166,10 @@ When resolution occurs, the ``prompt_password``, ``prompt_title``, and >>> p=pg_param.collect(prompt_title = None) >>> p - {'pgpassfile': '/Users/jwp/.pgpass', 'host': 'localhost', 'user': 'jwp', 'port': 5432} + {'pgpassfile': '/home/{USER}/.pgpass', 'host': 'localhost', 'user': 'dbusername', 'port': 5432} >>> pg_param.resolve_password(p) >>> p - {'host': 'localhost', 'password': 'secret', 'user': 'jwp', 'port': 5432} + {'host': 'localhost', 'password': 'secret', 'user': 'dbusername', 'port': 5432} Defaults diff --git a/postgresql/documentation/cluster.rst b/postgresql/documentation/cluster.rst index 0ba2bc2d..1993ea28 100644 --- a/postgresql/documentation/cluster.rst +++ b/postgresql/documentation/cluster.rst @@ -348,7 +348,7 @@ Methods and properties available on `postgresql.cluster.Cluster` instances: `Cluster.wait_until_started`. ``Cluster.settings`` - A `collections.Mapping` interface to the ``postgresql.conf`` file of the + A `collections.abc.Mapping` interface to the ``postgresql.conf`` file of the cluster. A notable extension to the mapping interface is the ``getset`` method. This diff --git a/postgresql/documentation/copyman.rst b/postgresql/documentation/copyman.rst index 37304937..d4a18cb1 100644 --- a/postgresql/documentation/copyman.rst +++ b/postgresql/documentation/copyman.rst @@ -260,7 +260,7 @@ The following Producers are available: ``postgresql.copyman.StatementProducer(postgresql.api.Statement)`` Given a Statement producing COPY data, construct a Producer. 
- ``postgresql.copyman.IteratorProducer(collections.Iterator)`` + ``postgresql.copyman.IteratorProducer(collections.abc.Iterator)`` Given an Iterator producing *chunks* of COPY lines, construct a Producer to manage the data coming from the iterator. diff --git a/postgresql/documentation/driver.rst b/postgresql/documentation/driver.rst index b373f29e..00d3aa73 100644 --- a/postgresql/documentation/driver.rst +++ b/postgresql/documentation/driver.rst @@ -319,7 +319,7 @@ The methods and properties on the connection object are ready for use: ``Connection.proc(procedure_id)`` Create a `postgresql.api.StoredProcedure` object referring to a stored procedure on the database. The returned object will provide a - `collections.Callable` interface to the stored procedure on the server. See + `collections.abc.Callable` interface to the stored procedure on the server. See `Stored Procedures`_ for more information. ``Connection.statement_from_id(statement_id)`` @@ -350,7 +350,7 @@ The methods and properties on the connection object are ready for use: information. ``Connection.settings`` - A property providing a `collections.MutableMapping` interface to the + A property providing a `collections.abc.MutableMapping` interface to the database's SQL settings. See `Settings`_ for more information. ``Connection.clone()`` @@ -557,7 +557,7 @@ Prepared statement objects have a few execution methods: ``Statement.chunks(*parameters)`` This access point is designed for situations where rows are being streamed out - quickly. It is a method that returns a ``collections.Iterator`` that produces + quickly. It is a method that returns a ``collections.abc.Iterator`` that produces *sequences* of rows. This is the most efficient way to get rows from the database. The rows in the sequences are ``builtins.tuple`` objects. @@ -569,11 +569,11 @@ Prepared statement objects have a few execution methods: ``Statement.close()`` Close the statement inhibiting further use. - ``Statement.load_rows(collections.Iterable(parameters))`` + ``Statement.load_rows(collections.abc.Iterable(parameters))`` Given an iterable producing parameters, execute the statement for each iteration. Always returns `None`. - ``Statement.load_chunks(collections.Iterable(collections.Iterable(parameters)))`` + ``Statement.load_chunks(collections.abc.Iterable(collections.abc.Iterable(parameters)))`` Given an iterable of iterables producing parameters, execute the statement for each parameter produced. However, send the all execution commands with the corresponding parameters of each chunk before reading any results. @@ -1075,7 +1075,7 @@ critical. Row Interface Points -------------------- -Rows implement the `collections.Mapping` and `collections.Sequence` interfaces. +Rows implement the `collections.abc.Mapping` and `collections.abc.Sequence` interfaces. ``Row.keys()`` An iterable producing the column names. Order is not guaranteed. 
See the @@ -1214,8 +1214,8 @@ Queries have access to all execution methods: * ``Connection.query.first(sql, *parameters)`` * ``Connection.query.chunks(sql, *parameters)`` * ``Connection.query.declare(sql, *parameters)`` - * ``Connection.query.load_rows(sql, collections.Iterable(parameters))`` - * ``Connection.query.load_chunks(collections.Iterable(collections.Iterable(parameters)))`` + * ``Connection.query.load_rows(sql, collections.abc.Iterable(parameters))`` + * ``Connection.query.load_chunks(collections.abc.Iterable(collections.abc.Iterable(parameters)))`` In cases where a sequence of one-shot queries needs to be performed, it may be important to avoid unnecessary repeat attribute resolution from the connection object as the ``query`` @@ -1461,7 +1461,7 @@ Settings SQL's SHOW and SET provides a means to configure runtime parameters on the database("GUC"s). In order to save the user some grief, a -`collections.MutableMapping` interface is provided to simplify configuration. +`collections.abc.MutableMapping` interface is provided to simplify configuration. The ``settings`` attribute on the connection provides the interface extension. @@ -1485,7 +1485,7 @@ Settings Interface Points ------------------------- Manipulation and interrogation of the connection's settings is achieved by -using the standard `collections.MutableMapping` interfaces. +using the standard `collections.abc.MutableMapping` interfaces. ``Connection.settings[k]`` Get the value of a single setting. diff --git a/postgresql/documentation/gotchas.rst b/postgresql/documentation/gotchas.rst index beb0a884..915e3360 100644 --- a/postgresql/documentation/gotchas.rst +++ b/postgresql/documentation/gotchas.rst @@ -5,14 +5,6 @@ It is recognized that decisions were made that may not always be ideal for a given user. In order to highlight those potential issues and hopefully bring some sense into a confusing situation, this document was drawn. -Non-English Locales -------------------- - -Many non-english locales are not supported due to the localization of the severity field -in messages and errors sent to the client. Internally, py-postgresql uses this to allow -client side filtering of messages and to identify FATAL connection errors that allow the -client to recognize that it should be expecting the connection to terminate. - Thread Safety ------------- @@ -112,3 +104,11 @@ This exception is raised by a generic processing routine whose functionality is abstract in nature, so the message is abstract as well. It essentially means that a tuple in the sequence given to the loading method had too many or too few items. + +Non-English Locales +------------------- + +In the past, some builds of PostgreSQL localized the severity field of some protocol messages. +`py-postgresql` expects these fields to be consistent with their english terms. If the driver +raises strange exceptions during the use of non-english locales, it may be necessary to use an +english setting in order to coax the server into issueing familiar terms. diff --git a/postgresql/documentation/index.rst b/postgresql/documentation/index.rst index 322438d4..9189c563 100644 --- a/postgresql/documentation/index.rst +++ b/postgresql/documentation/index.rst @@ -14,12 +14,11 @@ Contents admin driver - copyman + clientparameters + cluster notifyman alock - cluster - lib - clientparameters + copyman gotchas Reference @@ -37,6 +36,7 @@ Changes .. 
toctree:: :maxdepth: 1 + changes-v1.3 changes-v1.2 changes-v1.1 changes-v1.0 diff --git a/postgresql/documentation/notifyman.rst b/postgresql/documentation/notifyman.rst index d774ee52..0b214750 100644 --- a/postgresql/documentation/notifyman.rst +++ b/postgresql/documentation/notifyman.rst @@ -20,7 +20,7 @@ receives notifications. The `postgresql.notifyman.NotificationManager` class is used to wait for messages to come in on a set of connections, pick up the messages, and deliver -the messages to the object's user via the `collections.Iterator` protocol. +the messages to the object's user via the `collections.abc.Iterator` protocol. Listening on a Single Connection diff --git a/postgresql/documentation/sphinx/changes-v1.3.rst b/postgresql/documentation/sphinx/changes-v1.3.rst new file mode 120000 index 00000000..51c4b25d --- /dev/null +++ b/postgresql/documentation/sphinx/changes-v1.3.rst @@ -0,0 +1 @@ +../changes-v1.3.rst \ No newline at end of file diff --git a/postgresql/driver/__init__.py b/postgresql/driver/__init__.py index c7c2c433..93f014fd 100644 --- a/postgresql/driver/__init__.py +++ b/postgresql/driver/__init__.py @@ -10,5 +10,7 @@ default = Driver() def connect(*args, **kw): - 'Establish a connection using the default driver.' + """ + Establish a connection using the default driver. + """ return default.connect(*args, **kw) diff --git a/postgresql/driver/dbapi20.py b/postgresql/driver/dbapi20.py index d0a2ac6b..dafd49af 100644 --- a/postgresql/driver/dbapi20.py +++ b/postgresql/driver/dbapi20.py @@ -325,7 +325,7 @@ class Connection(Connection): NotSupportedError = NotSupportedError # Explicitly manage DB-API connected state to properly - # throw the already closed error. This will be active in 1.3. + # throw the already closed error. _dbapi_connected_flag = False def autocommit_set(self, val): @@ -361,7 +361,7 @@ def connect(self, *args, **kw): self._dbapi_connected_flag = True def close(self): - if self.closed:# and self._dbapi_connected_flag: + if self.closed and self._dbapi_connected_flag: raise Error( "connection already closed", source = 'CLIENT', diff --git a/postgresql/driver/pq3.py b/postgresql/driver/pq3.py index b4695af5..6a778bd0 100644 --- a/postgresql/driver/pq3.py +++ b/postgresql/driver/pq3.py @@ -10,7 +10,6 @@ from traceback import format_exception from itertools import repeat, chain, count from functools import partial -from abc import abstractmethod from codecs import lookup as lookup_codecs from operator import itemgetter @@ -219,17 +218,18 @@ def type_from_oid(self, oid): return typ def resolve_descriptor(self, desc, index): - 'create a sequence of I/O routines from a pq descriptor' + """ + Create a sequence of I/O routines from a pq descriptor. 
+ """ return [ (self.resolve(x[3]) or (None, None))[index] for x in desc ] # lookup a type's IO routines from a given typid def resolve(self, - typid : "The Oid of the type to resolve pack and unpack routines for.", - from_resolution_of : \ - "Sequence of typid's used to identify infinite recursion" = (), - builtins : "types.io.resolve" = pg_types_io.resolve, + typid : int, + from_resolution_of : [int] = (), + builtins = pg_types_io.resolve, quote_ident = quote_ident ): if from_resolution_of and typid in from_resolution_of: @@ -406,17 +406,19 @@ def RowTypeFactory(self, attribute_map = {}, _Row = pg_types.Row.from_sequence, # record_io_factory - Build an I/O pair for RECORDs ## def record_io_factory(self, - column_io : "sequence (pack,unpack) tuples corresponding to the columns", - typids : "sequence of type Oids; index must correspond to the composite's", - attmap : "mapping of column name to index number", - typnames : "sequence of sql type names in order", - attnames : "sequence of attribute names in order", - composite_relid : "oid of the composite relation", - composite_name : "the name of the composite type", + column_io, typids, attmap, typnames, attnames, composite_relid, composite_name, get0 = get0, get1 = get1, fmt_errmsg = "failed to {0} attribute {1}, {2}::{3}, of composite {4} from wire data".format ): + # column_io: sequence (pack,unpack) tuples corresponding to the columns. + # typids: sequence of type Oids; index must correspond to the composite's. + # attmap: mapping of column name to index number. + # typnames: sequence of sql type names in order. + # attnames: sequence of attribute names in order. + # composite_relid: oid of the composite relation. + # composite_name: the name of the composite type. + fpack = tuple(map(get0, column_io)) funpack = tuple(map(get1, column_io)) row_constructor = self.RowTypeFactory(attribute_map = attmap, composite_relid = composite_relid) @@ -448,18 +450,18 @@ def raise_unpack_tuple_error(cause, procs, tup, itemnum): )), cause = cause) def unpack_a_record(data, - unpack = io_lib.record_unpack, - process_tuple = process_tuple, - row_constructor = row_constructor - ): + unpack = io_lib.record_unpack, + process_tuple = process_tuple, + row_constructor = row_constructor + ): data = tuple([x[1] for x in unpack(data)]) return row_constructor(process_tuple(funpack, data, raise_unpack_tuple_error)) sorted_atts = sorted(attmap.items(), key = get1) def pack_a_record(data, - pack = io_lib.record_pack, - process_tuple = process_tuple, - ): + pack = io_lib.record_pack, + process_tuple = process_tuple, + ): if isinstance(data, dict): data = [data.get(k) for k,_ in sorted_atts] return pack( @@ -628,12 +630,12 @@ class Output(object): _complete_message = None - @abstractmethod def _init(self): """ Bind a cursor based on the configured parameters. """ # The local initialization for the specific cursor. + raise NotImplementedError def __init__(self, cursor_id, wref = weakref.ref, ID = ID): self.cursor_id = cursor_id @@ -765,8 +767,8 @@ def _process_tuple_chunk_Column(self, x, range = range): # Process the element.Tuple message in x for rows() def _process_tuple_chunk_Row(self, x, - proc = process_chunk, - ): + proc = process_chunk, + ): rc = self._row_constructor return [ rc(y) @@ -778,7 +780,7 @@ def _process_tuple_chunk(self, x, proc = process_chunk): return proc(self._output_io, x, self._raise_column_tuple_error) def _raise_column_tuple_error(self, cause, procs, tup, itemnum): - 'for column processing' + # For column processing. 
# The element traceback will include the full list of parameters. data = repr(tup[itemnum]) if len(data) > 80: @@ -835,12 +837,16 @@ def sql_column_types(self): ] def command(self): - "The completion message's command identifier" + """ + The completion message's command identifier. + """ if self._complete_message is not None: return self._complete_message.extract_command().decode('ascii') def count(self): - "The completion message's count number" + """ + The completion message's count number. + """ if self._complete_message is not None: return self._complete_message.extract_count() @@ -983,17 +989,17 @@ def __init__(self, statement, parameters, cursor_id): self.database = statement.database Output.__init__(self, cursor_id or ID(self)) - @abstractmethod def _bind(self): """ Generate the commands needed to bind the cursor. """ + raise NotImplementedError - @abstractmethod def _fetch(self): """ Generate the commands needed to bind the cursor. """ + raise NotImplementedError def _init(self): self._command = self._fetch() @@ -2198,7 +2204,7 @@ def start(self): @staticmethod def _release_string(id): - 'release "";' + # Release ""; return 'RELEASE "xact(' + id.replace('"', '""') + ')";' def commit(self): @@ -2311,15 +2317,18 @@ def execute(self, query : str) -> None: self._pq_complete() def do(self, language : str, source : str, - qlit = pg_str.quote_literal, - qid = pg_str.quote_ident, - ) -> None: + qlit = pg_str.quote_literal, + qid = pg_str.quote_ident, + ) -> None: sql = "DO " + qlit(source) + " LANGUAGE " + qid(language) + ";" self.execute(sql) - def xact(self, isolation = None, mode = None): + # Alias transaction as xact. xact is the original term, but support + # the full word for identifier consistency with asyncpg. + def transaction(self, isolation = None, mode = None) -> Transaction: x = Transaction(self, isolation = isolation, mode = mode) return x + xact=transaction def prepare(self, sql_statement_string : str, @@ -2328,6 +2337,8 @@ def prepare(self, ) -> Statement: ps = Class(self, statement_id, sql_statement_string) ps._init() + + # Complete protocol transaction to maintain point of origin in error cases. ps._fini() return ps @@ -2412,7 +2423,9 @@ def __enter__(self): return self def connect(self): - 'Establish the connection to the server' + """ + Establish the connection to the server. + """ if self.closed is False: # already connected? just return. return @@ -2623,11 +2636,11 @@ def _pq_step(self, complete_state = globals()['xact'].Complete): del self._controller def _receive_async(self, - msg, controller = None, - showoption = element.ShowOption.type, - notice = element.Notice.type, - notify = element.Notify.type, - ): + msg, controller = None, + showoption = element.ShowOption.type, + notice = element.Notice.type, + notify = element.Notify.type, + ): c = controller or getattr(self, '_controller', self) typ = msg.type if typ == showoption: @@ -2754,7 +2767,6 @@ def __repr__(self): keywords = os.linesep + ' ' + keywords if keywords else '' ) - @abstractmethod def socket_factory_sequence(self): """ Generate a list of callables that will be used to attempt to make the @@ -2764,28 +2776,32 @@ def socket_factory_sequence(self): The callables in the sequence must take a timeout parameter. 
""" + raise NotImplementedError def __init__(self, connect_timeout : int = None, - server_encoding : "server encoding hint for driver" = None, - sslmode : ('allow', 'prefer', 'require', 'disable') = None, - sslcrtfile : "filepath" = None, - sslkeyfile : "filepath" = None, - sslrootcrtfile : "filepath" = None, - sslrootcrlfile : "filepath" = None, + server_encoding = None, driver = None, **kw ): super().__init__(**kw) + self._security(kw) self.driver = driver - self.server_encoding = server_encoding self.connect_timeout = connect_timeout - self.sslmode = sslmode - self.sslkeyfile = sslkeyfile - self.sslcrtfile = sslcrtfile - self.sslrootcrtfile = sslrootcrtfile - self.sslrootcrlfile = sslrootcrlfile + + def _security(self, parameters): + self.sslmode = parameters.get('sslmode') or None + self.sslkeyfile = parameters.get('sslkeyfile') or None + self.sslcrtfile = parameters.get('sslcrtfile') or None + self.sslrootcrtfile = parameters.get('sslrootcrtfile') or None + self.sslrootcrlfile = parameters.get('sslrootcrlfile') or None + + self._socket_secure = { + 'keyfile': self.sslkeyfile, + 'certfile': self.sslcrtfile, + 'ca_certs': self.sslrootcrtfile, + } if self.sslrootcrlfile is not None: pg_exc.IgnoredClientParameterWarning( @@ -2793,6 +2809,7 @@ def __init__(self, creator = self, ).emit() + def _startup(self): # Startup message parameters. tnkw = { 'client_min_messages' : 'WARNING', @@ -2809,41 +2826,28 @@ def __init__(self, ) tnkw.update(s) + # Postgres defaults the database identifier to the user. tnkw['user'] = self.user if self.database is not None: tnkw['database'] = self.database + # Encode startup arguments. + # The server_encoding hint is strictly for str() values. se = self.server_encoding or 'utf-8' - ## - # Attempt to accommodate for literal treatment of startup data. - ## self._startup_parameters = tuple([ - # All keys go in utf-8. However, ascii would probably be good enough. ( k.encode('utf-8'), - # If it's a str(), encode in the hinted server_encoding. - # Otherwise, convert the object(int, float, bool, etc) into a string - # and treat it as utf-8. v.encode(se) if type(v) is str else str(v).encode('utf-8') ) for k, v in tnkw.items() ]) self._password = (self.password or '').encode(se) - self._socket_secure = { - 'keyfile' : self.sslkeyfile, - 'certfile' : self.sslcrtfile, - 'ca_certs' : self.sslrootcrtfile, - } # class Connector class SocketConnector(Connector): - 'abstract connector for using `socket` and `ssl`' - @abstractmethod - def socket_factory_sequence(self): - """ - Return a sequence of `SocketFactory`s for a connection to use to connect - to the target host. - """ + """ + Abstract connector for using `socket` and `ssl`. 
+ """ def create_socket_factory(self, **params): return SocketFactory(**params) @@ -2861,23 +2865,27 @@ def socket_factory_params(self, host, port, ipv, **kw): raise TypeError("'port' is a required keyword and cannot be 'None'") return {'socket_create': (self.address_family, socket.SOCK_STREAM), - 'socket_connect': (host, int(port))} + 'socket_connect': (host, int(port)), + 'socket_secure': self._socket_secure} def __init__(self, host, port, ipv, **kw): + super().__init__(**kw) params = self.socket_factory_params(host, port, ipv, **kw) self.host, self.port = params['socket_connect'] # constant socket connector self._socketcreator = self.create_socket_factory(**params) self._socketcreators = (self._socketcreator,) - super().__init__(**kw) + self._startup() class IP4(IPConnector): - 'Connector for establishing IPv4 connections' + """ + Connector for establishing IPv4 connections. + """ ipv = 4 address_family = socket.AF_INET def __init__(self, - host : "IPv4 Address (str)" = None, + host : str = None, port : int = None, ipv = 4, **kw @@ -2885,12 +2893,14 @@ def __init__(self, super().__init__(host, port, ipv, **kw) class IP6(IPConnector): - 'Connector for establishing IPv6 connections' + """ + Connector for establishing IPv6 connections. + """ ipv = 6 address_family = socket.AF_INET6 def __init__(self, - host : "IPv6 Address (str)" = None, + host : str = None, port : int = None, ipv = 6, **kw @@ -2898,7 +2908,9 @@ def __init__(self, super().__init__(host, port, ipv, **kw) class Unix(SocketConnector): - 'Connector for establishing unix domain socket connections' + """ + Connector for establishing unix domain socket connections. + """ def socket_factory_sequence(self): return self._socketcreators @@ -2907,15 +2919,17 @@ def socket_factory_params(self, unix): raise TypeError("'unix' is a required keyword and cannot be 'None'") return {'socket_create': (socket.AF_UNIX, socket.SOCK_STREAM), - 'socket_connect': unix} + 'socket_connect': unix, + 'socket_secure': self._socket_secure} def __init__(self, unix = None, **kw): + super().__init__(**kw) params = self.socket_factory_params(unix) self.unix = params['socket_connect'] # constant socket connector self._socketcreator = self.create_socket_factory(**params) self._socketcreators = (self._socketcreator,) - super().__init__(**kw) + self._startup() class Host(SocketConnector): """ @@ -2946,9 +2960,11 @@ def __init__(self, host : str = None, port : (str, int) = None, ipv : int = None, - address_family : "address family to use(AF_INET,AF_INET6)" = None, + address_family = None, **kw ): + super().__init__(**kw) + if host is None: raise TypeError("'host' is a required keyword") if port is None: @@ -2967,7 +2983,7 @@ def __init__(self, raise TypeError("unknown IP version selected: 'ipv' = " + repr(ipv)) self.host = host self.port = port - super().__init__(**kw) + self._startup() class Driver(pg_api.Driver): def _e_metas(self): @@ -2992,10 +3008,7 @@ def fit(self, **kw ) -> Connector: """ - Create the appropriate `postgresql.api.Connector` based on the - parameters. - - This also protects against mutually exclusive parameters. + Create the appropriate `postgresql.api.Connector` based on the parameters. """ if unix is not None: if host is not None: @@ -3006,7 +3019,7 @@ def fit(self, else: if host is None or port is None: raise TypeError("'host' and 'port', or 'unix' must be supplied") - # We have a host and a port. + # If it's an IP address, IP4 or IP6 should be selected. if ':' in host: # There's a ':' in host, good chance that it's IPv6. 
@@ -3016,7 +3029,7 @@ def fit(self, except (socket.error, NameError): pass - # Not IPv6, maybe IPv4... + # Not IPv6, maybe IPv4. try: socket.inet_aton(host) # It's IP4 @@ -3029,6 +3042,9 @@ def fit(self, def connect(self, **kw) -> Connection: """ + Create an established Connection instance from a temporary Connector + built using the given keywords. + For information on acceptable keywords, see: `postgresql.documentation.driver`:Connection Keywords diff --git a/postgresql/exceptions.py b/postgresql/exceptions.py index fc960794..5d71d01c 100644 --- a/postgresql/exceptions.py +++ b/postgresql/exceptions.py @@ -36,14 +36,20 @@ PythonException = Exception class Exception(Exception): - 'Base PostgreSQL exception class' + """ + Base PostgreSQL exception class. + """ pass class LoadError(Exception): - 'Failed to load a library' + """ + Failed to load a library. + """ class Disconnection(Exception): - 'Exception identifying errors that result in disconnection' + """ + Exception identifying errors that result in disconnection. + """ class Warning(Message): code = '01000' @@ -80,12 +86,16 @@ class NoMoreSetsReturned(NoDataWarning): code = '02001' class Error(Message, Exception): - 'A PostgreSQL Error' + """ + A PostgreSQL Error. + """ _e_label = 'ERROR' code = '' def __str__(self): - 'Call .sys.errformat(self)' + """ + Call .sys.errformat(self). + """ return pg_sys.errformat(self) @property @@ -94,7 +104,9 @@ def fatal(self): return None if f is None else f in ('PANIC', 'FATAL') class DriverError(Error): - "Errors originating in the driver's implementation." + """ + Errors originating in the driver's implementation. + """ source = 'CLIENT' code = '--000' class AuthenticationMethodError(DriverError, Disconnection): @@ -109,7 +121,9 @@ class InsecurityError(DriverError, Disconnection): """ code = '--SEC' class ConnectTimeoutError(DriverError, Disconnection): - 'Client was unable to esablish a connection in the given time' + """ + Client was unable to esablish a connection in the given time. + """ code = '--TOE' class TypeIOError(DriverError): @@ -144,7 +158,9 @@ class ConnectionDoesNotExistError(ConnectionError): """ code = '08003' class ConnectionFailureError(ConnectionError): - 'Raised when a connection is dropped' + """ + Raised when a connection is dropped. + """ code = '08006' class ClientCannotConnectError(ConnectionError): @@ -164,7 +180,9 @@ class TriggeredActionError(Error): code = '09000' class FeatureError(Error): - "Unsupported feature" + """ + "Unsupported feature. + """ code = '0A000' class TransactionInitiationError(TransactionError): @@ -187,7 +205,9 @@ class CaseNotFoundError(Error): code = '20000' class CardinalityError(Error): - "Wrong number of rows returned" + """ + Wrong number of rows returned. + """ code = '21000' class TriggeredDataChangeViolation(Error): @@ -197,13 +217,17 @@ class AuthenticationSpecificationError(Error, Disconnection): code = '28000' class DPDSEError(Error): - "Dependent Privilege Descriptors Still Exist" + """ + Dependent Privilege Descriptors Still Exist. + """ code = '2B000' class DPDSEObjectError(DPDSEError): code = '2BP01' class SREError(Error): - "SQL Routine Exception" + """ + SQL Routine Exception. + """ code = '2F000' class FunctionExecutedNoReturnStatementError(SREError): code = '2F005' @@ -215,7 +239,9 @@ class ReadingDataProhibitedError(SREError): code = '2F004' class EREError(Error): - "External Routine Exception" + """ + External Routine Exception. 
+ """ code = '38000' class ContainingSQLNotPermittedError(EREError): code = '38001' @@ -227,7 +253,9 @@ class ReadingSQLDataNotPermittedError(EREError): code = '38004' class ERIEError(Error): - "External Routine Invocation Exception" + """ + External Routine Invocation Exception. + """ code = '39000' class InvalidSQLState(ERIEError): code = '39001' @@ -239,7 +267,9 @@ class SRFProtocolError(ERIEError): code = '39P02' class TRError(TransactionError): - "Transaction Rollback" + """ + Transaction Rollback. + """ code = '40000' class DeadlockError(TRError): code = '40P01' @@ -252,7 +282,9 @@ class StatementCompletionUnknownError(TRError): class ITSError(TransactionError): - "Invalid Transaction State" + """ + Invalid Transaction State. + """ code = '25000' class ActiveTransactionError(ITSError): code = '25001' @@ -265,24 +297,34 @@ class BadIsolationForBranchError(ITSError): class NoActiveTransactionForBranchError(ITSError): code = '25005' class ReadOnlyTransactionError(ITSError): - "Occurs when an alteration occurs in a read-only transaction." + """ + Occurs when an alteration occurs in a read-only transaction. + """ code = '25006' class SchemaAndDataStatementsError(ITSError): - "Mixed schema and data statements not allowed." + """ + Mixed schema and data statements not allowed. + """ code = '25007' class InconsistentCursorIsolationError(ITSError): - "The held cursor requires the same isolation." + """ + The held cursor requires the same isolation. + """ code = '25008' class NoActiveTransactionError(ITSError): code = '25P01' class InFailedTransactionError(ITSError): - "Occurs when an action occurs in a failed transaction." + """ + Occurs when an action occurs in a failed transaction. + """ code = '25P02' class SavepointError(TransactionError): - "Classification error designating errors that relate to savepoints." + """ + Classification error designating errors that relate to savepoints. + """ code = '3B000' class InvalidSavepointSpecificationError(SavepointError): code = '3B001' @@ -291,7 +333,9 @@ class TransactionTerminationError(TransactionError): code = '2D000' class IRError(Error): - "Insufficient Resource Error" + """ + Insufficient Resource Error. + """ code = '53000' class MemoryError(IRError, MemoryError): code = '53200' @@ -301,7 +345,9 @@ class TooManyConnectionsError(IRError): code = '53300' class PLEError(OverflowError): - "Program Limit Exceeded" + """ + Program Limit Exceeded + """ code = '54000' class ComplexityOverflowError(PLEError): code = '54001' @@ -311,7 +357,9 @@ class ArgumentOverflowError(PLEError): code = '54023' class ONIPSError(Error): - "Object Not In Prerequisite State" + """ + Object Not In Prerequisite State. + """ code = '55000' class ObjectInUseError(ONIPSError): code = '55006' @@ -322,7 +370,9 @@ class UnavailableLockError(ONIPSError): class SEARVError(Error): - "Syntax Error or Access Rule Violation" + """ + Syntax Error or Access Rule Violation. + """ code = '42000' class SEARVNameError(SEARVError): @@ -445,7 +495,9 @@ class SchemaNameError(NameError): code = '3F000' class ICVError(Error): - "Integrity Contraint Violation" + """ + Integrity Contraint Violation. + """ code = '23000' class RestrictError(ICVError): code = '23001' @@ -539,7 +591,9 @@ class EscapeSequenceError(DataError): class EscapeCharacterConflictError(DataError): code = '2200B' class EscapeCharacterError(DataError): - "Invalid escape character" + """ + Invalid escape character. 
+ """ code = '2200C' class SubstringError(DataError): @@ -573,7 +627,9 @@ class IndexCorruptedError(InternalError): code = 'XX002' class SIOError(Error): - "System I/O" + """ + System I/O. + """ code = '58000' class UndefinedFileError(SIOError): code = '58P01' @@ -581,13 +637,17 @@ class DuplicateFileError(SIOError): code = '58P02' class CFError(Error): - "Configuration File Error" + """ + Configuration File Error. + """ code = 'F0000' class LockFileExistsError(CFError): code = 'F0001' class OIError(Error): - "Operator Intervention" + """ + Operator Intervention. + """ code = '57000' class QueryCanceledError(OIError): code = '57014' @@ -596,14 +656,20 @@ class AdminShutdownError(OIError, Disconnection): class CrashShutdownError(OIError, Disconnection): code = '57P02' class ServerNotReadyError(OIError, Disconnection): - 'Thrown when a connection is established to a server that is still starting up.' + """ + Thrown when a connection is established to a server that is still starting up. + """ code = '57P03' class PLPGSQLError(Error): - "Error raised by a PL/PgSQL procedural function" + """ + Error raised by a PL/PgSQL procedural function. + """ code = 'P0000' class PLPGSQLRaiseError(PLPGSQLError): - "Error raised by a PL/PgSQL RAISE statement." + """ + Error raised by a PL/PgSQL RAISE statement. + """ code = 'P0001' class PLPGSQLNoDataFoundError(PLPGSQLError): code = 'P0002' @@ -615,9 +681,9 @@ class PLPGSQLTooManyRowsError(PLPGSQLError): code_to_error = {} code_to_warning = {} def map_errors_and_warnings( - objs : "A iterable of `Warning`s and `Error`'s", - error_container : "apply the code to error association to this object" = code_to_error, - warning_container : "apply the code to warning association to this object" = code_to_warning, + objs, + error_container = code_to_error, + warning_container = code_to_warning, ): """ Construct the code-to-error and code-to-warning associations. @@ -655,9 +721,9 @@ def map_errors_and_warnings( container[obj.pg_code] = obj def code_lookup( - default : "The object to return when no code or class is found", - container : "where to look for the object associated with the code", - code : "the code to find the exception for" + default, + container, + code ): obj = container.get(code) if obj is None: @@ -682,5 +748,3 @@ def code_lookup( ) ) ) -## -# vim: ts=3:sw=3:noet: diff --git a/postgresql/installation.py b/postgresql/installation.py index 046bff44..5856816c 100644 --- a/postgresql/installation.py +++ b/postgresql/installation.py @@ -240,7 +240,13 @@ def ssl(self): """ Whether the installation was compiled with SSL support. """ - return 'with_openssl' in self.configure_options + if 'with_openssl' in self.configure_options: + return True + # Parameterized form in newer versions. 
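`map_errors_and_warnings` fills the module-level `code_to_error` and `code_to_warning` dictionaries, and `code_lookup` resolves an SQLSTATE to the nearest registered class. A small sketch of the association, assuming the module populates the maps when it is imported:

```python
from postgresql import exceptions as pg_exc

# '57014' is the code attached to QueryCanceledError above.
cls = pg_exc.code_to_error.get('57014', pg_exc.Error)
print(cls.__name__)    # QueryCanceledError

# Codes without a dedicated class fall back to the supplied default.
print(pg_exc.code_to_error.get('99999', pg_exc.Error).__name__)    # Error
```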
+ for x in self.configure_options: + if 'with_ssl' in x: + return True + return False def default(typ = Installation): """ diff --git a/postgresql/iri.py b/postgresql/iri.py index 5e635dfa..7a90aee1 100644 --- a/postgresql/iri.py +++ b/postgresql/iri.py @@ -6,7 +6,7 @@ PQ IRIs take the form:: - pq://user:pass@host:port/database?setting=value&setting2=value2#public,othernamespace + pq://user:pass@host:port/database?setting=value&setting2=value2 IPv6 is supported via the standard representation:: @@ -14,7 +14,7 @@ Driver Parameters: - pq://user@host/?[driver_param]=value&[other_param]=value?setting=val + pq://user@host/?[driver_param]=value&[other_param]=value?server_setting=val """ from .resolved import riparse as ri from .string import split_ident @@ -27,9 +27,11 @@ escape_path_re = re.compile('[%s]' %(re.escape(ri.unescaped + ','),)) def structure(d, fieldproc = ri.unescape): - 'Create a clientparams dictionary from a parsed RI' - if d.get('scheme', 'pq').lower() != 'pq': - raise ValueError("PQ-IRI scheme is not 'pq'") + """ + Create a clientparams dictionary from a parsed RI. + """ + if d.get('scheme', 'pq').lower() not in {'pq', 'postgres', 'postgresql'}: + raise ValueError("PQ-IRI scheme is not 'pq', 'postgres', or 'postgresql'") cpd = { k : fieldproc(v) for k, v in d.items() if k not in ('path', 'fragment', 'query', 'host', 'scheme') @@ -90,7 +92,9 @@ def construct_path(x, re = escape_path_re): return ','.join((re.sub(ri.re_pct_encode, y) for y in x)) def construct(x, obscure_password = False): - 'Construct a RI dictionary from a clientparams dictionary' + """ + Construct a RI dictionary from a clientparams dictionary. + """ # the rather exhaustive settings choreography is due to # a desire to allow the search_path to be appended in the fragment settings = x.get('settings') @@ -167,7 +171,9 @@ def construct(x, obscure_password = False): ) def parse(s, fieldproc = ri.unescape): - 'Parse a Postgres IRI into a dictionary object' + """ + Parse a Postgres IRI into a dictionary object. + """ return structure( # In ri.parse, don't unescape the parsed values as our sub-structure # uses the escape mechanism in IRIs to specify literal separator @@ -177,7 +183,9 @@ def parse(s, fieldproc = ri.unescape): ) def serialize(x, obscure_password = False): - 'Return a Postgres IRI from a dictionary object.' + """ + Return a Postgres IRI from a dictionary object. + """ return ri.unsplit(construct(x, obscure_password = obscure_password)) if __name__ == '__main__': diff --git a/postgresql/message.py b/postgresql/message.py index e6000c02..7bbe77d9 100644 --- a/postgresql/message.py +++ b/postgresql/message.py @@ -64,11 +64,11 @@ def isconsistent(self, other): ) def __init__(self, - message : "The primary information of the message", - code : "Message code to attach (SQL state)" = None, - details : "additional information associated with the message" = {}, - source : "Which side generated the message(SERVER, CLIENT)" = None, - creator : "The interface element that called for instantiation" = None, + message, + code = None, + details = {}, + source = None, + creator = None, ): self.message = message self.details = details diff --git a/postgresql/notifyman.py b/postgresql/notifyman.py index b46aa0df..cc6ef2c8 100644 --- a/postgresql/notifyman.py +++ b/postgresql/notifyman.py @@ -106,7 +106,7 @@ def trash(self, connections): def queue(self, db, notifies): """ - Queue the notifies for the specified connection. Upon success, the + Queue the notifies for the specified connection. 
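With the relaxed scheme check, `postgres://` and `postgresql://` URLs are structured the same way as `pq://` IRIs. A short sketch; only the `host` key is relied on here, the rest of the returned dictionary is simply printed:

```python
from postgresql import iri as pg_iri

params = pg_iri.parse('postgres://user:secret@localhost:5432/test')
print(params['host'])          # localhost
print(sorted(params))          # remaining client parameters

# Serialize back to a PQ IRI, masking the password.
print(pg_iri.serialize(params, obscure_password=True))
```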
This method can be overridden by subclasses to take a callback approach to notification management. @@ -186,7 +186,9 @@ def settimeout(self, seconds): self.timeout = seconds def gettimeout(self): - 'Get the timeout.' + """ + Get the timeout assigned by `settimeout`. + """ return self.timeout def __iter__(self): diff --git a/postgresql/pgpassfile.py b/postgresql/pgpassfile.py index e7a505a7..ee0ae73f 100644 --- a/postgresql/pgpassfile.py +++ b/postgresql/pgpassfile.py @@ -1,7 +1,9 @@ ## # .pgpassfile - parse and lookup passwords in a pgpassfile ## -'Parse pgpass files and subsequently lookup a password.' +""" +Parse pgpass files and subsequently lookup a password. +""" import os.path def split(line, len = len): @@ -30,7 +32,9 @@ def split(line, len = len): return r def parse(data): - 'produce a list of [(word, (host,port,dbname,user))] from a pgpass file object' + """ + Produce a list of [(word, (host,port,dbname,user))] from a pgpass file object. + """ return [ (x[-1], x[0:4]) for x in [split(line) for line in data] if x ] @@ -50,7 +54,9 @@ def lookup_password(words, uhpd): return word def lookup_password_file(path, t): - 'like lookup_password, but takes a file path' + """ + Like lookup_password, but takes a file path. + """ with open(path) as f: return lookup_password(parse(f), t) diff --git a/postgresql/port/_optimized/buffer.c b/postgresql/port/_optimized/buffer.c index 0b6cf2eb..cb81b2f9 100644 --- a/postgresql/port/_optimized/buffer.c +++ b/postgresql/port/_optimized/buffer.c @@ -587,7 +587,7 @@ PyTypeObject pq_message_stream_Type = { sizeof(struct p_buffer), /* tp_basicsize */ 0, /* tp_itemsize */ p_dealloc, /* tp_dealloc */ - NULL, /* tp_print */ + 0, /* tp_print */ NULL, /* tp_getattr */ NULL, /* tp_setattr */ NULL, /* tp_compare */ @@ -624,6 +624,3 @@ PyTypeObject pq_message_stream_Type = { p_new, /* tp_new */ NULL, /* tp_free */ }; -/* - * vim: ts=3:sw=3:noet: - */ diff --git a/postgresql/port/_optimized/functools.c b/postgresql/port/_optimized/functools.c index 45a5d75f..9a0deea0 100644 --- a/postgresql/port/_optimized/functools.c +++ b/postgresql/port/_optimized/functools.c @@ -335,6 +335,3 @@ compose(PyObject *self, PyObject *args) return(rob); } -/* - * vim: ts=3:sw=3:noet: - */ diff --git a/postgresql/port/_optimized/module.c b/postgresql/port/_optimized/module.c index 240921d5..33f68759 100644 --- a/postgresql/port/_optimized/module.c +++ b/postgresql/port/_optimized/module.c @@ -149,6 +149,3 @@ PyInit_optimized(void) Py_DECREF(mod); return(NULL); } -/* - * vim: ts=3:sw=3:noet: - */ diff --git a/postgresql/port/_optimized/wirestate.c b/postgresql/port/_optimized/wirestate.c index 9af150cb..74a9aca4 100644 --- a/postgresql/port/_optimized/wirestate.c +++ b/postgresql/port/_optimized/wirestate.c @@ -248,7 +248,7 @@ PyTypeObject WireState_Type = { sizeof(struct wirestate), /* tp_basicsize */ 0, /* tp_itemsize */ ws_dealloc, /* tp_dealloc */ - NULL, /* tp_print */ + 0, /* tp_print */ NULL, /* tp_getattr */ NULL, /* tp_setattr */ NULL, /* tp_compare */ @@ -284,6 +284,3 @@ PyTypeObject WireState_Type = { ws_new, /* tp_new */ NULL, /* tp_free */ }; -/* - * vim: ts=3:sw=3:noet: - */ diff --git a/postgresql/project.py b/postgresql/project.py index f7c4f7bd..34ddf3a6 100644 --- a/postgresql/project.py +++ b/postgresql/project.py @@ -5,8 +5,8 @@ name = 'py-postgresql' identity = 'http://github.com/python-postgres/fe' -meaculpa = 'Python+Postgres' +meaculpa = 'python-postgres' abstract = 'Driver and tools library for PostgreSQL' -version_info = (1, 2, 2) +version_info = (1, 3, 1) 
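The pgpass helpers above turn `host:port:dbname:user:password` lines into `(password, (host, port, dbname, user))` pairs. A sketch using inline sample entries rather than a real `~/.pgpass` file:

```python
from postgresql import pgpassfile

sample = [
    'localhost:5432:test:test:secret',
    '*:*:*:postgres:adminpw',
]
for password, (host, port, dbname, user) in pgpassfile.parse(sample):
    print('%s@%s:%s/%s -> %s' % (user, host, port, dbname, password))
```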
version = '.'.join(map(str, version_info)) diff --git a/postgresql/protocol/client3.py b/postgresql/protocol/client3.py index daba076f..f7e21750 100644 --- a/postgresql/protocol/client3.py +++ b/postgresql/protocol/client3.py @@ -269,7 +269,9 @@ def read_into(self, Complete = xact.Complete): return True def standard_read_messages(self): - 'read more messages into self.read when self.read is empty' + """ + Read more messages into self.read when self.read is empty. + """ r = True if not self.read: # get more data from the wire and @@ -314,7 +316,9 @@ def send_message_data(self): def standard_write_messages(self, messages, cat_messages = element.cat_messages ): - 'protocol message writer' + """ + Protocol message writer. + """ if self.writing is not self.written: self.message_data += cat_messages(self.writing) self.written = self.writing @@ -327,7 +331,9 @@ def standard_write_messages(self, messages, write_messages = standard_write_messages def traced_write_messages(self, messages): - 'message_writer used when tracing' + """ + `message_writer` used when tracing. + """ for msg in messages: t = getattr(msg, 'type', None) if t is not None: @@ -346,7 +352,9 @@ def traced_write_messages(self, messages): return self.standard_write_messages(messages) def traced_read_messages(self): - 'message_reader used when tracing' + """ + `message_reader` used when tracing. + """ r = self.standard_read_messages() for msg in self.read: self._tracer('↓ %r(%d): %r%s' %( @@ -433,7 +441,9 @@ def step(self): self.xact = None def complete(self): - 'complete the current transaction' + """ + Complete the current transaction. + """ # Continue to transition until all transactions have been # completed, or an exception occurs that does not signal retry. x = self.xact diff --git a/postgresql/protocol/element3.py b/postgresql/protocol/element3.py index 39fca329..1f8579e2 100644 --- a/postgresql/protocol/element3.py +++ b/postgresql/protocol/element3.py @@ -135,8 +135,7 @@ def __repr__(self): class Void(Message): """ - An absolutely empty message. When serialized, it always yields an empty - string. + An absolutely empty message. When serialized, it always yields an empty string. """ type = b'' __slots__ = () @@ -146,7 +145,7 @@ def bytes(self): def serialize(self): return b'' - + def __new__(typ, *args, **kw): return VoidMessage VoidMessage = Message.__new__(Void) @@ -178,7 +177,9 @@ def parse(typ, data): return typ((data[0:1], data[5:])) class EmptyMessage(Message): - 'An abstract message that is always empty' + """ + An abstract message that is always empty. + """ __slots__ = () type = b'' @@ -195,7 +196,9 @@ def parse(typ, data): return typ.SingleInstance class Notify(Message): - 'Asynchronous notification message' + """ + Asynchronous notification message. + """ type = message_types[b'A'[0]] __slots__ = ('pid', 'channel', 'payload',) @@ -216,8 +219,9 @@ def parse(typ, data): return typ(pid, channel, payload) class ShowOption(Message): - """ShowOption(name, value) - GUC variable information from backend""" + """ + GUC variable information from backend + """ type = message_types[b'S'[0]] __slots__ = ('name', 'value') @@ -233,7 +237,9 @@ def parse(typ, data): return typ(*(data.split(b'\x00', 2)[0:2])) class Complete(StringMessage): - 'Command completion message.' + """ + Command completion message. + """ type = message_types[b'C'[0]] __slots__ = () @@ -257,45 +263,57 @@ def extract_command(self): Strip all the *surrounding* digits and spaces from the command tag, and return that string. 
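`Notify` is built from the same three fields its `parse` classmethod extracts from the wire. A small sketch constructing one directly with arbitrary values:

```python
from postgresql.protocol.element3 import Notify

n = Notify(1234, b'jobs', b'payload-bytes')
print(n.pid, n.channel, n.payload)
```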
""" - return self.data.strip(b'\c\n\t 0123456789') or None + return self.data.strip(b'\r\n\t 0123456789') or None class Null(EmptyMessage): - 'Null command' + """ + Null command. + """ type = message_types[b'I'[0]] __slots__ = () NullMessage = Message.__new__(Null) Null.SingleInstance = NullMessage class NoData(EmptyMessage): - 'Null command' + """ + Null command. + """ type = message_types[b'n'[0]] __slots__ = () NoDataMessage = Message.__new__(NoData) NoData.SingleInstance = NoDataMessage class ParseComplete(EmptyMessage): - 'Parse reaction' + """ + Parse reaction. + """ type = message_types[b'1'[0]] __slots__ = () ParseCompleteMessage = Message.__new__(ParseComplete) ParseComplete.SingleInstance = ParseCompleteMessage class BindComplete(EmptyMessage): - 'Bind reaction' + """ + Bind reaction. + """ type = message_types[b'2'[0]] __slots__ = () BindCompleteMessage = Message.__new__(BindComplete) BindComplete.SingleInstance = BindCompleteMessage class CloseComplete(EmptyMessage): - 'Close statement or Portal' + """ + Close statement or Portal. + """ type = message_types[b'3'[0]] __slots__ = () CloseCompleteMessage = Message.__new__(CloseComplete) CloseComplete.SingleInstance = CloseCompleteMessage class Suspension(EmptyMessage): - 'Portal was suspended, more tuples for reading' + """ + Portal was suspended, more tuples for reading. + """ type = message_types[b's'[0]] __slots__ = () SuspensionMessage = Message.__new__(Suspension) @@ -859,7 +877,6 @@ class Function(Message): """ Execute the specified function with the given arguments """ - type = message_types[b'F'[0]] __slots__ = ('oid', 'aformats', 'arguments', 'rformat') diff --git a/postgresql/protocol/pbuffer.py b/postgresql/protocol/pbuffer.py index a3014942..d41a79e0 100644 --- a/postgresql/protocol/pbuffer.py +++ b/postgresql/protocol/pbuffer.py @@ -16,7 +16,10 @@ xl_unpack = struct.Struct('!xL').unpack_from class pq_message_stream(object): - 'provide a message stream from a data stream' + """ + Provide a message stream from a data stream. + """ + _block = 512 _limit = _block * 4 def __init__(self): @@ -24,12 +27,18 @@ def __init__(self): self._start = 0 def truncate(self): - "remove all data in the buffer" + """ + Remove all data in the buffer. + """ + self._strio.truncate(0) self._start = 0 def _rtruncate(self, amt = None): - "[internal] remove the given amount of data" + """ + [internal] remove the given amount of data. + """ + strio = self._strio if amt is None: amt = self._strio.tell() @@ -58,7 +67,10 @@ def _rtruncate(self, amt = None): strio.truncate(size - amt) def has_message(self, xl_unpack = xl_unpack, len = len): - "if the buffer has a message available" + """ + Whether the buffer has a message available. + """ + strio = self._strio strio.seek(self._start) header = strio.read(5) @@ -71,7 +83,10 @@ def has_message(self, xl_unpack = xl_unpack, len = len): return (strio.tell() - self._start) >= length + 1 def __len__(self, xl_unpack = xl_unpack, len = len): - "number of messages in buffer" + """ + Number of messages in buffer. + """ + count = 0 rpos = self._start strio = self._strio diff --git a/postgresql/protocol/xact3.py b/postgresql/protocol/xact3.py index d8e35650..6f5497f6 100644 --- a/postgresql/protocol/xact3.py +++ b/postgresql/protocol/xact3.py @@ -1,7 +1,9 @@ ## # .protocol.xact3 - protocol state machine ## -'PQ version 3.0 client transactions' +""" +PQ version 3.0 client transactions. 
+""" import sys import os import pprint @@ -94,10 +96,7 @@ class Negotiation(Transaction): """ state = None - def __init__(self, - startup_message : "startup message to send", - password : "password source data(encoded password bytes)", - ): + def __init__(self, startup_message, password): self.startup_message = startup_message self.password = password self.received = [()] @@ -435,7 +434,9 @@ def __repr__(self, format = '{mod}.{name}({nl}{args})'.format): ) def messages_received(self): - 'Received and validate messages' + """ + Received and validate messages. + """ return chain.from_iterable(map(get1, self.completed)) def reverse(self, diff --git a/postgresql/python/command.py b/postgresql/python/command.py index 35fa8ab7..d88b1541 100644 --- a/postgresql/python/command.py +++ b/postgresql/python/command.py @@ -220,7 +220,7 @@ def __init__(self, *args, **kw): self.register_backslash(r'\?', self.showhelp, "Show this help message.") self.register_backslash(r'\set', self.bs_set, - "Configure environment variables. \set without arguments to show all") + "Configure environment variables. \\set without arguments to show all") self.register_backslash(r'\E', self.bs_E, "Edit a file or a temporary script.") self.register_backslash(r'\i', self.bs_i, @@ -633,5 +633,3 @@ def command(argv = sys.argv): if __name__ == '__main__': sys.exit(command()) -## -# vim: ts=3:sw=3:noet: diff --git a/postgresql/python/element.py b/postgresql/python/element.py index aa2dc5aa..4257a44e 100644 --- a/postgresql/python/element.py +++ b/postgresql/python/element.py @@ -7,7 +7,9 @@ from .decorlib import propertydoc class RecursiveFactor(Exception): - 'Raised when a factor is ultimately composed of itself' + """ + Raised when a factor is ultimately composed of itself. + """ pass class Element(object, metaclass = ABCMeta): @@ -96,7 +98,9 @@ def _e_metas(self): yield (None, format_element(x)) def prime_factor(obj): - 'get the primary factor on the `obj`, returns None if none.' + """ + Get the primary factor on the `obj`, returns None if none. + """ f = getattr(obj, '_e_factors', None) if f: return f[0], getattr(obj, f[0], None) @@ -126,7 +130,9 @@ def prime_factors(obj): yield fn, e def format_element(obj, coverage = ()): - 'format the given element with its factors and metadata into a readable string' + """ + Format the given element with its factors and metadata into a readable string. 
+ """ # if it's not an Element, all there is to return is str(obj) if obj in coverage: raise RecursiveFactor(coverage) diff --git a/postgresql/python/itertools.py b/postgresql/python/itertools.py index 94672367..08fcdb5d 100644 --- a/postgresql/python/itertools.py +++ b/postgresql/python/itertools.py @@ -4,10 +4,10 @@ """ itertools extensions """ -import collections +import collections.abc from itertools import cycle, islice -def interlace(*iters, next = next) -> collections.Iterable: +def interlace(*iters, next = next) -> collections.abc.Iterable: """ interlace(i1, i2, ..., in) -> ( i1-0, i2-0, ..., in-0, diff --git a/postgresql/python/socket.py b/postgresql/python/socket.py index 6cdffdca..6587d4c1 100644 --- a/postgresql/python/socket.py +++ b/postgresql/python/socket.py @@ -6,7 +6,6 @@ import random import socket import errno -import ssl __all__ = ['find_available_port', 'SocketFactory'] @@ -49,14 +48,29 @@ def fatal_exception_message(typ, err) -> (str, None): return None return getattr(err, 'strerror', '') - def secure(self, socket : socket.socket) -> ssl.SSLSocket: + @property + def _security_context(self): + if self._security_context_ii is None: + from ssl import SSLContext, PROTOCOL_TLS_CLIENT + ctx = self._security_context_ii = SSLContext(PROTOCOL_TLS_CLIENT) + ctx.check_hostname = False + + cf = self.socket_secure.get('certfile') + kf = self.socket_secure.get('keyfile') + if cf is not None: + self._security_context_ii.load_cert_chain(cf, keyfile=kf) + + ca = self.socket_secure.get('ca_certs') + if ca is not None: + self._security_context_ii.load_verify_locations(ca) + + return self._security_context_ii + + def secure(self, socket: socket.socket): """ Secure a socket with SSL. """ - if self.socket_secure is not None: - return ssl.wrap_socket(socket, **self.socket_secure) - else: - return ssl.wrap_socket(socket) + return self._security_context.wrap_socket(socket) def __call__(self, timeout = None): s = socket.socket(*self.socket_create) @@ -73,10 +87,12 @@ def __init__(self, socket_create, socket_connect, socket_secure = None, + socket_security_context = None ): + self._security_context_ii = socket_security_context self.socket_create = socket_create self.socket_connect = socket_connect - self.socket_secure = socket_secure + self.socket_secure = socket_secure or {} def __str__(self): return 'socket' + repr(self.socket_connect) diff --git a/postgresql/release/__init__.py b/postgresql/release/__init__.py index 36ffdd84..71af8093 100644 --- a/postgresql/release/__init__.py +++ b/postgresql/release/__init__.py @@ -2,5 +2,5 @@ # .release ## """ -Release management code and project meta-data. +Release management code and project/release meta-data. """ diff --git a/postgresql/release/distutils.py b/postgresql/release/distutils.py index e921b7b1..a18af4f3 100644 --- a/postgresql/release/distutils.py +++ b/postgresql/release/distutils.py @@ -117,8 +117,8 @@ default_prefix = ['postgresql'] def prefixed_extensions( - prefix : "prefix to prepend to paths" = default_prefix, - extensions_data : "`extensions_data`" = extensions_data, + prefix = default_prefix, + extensions_data = extensions_data, ) -> [Extension]: """ Generator producing the `distutils` `Extension` objects. 
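The socket factory now builds an `ssl.SSLContext` lazily instead of calling the removed `ssl.wrap_socket`. A standalone sketch of the same approach; the helper name and its defaults are illustrative, not part of the driver's API:

```python
import ssl

def make_client_context(certfile=None, keyfile=None, ca_certs=None):
    # Client-mode context with hostname checking disabled, mirroring the
    # permissive behaviour of the old wrap_socket() call.
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    if ca_certs is None:
        ctx.verify_mode = ssl.CERT_NONE
    else:
        ctx.load_verify_locations(ca_certs)
    if certfile is not None:
        ctx.load_cert_chain(certfile, keyfile=keyfile)
    return ctx

# make_client_context().wrap_socket(sock) then secures an already-connected socket.
```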
@@ -134,7 +134,7 @@ def prefixed_extensions( ) def prefixed_packages( - prefix : "prefix to prepend to source paths" = default_prefix, + prefix = default_prefix, packages = subpackages, ): """ @@ -147,7 +147,7 @@ def prefixed_packages( yield prefix + pkg def prefixed_package_data( - prefix : "prefix to prepend to dictionary keys paths" = default_prefix, + prefix = default_prefix, package_data = subpackage_data, ): """ @@ -177,6 +177,7 @@ def standard_setup_keywords(build_extensions = True, prefix = default_prefix): 'packages' : list(prefixed_packages(prefix = prefix)), 'package_data' : dict(prefixed_package_data(prefix = prefix)), 'cmdclass': dict(test=TestCommand), + 'python_requires': '>=3.7', } if build_extensions: d['ext_modules'] = list(prefixed_extensions(prefix = prefix)) diff --git a/postgresql/resolved/riparse.py b/postgresql/resolved/riparse.py index 668c7119..f91a2618 100644 --- a/postgresql/resolved/riparse.py +++ b/postgresql/resolved/riparse.py @@ -1,7 +1,3 @@ -# -*- encoding: utf-8 -*- -## -# copyright 2008, James William Pye. http://jwp.name -## """ Split, unsplit, parse, serialize, construct and structure resource indicators. @@ -69,7 +65,9 @@ del x def unescape(x, mkval = chr): - 'Substitute percent escapes with literal characters' + """ + Substitute percent escapes with literal characters. + """ nstr = type(x)('') if isinstance(x, str): mkval = chr @@ -193,7 +191,9 @@ def split_path(p, fieldproc = unescape): return [fieldproc(x) for x in p.split('/')] def unsplit(t): - 'Make a RI from a split RI(5-tuple)' + """ + Make a RI from a split RI(5-tuple). + """ s = '' if t[0] is not None: s += t[0] @@ -265,7 +265,9 @@ def split_netloc(netloc, fieldproc = unescape): return (user, password, addr, port) def unsplit_netloc(t): - 'Create a netloc fragment from the given tuple(user,password,host,port)' + """ + Create a netloc fragment from the given tuple(user,password,host,port). + """ if t[0] is None and t[2] is None: return None s = '' @@ -340,7 +342,9 @@ def construct_query(x, ]) def construct(x): - 'Construct a RI tuple(5-tuple) from a dictionary object' + """ + Construct a RI tuple(5-tuple) from a dictionary object. + """ p = x.get('path') if p is not None: p = '/'.join([escape_path_re.sub(re_pct_encode, y) for y in p]) @@ -378,7 +382,9 @@ def parse(s, fieldproc = unescape): return structure(split(s), fieldproc = fieldproc) def serialize(x): - 'Return an RI from a dictionary object. Synonym for ``unsplit(construct(x))``' + """ + Return an RI from a dictionary object. Synonym for ``unsplit(construct(x))``. + """ return unsplit(construct(x)) __docformat__ = 'reStructuredText' diff --git a/postgresql/string.py b/postgresql/string.py index adbd77c4..53799d37 100644 --- a/postgresql/string.py +++ b/postgresql/string.py @@ -16,22 +16,30 @@ import re def escape_literal(text): - "Replace every instance of ' with ''" + """ + Replace every instance of ' with ''. + """ return text.replace("'", "''") def quote_literal(text): - "Escape the literal and wrap it in [single] quotations" + """ + Escape the literal and wrap it in [single] quotations. + """ return "'" + text.replace("'", "''") + "'" def escape_ident(text): - 'Replace every instance of " with ""' + """ + Replace every instance of " with "". 
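`riparse` provides the generic split/parse/serialize primitives that the PQ IRI layer builds on. A tiny round-trip sketch:

```python
from postgresql.resolved import riparse

d = riparse.parse('pq://user@example.org:5432/db')
print(d)                     # dictionary form of the resource indicator
print(riparse.serialize(d))  # equivalent to unsplit(construct(d))
```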
+ """ return text.replace('"', '""') def needs_quoting(text): return not (text and not text[0].isdecimal() and text.replace('_', 'a').isalnum()) def quote_ident(text): - "Replace every instance of '"' with '""' *and* place '"' on each end" + """ + Replace every instance of '"' with '""' *and* place '"' on each end. + """ return '"' + text.replace('"', '""') + '"' def quote_ident_if_needed(text): @@ -52,7 +60,7 @@ def split(text): """ split the string up by into non-quoted and quoted portions. Zero and even numbered indexes are unquoted portions, while odd indexes are quoted - portions. + portions. Unquoted portions are regular strings, whereas quoted portions are pair-tuples specifying the quotation mechanism and the content thereof. @@ -214,7 +222,9 @@ def split_qname(text, maxsplit = -1): return split_ident(text, maxsplit = maxsplit, sep = '.') def qname(*args): - "Quote the identifiers and join them using '.'" + """ + Quote the identifiers and join them using '.'. + """ return '.'.join([quote_ident(x) for x in args]) def qname_if_needed(*args): diff --git a/postgresql/sys.py b/postgresql/sys.py index 80abe1d6..a8471317 100644 --- a/postgresql/sys.py +++ b/postgresql/sys.py @@ -30,7 +30,7 @@ def default_errformat(val): """ - Built-in error formatter. DON'T TOUCH! + Built-in error formatter. Do not change. """ it = val._e_metas() if val.creator is not None: @@ -85,11 +85,15 @@ def msghook(*args, **kw): return default_msghook(*args, **kw) def reset_errformat(with_func = errformat): - 'restore the original excformat function' + """ + Restore the original excformat function. + """ global errformat errformat = with_func def reset_msghook(with_func = msghook): - 'restore the original msghook function' + """ + Restore the original msghook function. + """ global msghook msghook = with_func diff --git a/postgresql/temporal.py b/postgresql/temporal.py index 1c128bea..a19a845d 100644 --- a/postgresql/temporal.py +++ b/postgresql/temporal.py @@ -29,8 +29,8 @@ class Temporal(object): Or `pg_tmp` can decorate a method or function. """ - #: Format the cluster directory name. - cluster_dirname = 'pg_tmp_{0}_{1}'.format + format_sandbox_id = staticmethod(('sandbox{0}_{1}').format) + cluster_dirname = staticmethod(('pg_tmp_{0}_{1}').format) cluster = None _init_pid_ = None @@ -91,7 +91,7 @@ def init(self, "environment variable to the `pg_config` path" } ): - if self.cluster is not None: + if self.cluster is not None or 'PGTEST' in os.environ: return ## # Hasn't been created yet, but doesn't matter. @@ -156,7 +156,7 @@ def init(self, unix_socket_directories = cluster.data_directory, )) - # Start it up. + # Start the database cluster. with open(self.logfile, 'w') as lfo: cluster.start(logfile = lfo) cluster.wait_until_started() @@ -165,18 +165,23 @@ def init(self, c = cluster.connection(user = 'test', database = 'template1',) with c: c.execute('create database test') - # It's ready. self.cluster = cluster def push(self): - c = self.cluster.connection(user = 'test') - c.connect() + if 'PGTEST' in os.environ: + from . import open as pg_open + c = pg_open(os.environ['PGTEST']) # Ignoring PGINSTALLATION. 
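The quoting helpers in `postgresql.string` above double embedded quotes rather than escaping them. A usage sketch makes the doubling rules concrete:

```python
from postgresql import string as pg_str

print(pg_str.quote_literal("O'Reilly"))    # 'O''Reilly'
print(pg_str.quote_ident('Mixed"Case'))    # "Mixed""Case"
print(pg_str.qname('public', 'my table'))  # "public"."my table"
```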
+ else: + c = self.cluster.connection(user = 'test') + c.connect() + extras = [] + sbid = self.format_sandbox_id(os.getpid(), self.sandbox_id + 1) - def new_pg_tmp_connection(l = extras, c = c, sbid = 'sandbox' + str(self.sandbox_id + 1)): + def new_pg_tmp_connection(l = extras, clone = c.clone, sbid = sbid): # Used to create a new connection that will be closed # when the context stack is popped along with 'db'. - l.append(c.clone()) + l.append(clone()) l[-1].settings['search_path'] = str(sbid) + ',' + l[-1].settings['search_path'] return l[-1] @@ -205,7 +210,7 @@ def new_pg_tmp_connection(l = extras, c = c, sbid = 'sandbox' + str(self.sandbox builtins.__dict__.update(local_builtins) self.sandbox_id += 1 - def pop(self, exc, drop_schema = 'DROP SCHEMA sandbox{0} CASCADE'.format): + def pop(self, exc, drop_schema = ('DROP SCHEMA {0} CASCADE').format): local_builtins, extras = self.builtins_stack.pop() self.sandbox_id -= 1 @@ -235,32 +240,36 @@ def pop(self, exc, drop_schema = 'DROP SCHEMA sandbox{0} CASCADE'.format): # Interrupted and closed all the other connections at this level; # now remove the sandbox schema. - c = self.cluster.connection(user = 'test') - with c: + xdb = local_builtins['db'] + with xdb.clone() as c: # Use a new connection so that the state of # the context connection will not have to be # contended with. - c.execute(drop_schema(self.sandbox_id+1)) + c.execute(drop_schema(self.format_sandbox_id(os.getpid(), self.sandbox_id + 1))) else: - # interrupt + # interrupt exception; avoid waiting for close pass + def _init_c(self, cxn): + cxn.connect() + sb = self.format_sandbox_id(os.getpid(), self.sandbox_id) + cxn.execute('CREATE SCHEMA ' + sb) + cxn.settings['search_path'] = ','.join((sb, cxn.settings['search_path'])) + def __enter__(self): if self.cluster is None: self.init() + self.push() try: - db.connect() - db.execute('CREATE SCHEMA sandbox' + str(self.sandbox_id)) - db.settings['search_path'] = 'sandbox' + str(self.sandbox_id) + ',' + db.settings['search_path'] + self._init_c(builtins.db) except Exception as e: # failed to initialize sandbox schema; pop it. self.pop(e) raise def __exit__(self, exc, val, tb): - if self.cluster is not None: - self.pop(val) + self.pop(val) -#: The process' temporary cluster. +#: The process' temporary cluster or connection source. 
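`pg_tmp` still works as a decorator or context manager, now optionally pointing at an existing server through the `PGTEST` environment variable instead of initializing a throwaway cluster. A sketch of decorator use, assuming either `PGINSTALLATION` or `PGTEST` is set in the environment:

```python
from postgresql.temporal import pg_tmp

@pg_tmp
def demo():
    # `db` is installed as a builtin while the sandbox is active.
    db.execute('CREATE TABLE t (i int)')
    db.execute('INSERT INTO t VALUES (1), (2)')
    print(db.prepare('SELECT count(*) FROM t').first())    # 2

demo()
```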
pg_tmp = Temporal() diff --git a/postgresql/test/cursor_integrity.py b/postgresql/test/cursor_integrity.py index 14a07acf..7bad07f2 100644 --- a/postgresql/test/cursor_integrity.py +++ b/postgresql/test/cursor_integrity.py @@ -55,7 +55,7 @@ def test_select(self): read += thisread completed.append(next[0]) if thisread: - self.failUnlessEqual( + self.assertEqual( last[0][-1][0], next[0][0][0] - 1, "first row(-1) of next failed to match the last row of the previous" ) @@ -63,8 +63,8 @@ def test_select(self): elif next[1] != 0: # done break - self.failUnlessEqual(read, limit) - self.failUnlessEqual(list(range(-1, limit)), [ + self.assertEqual(read, limit) + self.assertEqual(list(range(-1, limit)), [ x[0] for x in itertools.chain(*completed) ]) @@ -88,7 +88,7 @@ def test_copy_out(self): read += thisread completed.append(next[0]) if thisread: - self.failUnlessEqual( + self.assertEqual( last[0][-1], next[0][0] - 1, "first row(-1) of next failed to match the last row of the previous" ) @@ -96,8 +96,8 @@ def test_copy_out(self): elif next[1] != 0: # done break - self.failUnlessEqual(read, limit) - self.failUnlessEqual( + self.assertEqual(read, limit) + self.assertEqual( list(range(-1, limit)), list(itertools.chain(*completed)) ) diff --git a/postgresql/test/test_cluster.py b/postgresql/test/test_cluster.py index 4f781f6c..027b5fda 100644 --- a/postgresql/test/test_cluster.py +++ b/postgresql/test/test_cluster.py @@ -9,19 +9,16 @@ from .. import installation from ..cluster import Cluster, ClusterStartupError -default_install = installation.default() -if default_install is None: - sys.stderr.write("ERROR: cannot find 'default' pg_config\n") - sys.stderr.write("HINT: set the PGINSTALLATION environment variable to the `pg_config` path\n") - sys.exit(1) +default_installation = installation.default() class test_cluster(unittest.TestCase): def setUp(self): - self.cluster = Cluster(default_install, 'test_cluster',) + self.cluster = Cluster(default_installation, 'test_cluster',) def tearDown(self): - self.cluster.drop() - self.cluster = None + if self.cluster.installation is not None: + self.cluster.drop() + self.cluster = None def start_cluster(self, logfile = None): self.cluster.start(logfile = logfile) @@ -46,6 +43,7 @@ def init(self, *args, **kw): usd : self.cluster.data_directory, }) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def testSilentMode(self): self.init() self.cluster.settings['silent_mode'] = 'on' @@ -66,6 +64,7 @@ def testSilentMode(self): elif self.cluster.installation.version_info[:2] >= (9, 2): self.fail("silent_mode unexpectedly supported on PostgreSQL >=9.2") + @unittest.skipIf(default_installation is None, "no installation provided by environment") def testSuperPassword(self): self.init( user = 'test', @@ -81,8 +80,11 @@ def testSuperPassword(self): with c: self.assertEqual(c.prepare('select 1').first(), 1) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def testNoParameters(self): - 'simple init and drop' + """ + Simple init and drop. 
+ """ self.init() self.start_cluster() diff --git a/postgresql/test/test_configfile.py b/postgresql/test/test_configfile.py index f57a3c0f..85d30b66 100644 --- a/postgresql/test/test_configfile.py +++ b/postgresql/test/test_configfile.py @@ -237,7 +237,7 @@ def testAroma(self): self.assertTrue( nlines[:4] == lines[:4] ) - + def testSelection(self): # Sanity red = configfile.read_config(['foo = bar'+os.linesep, 'bar = foo']) diff --git a/postgresql/test/test_connect.py b/postgresql/test/test_connect.py index 4e29b956..22834a84 100644 --- a/postgresql/test/test_connect.py +++ b/postgresql/test/test_connect.py @@ -18,6 +18,7 @@ from .. import driver as pg_driver from .. import open as pg_open +default_installation = installation.default() def check_for_ipv6(): result = False @@ -47,28 +48,28 @@ class TestCaseWithCluster(unittest.TestCase): """ postgresql.driver *interface* tests. """ + installation = default_installation + @property + def _crt(self): + return self.params.get('sslrootcrtfile') or None + def __init__(self, *args, **kw): super().__init__(*args, **kw) - self.installation = installation.default() self.cluster_path = \ 'pypg_test_' \ + str(os.getpid()) + getattr(self, 'cluster_path_suffix', '') - if self.installation is None: - sys.stderr.write("ERROR: cannot find 'default' pg_config\n") - sys.stderr.write( - "HINT: set the PGINSTALLATION environment variable to the `pg_config` path\n" - ) - sys.exit(1) - self.cluster = pg_cluster.Cluster( self.installation, self.cluster_path, ) - if self.cluster.initialized(): - self.cluster.drop() - self.disable_replication = self.installation.version_info[:2] > (9, 6) + @property + def disable_replication(self): + """ + Whether replication settings should be disabled. + """ + return self.installation.version_info[:2] > (9, 6) def configure_cluster(self): self.cluster_port = find_available_port() @@ -82,6 +83,11 @@ def configure_cluster(self): if has_ipv6: listen_addresses += ',::1' + if self.cluster.installation.version_info >= (10, 0): + pwe = 'md5' + else: + pwe = 'on' + self.cluster.settings.update(dict( port = str(self.cluster_port), max_connections = '6', @@ -89,6 +95,7 @@ def configure_cluster(self): listen_addresses = listen_addresses, log_destination = 'stderr', log_min_messages = 'FATAL', + password_encryption = pwe, )) if self.disable_replication: @@ -115,6 +122,7 @@ def initialize_database(self): c = self.cluster.connection( user = 'test', database = 'template1', + sslrootcrtfile = self._crt, ) with c: if c.prepare( @@ -124,27 +132,37 @@ def initialize_database(self): c.execute('create database test') def connection(self, *args, **kw): - return self.cluster.connection(*args, user = 'test', **kw) + return self.cluster.connection(*args, user = 'test', **self.params, **kw) + + def drop_cluster(self): + if self.cluster.initialized(): + self.cluster.drop() def run(self, *args, **kw): - if not self.cluster.initialized(): - self.cluster.encoding = 'utf-8' - self.cluster.init( - user = 'test', - encoding = self.cluster.encoding, - logfile = None, - ) - sys.stderr.write('*') - try: - atexit.register(self.cluster.drop) - self.configure_cluster() - self.cluster.start(logfile = sys.stdout) - self.cluster.wait_until_started() - self.initialize_database() - except Exception: - self.cluster.drop() - atexit.unregister(self.cluster.drop) - raise + self.params = {} + + if 'PGINSTALLATION' not in os.environ: + # Expect tests to show skipped. + return super().run(*args, **kw) + + # From prior test run? 
+ if self.cluster.initialized(): + self.cluster.drop() + + self.cluster.encoding = 'utf-8' + self.cluster.init( + user = 'test', + encoding = self.cluster.encoding, + logfile = None, + ) + sys.stderr.write('*') + + atexit.register(self.drop_cluster) + self.configure_cluster() + self.cluster.start(logfile = sys.stdout) + self.cluster.wait_until_started() + self.initialize_database() + if not self.cluster.running(): self.cluster.start() self.cluster.wait_until_started() @@ -157,12 +175,11 @@ def run(self, *args, **kw): class test_connect(TestCaseWithCluster): """ - postgresql.driver connectivity tests + postgresql.driver connection tests """ ip6 = '::1' ip4 = '127.0.0.1' host = 'localhost' - params = {} cluster_path_suffix = '_test_connect' mk_common_users = """ @@ -179,9 +196,10 @@ class test_connect(TestCaseWithCluster): def __init__(self, *args, **kw): super().__init__(*args,**kw) - # 8.4 nixed this. - vi = self.cluster.installation.version_info - self.check_crypt_user = (vi < (8,4)) + + @property + def check_crypt_user(self): + return (self.cluster.installation.version_info < (8,4)) def configure_cluster(self): super().configure_cluster() @@ -215,84 +233,101 @@ def configure_cluster(self): def initialize_database(self): super().initialize_database() - with self.cluster.connection(user = 'test') as db: + with self.connection() as db: db.execute(self.mk_common_users) if self.check_crypt_user: db.execute(self.mk_crypt_user) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_pg_open_SQL_ASCII(self): - # postgresql.open host, port = self.cluster.address() + dbctx = self.params + # test simple locators.. with pg_open( 'pq://' + 'md5:' + 'md5_password@' + host + ':' + str(port) \ - + '/test?client_encoding=SQL_ASCII' + + '/test?client_encoding=SQL_ASCII', + **dbctx ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) self.assertEqual(db.settings['client_encoding'], 'SQL_ASCII') self.assertTrue(db.closed) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_pg_open_keywords(self): host, port = self.cluster.address() - # straight test, no IRI + dbctx = self.params + + # Keywords only, no indicator. with pg_open( user = 'md5', password = 'md5_password', host = host, port = port, - database = 'test' + database = 'test', + **dbctx, ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) - self.assertTrue(db.closed) - # composite test + + # Keyword and indicator source. with pg_open( "pq://md5:md5_password@", host = host, port = port, - database = 'test' + database = 'test', + **dbctx, ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) - # override test + + # Keyword override. with pg_open( "pq://md5:foobar@", password = 'md5_password', host = host, port = port, - database = 'test' + database = 'test', + **dbctx, ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) - # and, one with some settings + + # Settings override. with pg_open( "pq://md5:foobar@?search_path=ieeee", password = 'md5_password', host = host, port = port, database = 'test', - settings = {'search_path' : 'public'} + settings = {'search_path' : 'public'}, + **dbctx, ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) self.assertEqual(db.settings['search_path'], 'public') + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_pg_open(self): - # postgresql.open host, port = self.cluster.address() + dbctx = self.params + # test simple locators.. 
with pg_open( 'pq://' + 'md5:' + 'md5_password@' + host + ':' + str(port) \ - + '/test' + + '/test', + **dbctx, ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) self.assertTrue(db.closed) with pg_open( 'pq://' + 'password:' + 'password_password@' + host + ':' + str(port) \ - + '/test' + + '/test', + **dbctx, ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) self.assertTrue(db.closed) with pg_open( - 'pq://' + 'trusted@' + host + ':' + str(port) + '/test' + 'pq://' + 'trusted@' + host + ':' + str(port) + '/test', + **dbctx, ) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) self.assertTrue(db.closed) @@ -308,7 +343,7 @@ def test_pg_open(self): os.environ['PGPORT'] = str(port) os.environ['PGDATABASE'] = 'test' # No arguments, the environment provided everything. - with pg_open() as db: + with pg_open(**dbctx) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) self.assertEqual(db.prepare('select current_user').first(), 'md5') self.assertTrue(db.closed) @@ -338,7 +373,7 @@ def test_pg_open(self): try: os.environ['PGSERVICE'] = 'myserv' os.environ['PGSYSCONFDIR'] = os.getcwd() - with pg_open() as db: + with pg_open(**dbctx) as db: self.assertEqual(db.prepare('select 1')(), [(1,)]) self.assertEqual(db.prepare('select current_user').first(), 'password') self.assertEqual(db.settings['search_path'], 'public') @@ -355,6 +390,7 @@ def test_pg_open(self): if os.path.exists('pg_service.conf'): os.remove('pg_service.conf') + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_dbapi_connect(self): host, port = self.cluster.address() MD5 = dbapi20.connect( @@ -410,6 +446,7 @@ def test_dbapi_connect(self): TRUST.cursor().execute, 'select 1' ) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_dbapi_connect_failure(self): host, port = self.cluster.address() badlogin = (lambda: dbapi20.connect( @@ -421,6 +458,7 @@ def test_dbapi_connect_failure(self): )) self.assertRaises(pg_exc.ClientCannotConnectError, badlogin) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_IP4_connect(self): C = pg_driver.default.ip4( user = 'test', @@ -432,18 +470,20 @@ def test_IP4_connect(self): with C() as c: self.assertEqual(c.prepare('select 1').first(), 1) - if has_ipv6: - def test_IP6_connect(self): - C = pg_driver.default.ip6( - user = 'test', - host = '::1', - database = 'test', - port = self.cluster.address()[1], - **self.params - ) - with C() as c: - self.assertEqual(c.prepare('select 1').first(), 1) + @unittest.skipIf(default_installation is None, "no installation provided by environment") + @unittest.skipIf(not has_ipv6, "platform may not support IPv6") + def test_IP6_connect(self): + C = pg_driver.default.ip6( + user = 'test', + host = '::1', + database = 'test', + port = self.cluster.address()[1], + **self.params + ) + with C() as c: + self.assertEqual(c.prepare('select 1').first(), 1) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_Host_connect(self): C = pg_driver.default.host( user = 'test', @@ -455,6 +495,7 @@ def test_Host_connect(self): with C() as c: self.assertEqual(c.prepare('select 1').first(), 1) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_md5_connect(self): c = self.cluster.connection( user = 'md5', @@ -465,6 +506,7 @@ def test_md5_connect(self): with c: self.assertEqual(c.prepare('select current_user').first(), 
'md5') + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_crypt_connect(self): if self.check_crypt_user: c = self.cluster.connection( @@ -476,15 +518,18 @@ def test_crypt_connect(self): with c: self.assertEqual(c.prepare('select current_user').first(), 'crypt') + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_password_connect(self): c = self.cluster.connection( user = 'password', password = 'password_password', database = 'test', + sslrootcrtfile = self._crt, ) with c: self.assertEqual(c.prepare('select current_user').first(), 'password') + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_trusted_connect(self): c = self.cluster.connection( user = 'trusted', @@ -495,9 +540,11 @@ def test_trusted_connect(self): with c: self.assertEqual(c.prepare('select current_user').first(), 'trusted') + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_Unix_connect(self): if not has_unix_sock: return + unix_domain_socket = os.path.join( self.cluster.data_directory, '.s.PGSQL.' + self.cluster.settings['port'] @@ -510,6 +557,7 @@ def test_Unix_connect(self): self.assertEqual(c.prepare('select 1').first(), 1) self.assertEqual(c.client_address, None) + @unittest.skipIf(default_installation is None, "no installation provided by environment") def test_pg_open_unix(self): if not has_unix_sock: return diff --git a/postgresql/test/test_dbapi20.py b/postgresql/test/test_dbapi20.py index 3cbd6626..6a0f2380 100644 --- a/postgresql/test/test_dbapi20.py +++ b/postgresql/test/test_dbapi20.py @@ -93,27 +93,21 @@ def executeDDL1(self,cursor): def executeDDL2(self,cursor): cursor.execute(self.ddl2) + def setUp(self): + pg_tmp.init() + pg_tmp.push() + pg_tmp._init_c(db) + def tearDown(self): - con = self._connect() - try: - cur = con.cursor() - for ddl in (self.xddl1, self.xddl2): - try: - cur.execute(ddl) - con.commit() - except self.driver.Error: - # Assume table didn't exist. Other tests will check if - # execute is busted. - pass - finally: - con.close() + pg_tmp.pop(None) def _connect(self): - pg_tmp.init() - host, port = pg_tmp.cluster.address() - return self.driver.connect( - user = 'test', host = host, port = port, - ) + c = db.clone() + c.__class__ = self.driver.Connection + c._xact = c.xact() + c._xact.start() + c._dbapi_connected_flag = True + return c def test_connect(self): con = self._connect() @@ -708,7 +702,7 @@ def test_mixedfetch(self): def help_nextset_setUp(self,cur): ''' Should create a procedure called deleteme - that returns two result sets, first the + that returns two result sets, first the number of rows in booze then "name from booze" ''' cur.execute('select name from ' + self.booze_name) diff --git a/postgresql/test/test_driver.py b/postgresql/test/test_driver.py index 62740e46..df413314 100644 --- a/postgresql/test/test_driver.py +++ b/postgresql/test/test_driver.py @@ -538,12 +538,16 @@ def testStatementAndCursorMetadata(self): self.assertEqual(tuple(c.pg_column_types), (pg_types.TEXTOID, pg_types.VARCHAROID)) self.assertEqual(tuple(c.column_types), (str,str)) - db.execute("CREATE TYPE public.myudt AS (i int)") + # Should be pg_temp or sandbox. 
+ schema = db.settings['search_path'].split(',')[0] + typpath = '"%s"."myudt"' %(schema,) + + db.execute("CREATE TYPE myudt AS (i int)") myudt_oid = db.prepare("select oid from pg_type WHERE typname='myudt'").first() - ps = db.prepare("SELECT $1::text AS my_column1, $2::varchar AS my_column2, $3::public.myudt AS my_column3") + ps = db.prepare("SELECT $1::text AS my_column1, $2::varchar AS my_column2, $3::myudt AS my_column3") self.assertEqual(tuple(ps.column_names), ('my_column1','my_column2', 'my_column3')) - self.assertEqual(tuple(ps.sql_column_types), ('pg_catalog.text', 'CHARACTER VARYING', '"public"."myudt"')) - self.assertEqual(tuple(ps.sql_parameter_types), ('pg_catalog.text', 'CHARACTER VARYING', '"public"."myudt"')) + self.assertEqual(tuple(ps.sql_column_types), ('pg_catalog.text', 'CHARACTER VARYING', typpath)) + self.assertEqual(tuple(ps.sql_parameter_types), ('pg_catalog.text', 'CHARACTER VARYING', typpath)) self.assertEqual(tuple(ps.pg_column_types), ( pg_types.TEXTOID, pg_types.VARCHAROID, myudt_oid) ) @@ -554,7 +558,7 @@ def testStatementAndCursorMetadata(self): self.assertEqual(tuple(ps.column_types), (str,str,tuple)) c = ps.declare('textdata', 'varchardata', (123,)) self.assertEqual(tuple(c.column_names), ('my_column1','my_column2', 'my_column3')) - self.assertEqual(tuple(c.sql_column_types), ('pg_catalog.text', 'CHARACTER VARYING', '"public"."myudt"')) + self.assertEqual(tuple(c.sql_column_types), ('pg_catalog.text', 'CHARACTER VARYING', typpath)) self.assertEqual(tuple(c.pg_column_types), ( pg_types.TEXTOID, pg_types.VARCHAROID, myudt_oid )) @@ -820,6 +824,27 @@ def testSelectInXact(self): with db.xact(): self.select() + @pg_tmp + def testTransactionAlias(self): + self.assertEqual(db.transaction, db.xact) + + try: + with db.transaction(): + db.execute("CREATE TABLE t (i int);") + raise Exception('some failure') + except: + pass + else: + self.fail("expected exception was not raised") + + try: + db.query("select * from t") + except: + # No table. + pass + else: + self.fail("transaction abort had no effect") + def cursor_read(self): ps = db.prepare("SELECT i FROM generate_series(0, (2^8)::int - 1) AS g(i)") c = ps.declare() diff --git a/postgresql/test/test_iri.py b/postgresql/test/test_iri.py index cb6a0abc..0379302f 100644 --- a/postgresql/test/test_iri.py +++ b/postgresql/test/test_iri.py @@ -18,6 +18,7 @@ ':pass@', 'u:p@h', 'u:p@h:1', + 'postgres://host/database', 'pq://user:password@host:port/database?setting=value#public,private', 'pq://fæm.com:123/õéf/á?param=val', 'pq://l»»@fæm.com:123/õéf/á?param=val', @@ -84,6 +85,20 @@ ] class test_iri(unittest.TestCase): + def testAlternateSchemes(self): + field = pg_iri.parse("postgres://host")['host'] + self.assertEqual(field, 'host') + + field = pg_iri.parse("postgresql://host")['host'] + self.assertEqual(field, 'host') + + try: + pg_iri.parse("reject://host") + except ValueError: + pass + else: + self.fail("unacceptable IRI scheme not rejected") + def testIP6Hosts(self): """ Validate that IPv6 hosts are properly extracted. @@ -101,7 +116,9 @@ def testIP6Hosts(self): self.assertEqual(p['host'], h) def testPresentPasswordObscure(self): - "password is present in IRI, and obscure it" + """ + Password is present in IRI, and obscure it. 
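The new `testTransactionAlias` above exercises `db.transaction` as an alias of `db.xact`: raising inside the block aborts it and the work is rolled back. A sketch against an already-open connection; the target URI is a placeholder:

```python
import postgresql

db = postgresql.open('pq://test@localhost/test')    # placeholder target
try:
    with db.transaction():
        db.execute('CREATE TABLE t (i int)')
        raise RuntimeError('abort the block')
except RuntimeError:
    pass

# The CREATE TABLE was rolled back along with the rest of the block.
print(db.prepare("SELECT count(*) FROM pg_tables WHERE tablename = 't'").first())    # 0
```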
+ """ s = 'pq://user:pass@host:port/dbname' o = 'pq://user:***@host:port/dbname' p = pg_iri.parse(s) @@ -109,7 +126,9 @@ def testPresentPasswordObscure(self): self.assertEqual(ps, o) def testPresentPasswordObscure(self): - "password is *not* present in IRI, and do nothing" + """ + Password is *not* present in IRI, and do nothing. + """ s = 'pq://user@host:port/dbname' o = 'pq://user@host:port/dbname' p = pg_iri.parse(s) diff --git a/postgresql/test/test_ssl_connect.py b/postgresql/test/test_ssl_connect.py index ce2e3e2c..30f511d0 100644 --- a/postgresql/test/test_ssl_connect.py +++ b/postgresql/test/test_ssl_connect.py @@ -10,89 +10,102 @@ from ..driver import dbapi20 from . import test_connect +default_installation = test_connect.default_installation + +has_ssl = False +if default_installation is not None: + has_ssl = default_installation.ssl + server_key = """ -----BEGIN RSA PRIVATE KEY----- -MIICXAIBAAKBgQCy8veVaqL6MZVT8o0j98ggZYfibGwSN4XGC4rfineA2QZhi8t+ -zrzfOS10vLXKtgiIpevHeQbDlrqFDPUDowozurg+jfro2L1jzQjZPdgqOUs+YjKh -EO0Ya7NORO7ZgBx8WveXq30k4l8DK41jvpxRyBb9aqNWG4cB7fJqVTwZrwIDAQAB -AoGAJ74URGfheEVoz7MPq4xNMvy5mAzSV51jJV/M4OakscYBR8q/UBNkGQNe2A1N -Jo8VCBwpaCy11txz4jbFd6BPFFykgXleuRvMxoTv1qV0dZZ0X0ESNEAnjoHtjin/ -25mxsZTR6ucejHqXD9qE9NvFQ+wLv6Xo5rgDpx0onvgLA3kCQQDn4GeMkCfPZCve -lDUK+TpJnLYupyElZiidoFMITlFo5WoWNJror2W42A5TD9sZ23pGSxw7ypiWIF4f -ukGT5ZSzAkEAxZDwUUhgtoJIK7E9sCJM4AvcjDxGjslbUI/SmQTT+aTNCAmcIRrl -kq3WMkPjxi/QFEdkIpPsV9Kc94oQ/8b9FQJBAKHxRQCTsWoTsNvbsIwAcif1Lfu5 -N9oR1i34SeVUJWFYUFY/2SzHSwjkxGRYf5I4idZMIOTVYun+ox4PjDtJrScCQEQ4 -RiNrIKok1pLvwuNdFLqQnfl2ns6TTQrGfuwDtMaRV5Mc7mKoDPnXOQ1mT/KRdAJs -nHEsLwIsYbNAY5pOtfkCQDOy2Ffe7Z1YzFZXCTzpcq4mvMOPEUqlIX6hACNJGhgt -1EpruPwqR2PYDOIC4sXCaSogL8YyjI+Jlhm5kEJ4GaU= +MIIJKQIBAAKCAgEAp6C6t3exwgx5QQjeoW2vtawSl9SMhsNKfwGVh97gStBCHNqZ +DuO6nn5qp3GmzkDII+B8uAJPe5znHSlqj2g13EiFENeaF3G9l1uzaWGEvuFyU2sq +x3lu/pJz6ISEhlogkrGz9inmMcLaNLzm4XbXR/9pjf3QKq7xPH0CacjSzeA9gfAm +CjKJM/DxkrWeyKvBJuVCZbDPbCHtS1MJvAcU0DL9wdfPr4+2P4rVjzBgbzUzPUXL +DT/ewAk94aJPZAWAvtNrdbXjSvIJ/CWBedLtyCpHPchRwaOdJrkZYItYRqYP7SKM +rwddTbrQ/70sCHCS9Yq7X6NO9ONNVrgLhVQm3Ua3FsGyKcU/bx+xEYQsAsCJj7Ps +WdRhImU/3bdHqPobwyKRbssa5iz1rrwdQ0eFUakv+he3nqXLUqmqOs4RrrM5OvRs +e/JCi5N50NlRXkiix2u909vCdPQFzviiVQbkpqzSmejN0PF3GYFpc0+c7HDp78J9 +YEd+WMvx16LABVy9Kq5eYQbGQOmaWzH01fH+h3vxGnA4G9ArXGPL93P0+ztNhHJf +XBg5bwNzy5cca4roy6QNx87M/+n23iEHE4Bn9uulYJsXx2urUOAN9WCJTKYULTfu +IChWIRDy5ceYVcedHiuhRO90WyGsmwILAoAV0jebDosMK6Q+kIoOM2DT1n8CAwEA +AQKCAgAoDKTPtM9Jl4VY3m+ijfxPIX+HuwagJASmd5BsV/mqpjtFfYzYG9y4hWeh +/etml9+5gqcJp7OpywEE3KJTBQjpSoJQVdLBCzHK+ePRp7T5jg+skow0AHVeaUs8 +IH0xRFNH+SEQDU6sUOulcgSPlb81unZTsHKN4CJO22c6Mvr6qTrI0sGj6hMRz91H +uhDnzPFnA5trhGTqZui0+G/49pAodiZeq9s5DNL0N41ympJPv5wwZX5v+fSUWSDp +ycfCE/aAoS6pfv2BKHbuQV+/5X9eNYuz3Sp7Y0XmvI6tnF1I8+AWPgzyvIW0TpAk +qePdWFgkRjMiVHhG1g/iSjKmdkaacIqfOmaaUO//r5uj8L4rSxcTtqfM4CoUwiGo +Iqj0fCMQp+G+QCyMJzm0d2Ctg5mOMxFbl3jk09Z2u/ZaUpauvJ0S8WIEkJ0BMdGN +AqOtBFD7xOo6od2+7zreBVJQAV15owwi578Jk86skp5zY400IlV51yLqM6BQa3zd +Ft4xF2up0e/n7xVWW3twY8m/4i+ie+20hap9730UENo1XGy9iIIN8bxHU9+NcHdL +AP71Zgqa2nC2Wy7sCdRkt8c7P2VuraOoWgsOFvShhNOdVWR/LH/WwQaBzG9nj2u3 +0hmaDJezjdBGEJk3EhNocrMxYV1+L/MBgT6jBABx0b98K9YAEQKCAQEA03u/PPBQ +9H3ybGHC/JpfG+hLd6/Ux8Eq69i4rrduYshRFnwKtSjyfAPdtpLPhx3/N3uuxmCK +2VO+hwMKCEgk8Sb/qp0z2Dthvl2KCHUXOilFEh6B5J1nQi3OJVxalR3/yDp3ir8F +TdptY1gybWBFGdwnKSdHEiCr3+3k9OoSPeqYUuHuzBw2s8Zcet8RirJbtJVv5VyY +WNnzv/vDaEsMLENm0opmEAkFw+YW3ltolgPXyKfmgtXOKAk2s04DEwC0sHmiUvBY +4CdX6TBD7DXNEkl+bpOA5j92USGrlAiKkxeq+igQ7dPDhlmYcAahJodV3fyBPwqz 
+5pa6SWxQMNhRYwKCAQEAyum6pSHGHoI8twxpf/sgG3wKwUN+BQXoKustueeU66GV +P5xN+4tFmxJCRegFnfRB/IS9Oi5tety1BgYUA8z7h3pRz3ed3FUF0UXCDhgnqmp3 +XWpa9MBkoA8MO+/s+k10CZz5doR9cS+l2c7XfrlwHn21juScfGEsaxhgGBYbDVlW +IehjNERjVYyl4oHG9H/baXGRLaYfaFummwNGivWI0kqn8b00Sc0uW28LAmze5/IM +2simidgDJjV8EScta8o8uF6fe/3WKvGas7/NwVW+zP+Rs/sgsqa9FHQ2FZYRzIrw +5VpnGbz69SxRkbqLdPoKNQrcGOUdDmXrNZds1BAfNQKCAQEArztnHzhE7AD8ASAU +L7g9vGMDPT3dQlLlnJxrkqF8/q7auZW4TZmLKoUNjf0hpeSOF0wNamSOSDtisH4t +LuWQbp0Q1S8CyVWSzOi2ugFDaLbPe475tBNUfvpzSHO4vrwnt6HycW2MGJE3eEyZ +JBXTy/SmIixgcD3QDHES+HiG+vTKmEqK0mdCUD25XTo+T70vzXbRS6wos96MYPRc +Wqtsf7StmyCAJyNCuqqJIl99TmgKwUGV96zu8C+KOpIWbAV2so9ml/B8w+b1qcuL +TErcDB4He9oOwTmucNVEVRmqsOy4iCTwug9wgH72l0R2/PTAinpyIWld3V/hJXtx +CrgC3wKCAQEAiTTQk3ap+9k+6tvGvtZ1WIBg2Vwk64qZ+eN60PlKFqb1P8UWaiA7 +mecXzyNcIPmYYQL03VGlj+2Lrp4PjJ5f+rT4etw8b09ClsafuF4W/EHvosgW5ubt +Y9mpASJ0ULBs5U8y1DQ0ioOYlxYpWzRTHxsL2Kq3MdeXbHdYCxFvi3A8MMNtyVrw +/FkVlnsAqDWIjN1RONfa5vsKRklJuw7aTLBUrb6ti7XlQchtXl91vstKa+o/yne5 +cW27DfI64Wcn9ddt6i6zUeh7Hk509+VeFko+IMCP1J2wvxLxu1j1giT1TXD6xEmo +PH6STYMhZ6DnpARK3b6XDjRWfq981ExufQKCAQAwPtgINZF5c5GjIyn0EZh5cK4l +Ef7E7qXHFYV9yH4dswE9hdOD6IggaZTv1XvrwH5SN9Kt8FOXbPMsARSlD1kUNWsl +aTuco3xmCQNtq/ydN3OGMKOgUV3egzc1xWKSB8txOnfwZyEoHyCT6EQUQgF0ePLm +jcq9ONsyyLWZnRc7qxfJIwb7zCNAvOQezd+J+sDcqUQShfc3tzhqLmfaEOQz/Bz/ +4Sy6OIsujW1LWiJ//B0QXxxhjWd8NKmuTQC1cyKKXUh8iXvAO0CNjhdcZxjN+07n +JSuuwpLFnQtfda1VpNg0seYqbihuuVJpOA55/tlu1BiakdIW6DHB0wrMOL50 -----END RSA PRIVATE KEY----- """ server_crt = """ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: - a1:02:62:34:22:0d:45:6a - Signature Algorithm: md5WithRSAEncryption - Issuer: C=US, ST=Arizona, L=Nowhere, O=ACME Inc, OU=Test Division, CN=test.python.projects.postgresql.org - Validity - Not Before: Feb 18 15:52:20 2009 GMT - Not After : Mar 20 15:52:20 2009 GMT - Subject: C=US, ST=Arizona, L=Nowhere, O=ACME Inc, OU=Test Division, CN=test.python.projects.postgresql.org - Subject Public Key Info: - Public Key Algorithm: rsaEncryption - RSA Public Key: (1024 bit) - Modulus (1024 bit): - 00:b2:f2:f7:95:6a:a2:fa:31:95:53:f2:8d:23:f7: - c8:20:65:87:e2:6c:6c:12:37:85:c6:0b:8a:df:8a: - 77:80:d9:06:61:8b:cb:7e:ce:bc:df:39:2d:74:bc: - b5:ca:b6:08:88:a5:eb:c7:79:06:c3:96:ba:85:0c: - f5:03:a3:0a:33:ba:b8:3e:8d:fa:e8:d8:bd:63:cd: - 08:d9:3d:d8:2a:39:4b:3e:62:32:a1:10:ed:18:6b: - b3:4e:44:ee:d9:80:1c:7c:5a:f7:97:ab:7d:24:e2: - 5f:03:2b:8d:63:be:9c:51:c8:16:fd:6a:a3:56:1b: - 87:01:ed:f2:6a:55:3c:19:af - Exponent: 65537 (0x10001) - X509v3 extensions: - X509v3 Subject Key Identifier: - 4B:2F:4F:1A:43:75:43:DC:26:59:89:48:56:73:BB:D0:AA:95:E8:60 - X509v3 Authority Key Identifier: - keyid:4B:2F:4F:1A:43:75:43:DC:26:59:89:48:56:73:BB:D0:AA:95:E8:60 - DirName:/C=US/ST=Arizona/L=Nowhere/O=ACME Inc/OU=Test Division/CN=test.python.projects.postgresql.org - serial:A1:02:62:34:22:0D:45:6A - - X509v3 Basic Constraints: - CA:TRUE - Signature Algorithm: md5WithRSAEncryption - 24:ee:20:0f:b5:86:08:d6:3c:8f:d4:8d:16:fd:ac:e8:49:77: - 86:74:7d:b8:f3:15:51:1d:d8:65:17:5e:a8:58:aa:b0:f6:68: - 45:cb:77:9d:9f:21:81:e3:5e:86:1c:64:31:39:b6:29:5f:f1: - ec:b1:33:45:1f:0c:54:16:26:11:af:e2:23:1b:a6:03:46:9b: - 0e:63:ce:2c:02:41:26:93:bc:6f:6e:08:7e:95:b7:7a:f9:3a: - 5a:bd:47:4c:92:ce:ea:09:75:de:3d:bb:30:51:a0:c5:f1:5d: - 33:5f:c0:37:75:53:4e:6c:b4:3b:b1:a5:1b:fd:59:19:07:18: - 22:6a -----BEGIN CERTIFICATE----- -MIIDhzCCAvCgAwIBAgIJAKECYjQiDUVqMA0GCSqGSIb3DQEBBAUAMIGKMQswCQYD -VQQGEwJVUzEQMA4GA1UECBMHQXJpem9uYTEQMA4GA1UEBxMHTm93aGVyZTERMA8G 
-A1UEChMIQUNNRSBJbmMxFjAUBgNVBAsTDVRlc3QgRGl2aXNpb24xLDAqBgNVBAMT -I3Rlc3QucHl0aG9uLnByb2plY3RzLnBvc3RncmVzcWwub3JnMB4XDTA5MDIxODE1 -NTIyMFoXDTA5MDMyMDE1NTIyMFowgYoxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdB -cml6b25hMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhBQ01FIEluYzEWMBQG -A1UECxMNVGVzdCBEaXZpc2lvbjEsMCoGA1UEAxMjdGVzdC5weXRob24ucHJvamVj -dHMucG9zdGdyZXNxbC5vcmcwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALLy -95VqovoxlVPyjSP3yCBlh+JsbBI3hcYLit+Kd4DZBmGLy37OvN85LXS8tcq2CIil -68d5BsOWuoUM9QOjCjO6uD6N+ujYvWPNCNk92Co5Sz5iMqEQ7Rhrs05E7tmAHHxa -95erfSTiXwMrjWO+nFHIFv1qo1YbhwHt8mpVPBmvAgMBAAGjgfIwge8wHQYDVR0O -BBYEFEsvTxpDdUPcJlmJSFZzu9CqlehgMIG/BgNVHSMEgbcwgbSAFEsvTxpDdUPc -JlmJSFZzu9CqlehgoYGQpIGNMIGKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMHQXJp -em9uYTEQMA4GA1UEBxMHTm93aGVyZTERMA8GA1UEChMIQUNNRSBJbmMxFjAUBgNV -BAsTDVRlc3QgRGl2aXNpb24xLDAqBgNVBAMTI3Rlc3QucHl0aG9uLnByb2plY3Rz -LnBvc3RncmVzcWwub3JnggkAoQJiNCINRWowDAYDVR0TBAUwAwEB/zANBgkqhkiG -9w0BAQQFAAOBgQAk7iAPtYYI1jyP1I0W/azoSXeGdH248xVRHdhlF16oWKqw9mhF -y3ednyGB416GHGQxObYpX/HssTNFHwxUFiYRr+IjG6YDRpsOY84sAkEmk7xvbgh+ -lbd6+TpavUdMks7qCXXePbswUaDF8V0zX8A3dVNObLQ7saUb/VkZBxgiag== +MIIGOjCCBCKgAwIBAgIUPZomw8k4yyMSlXYFUrsQ7Co8LCowDQYJKoZIhvcNAQEL +BQAwgawxCzAJBgNVBAYTAlVTMRAwDgYDVQQIDAdBcml6b25hMRAwDgYDVQQHDAdQ +aG9lbml4MSAwHgYDVQQKDBdBbm9ueW1vdXMgQml0IEZhY3RvcmllczEUMBIGA1UE +CwwLRW5naW5lZXJpbmcxGjAYBgNVBAMMEXBnLXRlc3QubG9jYWxob3N0MSUwIwYJ +KoZIhvcNAQkBFhZmYWtlQHBnLXRlc3QubG9jYWxob3N0MCAXDTIzMDIwNDIwMDEx +MFoYDzIwNzcxMjAxMjAwMTEwWjCBrDELMAkGA1UEBhMCVVMxEDAOBgNVBAgMB0Fy +aXpvbmExEDAOBgNVBAcMB1Bob2VuaXgxIDAeBgNVBAoMF0Fub255bW91cyBCaXQg +RmFjdG9yaWVzMRQwEgYDVQQLDAtFbmdpbmVlcmluZzEaMBgGA1UEAwwRcGctdGVz +dC5sb2NhbGhvc3QxJTAjBgkqhkiG9w0BCQEWFmZha2VAcGctdGVzdC5sb2NhbGhv +c3QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCnoLq3d7HCDHlBCN6h +ba+1rBKX1IyGw0p/AZWH3uBK0EIc2pkO47qefmqncabOQMgj4Hy4Ak97nOcdKWqP +aDXcSIUQ15oXcb2XW7NpYYS+4XJTayrHeW7+knPohISGWiCSsbP2KeYxwto0vObh +dtdH/2mN/dAqrvE8fQJpyNLN4D2B8CYKMokz8PGStZ7Iq8Em5UJlsM9sIe1LUwm8 +BxTQMv3B18+vj7Y/itWPMGBvNTM9RcsNP97ACT3hok9kBYC+02t1teNK8gn8JYF5 +0u3IKkc9yFHBo50muRlgi1hGpg/tIoyvB11NutD/vSwIcJL1irtfo070401WuAuF +VCbdRrcWwbIpxT9vH7ERhCwCwImPs+xZ1GEiZT/dt0eo+hvDIpFuyxrmLPWuvB1D +R4VRqS/6F7eepctSqao6zhGuszk69Gx78kKLk3nQ2VFeSKLHa73T28J09AXO+KJV +BuSmrNKZ6M3Q8XcZgWlzT5zscOnvwn1gR35Yy/HXosAFXL0qrl5hBsZA6ZpbMfTV +8f6He/EacDgb0CtcY8v3c/T7O02Ecl9cGDlvA3PLlxxriujLpA3Hzsz/6fbeIQcT +gGf266VgmxfHa6tQ4A31YIlMphQtN+4gKFYhEPLlx5hVx50eK6FE73RbIaybAgsC +gBXSN5sOiwwrpD6Qig4zYNPWfwIDAQABo1AwTjAdBgNVHQ4EFgQUy0r/yzk92MJZ +skVC5HrZ76nJKHkwHwYDVR0jBBgwFoAUy0r/yzk92MJZskVC5HrZ76nJKHkwDAYD +VR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAFYbhdj3Tz1E17iXe6dGQluCy +Fdo4PTMO7MwHhrpsscWdFpzJq9dtcWwLyGYIy111WNfB4AHVg8e13Ula5B9mi2CK +7kJjRZ+fFPuoBOG+qhXurf/yDhUwavF/forTCDiL58wc6QzGxp4TmkVyZzus2ryj +WmrgkLYMSzLNbWor/kLZzGh5OCUtLFXjL4EJn4NskbeOPvTotcmsOlokNryiH/t6 +ploi0TCL8JjdVblT1uPFtytEiheySJt3SZvL7tQhDBZfhNeup45f1bpQCtPGqqPd +9aTwSaatXNWfIltBpWMiyaj+udD7hntee0pD6iPdXh13knKOwhHzLET4OHEAPZGj +V4hZly5acthz6Xu9WLCznEo9/CZ1pyltKFP2Cx3xpkoGt8GQ3QiLdNvpox0xCVYM +8kQ9XGW3lEdZ+zl02flaN/Mah24RzDFAlceapSJLGg47Lrct+QWNuOo0LlAkA6Ir +XD96B4pjcfHmM1Qg0FROWed0UuDnnqFxM+4tyEnnPfhd6lgkQA8oVNJg8sgSm+Tl +NKdWyaylxx8ElI3e1ebzmfuY+J/DvlCbVd+7ZcPLAtsMqWIFWkWf2fXiLBWAll0Q +wqnIFRifRR6wFjSW2Re3gv64ShYWxqhRYztUSKzDFqJCmOyca/Ou4Yvfo2RJtiMk +kD4TZkFt1F7QewUFoMI= -----END CERTIFICATE----- """ @@ -104,6 +117,9 @@ class test_ssl_connect(test_connect.test_connect): cluster_path_suffix = '_test_ssl_connect' def configure_cluster(self): + if not has_ssl: + return + super().configure_cluster() 
self.cluster.settings['ssl'] = 'on' with open(self.cluster.hba_file, 'a') as hba: @@ -125,9 +141,15 @@ def configure_cluster(self): os.chmod(key_file, 0o700) os.chmod(crt_file, 0o700) + self.params['sslrootcrtfile'] = crt_file + def initialize_database(self): + if not has_ssl: + return + super().initialize_database() - with self.cluster.connection(user = 'test') as db: + # Setup TLS users. + with self.cluster.connection(user = 'test', **self.params) as db: db.execute( """ CREATE USER nossl; @@ -135,6 +157,8 @@ def initialize_database(self): """ ) + @unittest.skipIf(default_installation is None, "no installation provided by environment") + @unittest.skipIf(not has_ssl, "could not detect installation tls") def test_ssl_mode_require(self): host, port = self.cluster.address() params = dict(self.params) @@ -167,6 +191,8 @@ def test_ssl_mode_require(self): self.assertEqual(c.prepare('select 1').first(), 1) self.assertEqual(c.security, 'ssl') + @unittest.skipIf(default_installation is None, "no installation provided by environment") + @unittest.skipIf(not has_ssl, "could not detect installation tls") def test_ssl_mode_disable(self): host, port = self.cluster.address() params = dict(self.params) @@ -200,6 +226,8 @@ def test_ssl_mode_disable(self): self.assertEqual(c.prepare('select 1').first(), 1) self.assertEqual(c.security, None) + @unittest.skipIf(default_installation is None, "no installation provided by environment") + @unittest.skipIf(not has_ssl, "could not detect installation tls") def test_ssl_mode_prefer(self): host, port = self.cluster.address() params = dict(self.params) @@ -233,6 +261,8 @@ def test_ssl_mode_prefer(self): self.assertEqual(c.prepare('select 1').first(), 1) self.assertEqual(c.security, None) + @unittest.skipIf(default_installation is None, "no installation provided by environment") + @unittest.skipIf(not has_ssl, "could not detect installation tls") def test_ssl_mode_allow(self): host, port = self.cluster.address() params = dict(self.params) diff --git a/postgresql/test/testall.py b/postgresql/test/testall.py index 366d3ad4..b32ccaa1 100644 --- a/postgresql/test/testall.py +++ b/postgresql/test/testall.py @@ -17,13 +17,9 @@ from .test_installation import * from .test_cluster import * -# These two require custom cluster configurations. +# Expects PGINSTALLATION to be set. Tests may be skipped. from .test_connect import * -# No SSL? cluster initialization will fail. -if default().ssl: - from .test_ssl_connect import * -else: - stderr.write("NOTICE: installation doesn't support SSL\n") +from .test_ssl_connect import * try: from .test_optimized import * diff --git a/postgresql/types/__init__.py b/postgresql/types/__init__.py index 6481d929..12b2543e 100644 --- a/postgresql/types/__init__.py +++ b/postgresql/types/__init__.py @@ -286,9 +286,9 @@ def detect_dimensions(hier, len = len): @classmethod def from_elements(typ, - elements : "iterable of elements in the array", - lowerbounds : "beginning of each axis" = None, - upperbounds : "upper bounds; size of each axis" = None, + elements, + lowerbounds = None, + upperbounds = None, len = len, ): """ @@ -610,7 +610,7 @@ def column_names(self, get0 = get0, get1 = get1): def transform(self, *args, **kw): """ Make a new Row after processing the values with the callables associated - with the values either by index, \*args, or my column name, \*\*kw. + with the values either by index, *args, or my column name, **kw. 
>>> r=Row.from_sequence({'col1':0,'col2':1}, (1,'two')) >>> r.transform(str) diff --git a/postgresql/types/geometry.py b/postgresql/types/geometry.py index b1ed9f89..ba996e57 100644 --- a/postgresql/types/geometry.py +++ b/postgresql/types/geometry.py @@ -95,9 +95,6 @@ def __str__(self): def parallel(self, ob): return self.slope == type(self)(ob).slope - def intersect(self, ob): - raise NotImplementedError - def perpendicular(self, ob): return (self.slope / type(self)(ob).slope) == -1.0 @@ -117,7 +114,7 @@ class Box(tuple): postgresql.types.geometry.Box(((-2.0, 0.0), (-4.0, -3.0))) :: - + (-2, 0) `high` | | @@ -170,7 +167,7 @@ def __str__(self): class Circle(tuple): """ - type for PostgreSQL circles + Type for PostgreSQL circles. """ __slots__ = () center = property(fget = get0, doc = "center of the circle (point)") diff --git a/postgresql/versionstring.py b/postgresql/versionstring.py index ccb39536..04c065a5 100644 --- a/postgresql/versionstring.py +++ b/postgresql/versionstring.py @@ -2,18 +2,16 @@ # .versionstring ## """ -PostgreSQL version parsing. +PostgreSQL version string parsing. ->>> postgresql.version.split('8.0.1') +>>> postgresql.versionstring.split('8.0.1') (8, 0, 1, None, None) """ -def split(vstr : str) -> ( - 'major','minor','patch',...,'state_class','state_level' -): +def split(vstr: str) -> tuple: """ - Split a PostgreSQL version string into a tuple - (major,minor,patch,...,state_class,state_level) + Split a PostgreSQL version string into a tuple. + (major, minor, patch, ..., state_class, state_level) """ v = vstr.strip().split('.') @@ -38,24 +36,22 @@ def split(vstr : str) -> ( vlist += [None] * ((3 - len(vlist)) + 2) return tuple(vlist) -def unsplit(vtup : tuple) -> str: - 'join a version tuple back into the original version string' +def unsplit(vtup: tuple) -> str: + """ + Join a version tuple back into the original version string. + """ svtup = [str(x) for x in vtup[:-2] if x is not None] state_class, state_level = vtup[-2:] - return '.'.join(svtup) + ( - '' if state_class is None else state_class + str(state_level) - ) + return '.'.join(svtup) + ('' if state_class is None else state_class + str(state_level)) -def normalize(split_version : "a tuple returned by `split`") -> tuple: +def normalize(split_version: tuple) -> tuple: """ Given a tuple produced by `split`, normalize the `None` objects into int(0) - or 'final' if it's the ``state_class`` + or 'final' if it's the ``state_class``. """ (*head, state_class, state_level) = split_version mmp = [x if x is not None else 0 for x in head] - return tuple( - mmp + [state_class or 'final', state_level or 0] - ) + return tuple(mmp + [state_class or 'final', state_level or 0]) default_state_class_priority = [ 'dev', diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..f581b945 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 -m validate_pyproject +# Consistent with postgresql.project and postgresql.release.distutils. +[build-system] +requires = ["setuptools >= 0"] +build-backend = "setuptools.build_meta" + +[project] +name = "py-postgresql" +version = "1.3.1" +description = "Query PostgreSQL databases using Python and the PQv3 protocol." 
+readme = "README.md" + +license.file = "LICENSE" +authors = [ + { name = "James William Pye", email = "james.pye@gmail.com" }, +] +maintainers = [ + { name = "James William Pye", email = "james.pye@gmail.com" }, +] + +requires-python = ">=3.7" +keywords = ["syncpg", "postgres", "postgresql", "sql", "driver"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "License :: OSI Approved :: MIT License", + "License :: OSI Approved :: Attribution Assurance License", + "License :: OSI Approved :: Python Software Foundation License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Topic :: Database", +] + +dependencies = [] + +[project.urls] +Documentation = "http://py-postgresql.readthedocs.io" +Issues = "https://github.com/python-postgres/fe/issues" +Source = "https://github.com/python-postgres/fe" diff --git a/readthedocs.yml b/readthedocs.yml index d53ffa68..d75e54ac 100644 --- a/readthedocs.yml +++ b/readthedocs.yml @@ -2,4 +2,4 @@ build: image: latest python: - version: 3.6 + version: 3.7
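
As a quick illustration of the reworked `postgresql.versionstring` helpers changed above, here is a minimal sketch (not part of the diff). The `'8.0.1'` result comes from the module's own docstring; the `'9.6'` values are inferred from the visible implementation (`split` pads missing components with `None`, `normalize` substitutes `0` and `'final'`), so treat them as expected rather than verified output.

```python
from postgresql import versionstring

# split() breaks a version string into (major, minor, patch, ..., state_class, state_level),
# padding absent components with None.
print(versionstring.split('8.0.1'))   # (8, 0, 1, None, None) -- per the module docstring
print(versionstring.split('9.6'))     # (9, 6, None, None, None) -- inferred from the padding logic

# unsplit() reassembles the original string, skipping None components.
print(versionstring.unsplit((8, 0, 1, None, None)))  # '8.0.1'

# normalize() replaces None with 0, and with 'final' for the state class,
# presumably so that version tuples compare cleanly as plain ints/strings.
print(versionstring.normalize(versionstring.split('9.6')))  # (9, 6, 0, 'final', 0)
```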