# postgresql/psycopg2.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""
.. dialect:: postgresql+psycopg2
    :name: psycopg2
    :dbapi: psycopg2
    :connectstring: postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]
    :url: http://pypi.python.org/pypi/psycopg2/

psycopg2 Connect Arguments
-----------------------------------

psycopg2-specific keyword arguments which are accepted by
:func:`.create_engine()` are:

* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL
  statements which support this feature. What this essentially means from a
  psycopg2 point of view is that the cursor is created using a name, e.g.
  ``connection.cursor('some name')``, which has the effect that result rows
  are not immediately pre-fetched and buffered after statement execution, but
  are instead left on the server and only retrieved as needed. SQLAlchemy's
  :class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering
  behavior when this feature is enabled, such that groups of 100 rows at a
  time are fetched over the wire to reduce conversational overhead.
  Note that the :paramref:`.Connection.execution_options.stream_results`
  execution option is a more targeted
  way of enabling this mode on a per-execution basis.

* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
  per connection.  True by default.

  .. seealso::

    :ref:`psycopg2_disable_native_unicode`

* ``isolation_level``: This option, available for all PostgreSQL dialects,
  includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
  dialect.

  .. seealso::

    :ref:`psycopg2_isolation_level`

* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
  using psycopg2's ``set_client_encoding()`` method.

  .. seealso::

    :ref:`psycopg2_unicode`

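Putting several of these keyword arguments together, a minimal sketch
(the connection details here are hypothetical)::

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        server_side_cursors=True,
        client_encoding='utf8',
        isolation_level='AUTOCOMMIT'
    )
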
Unix Domain Connections
------------------------

psycopg2 supports connecting via Unix domain sockets.   When the ``host``
portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2,
which specifies Unix-domain communication rather than TCP/IP communication::

    create_engine("postgresql+psycopg2://user:password@/dbname")

By default, the socket file used is the Unix-domain socket in ``/tmp``,
or whatever socket directory was specified when PostgreSQL
was built.  This value can be overridden by passing a pathname to psycopg2,
using ``host`` as an additional keyword argument::

    create_engine("postgresql+psycopg2://user:password@/dbname?\
host=/var/lib/postgresql")

.. seealso::

    `PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_

.. _psycopg2_execution_options:

Per-Statement/Connection Execution Options
-------------------------------------------

The following DBAPI-specific options are respected when used with
:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:

* ``isolation_level`` - Set the transaction isolation level for the lifespan of a
  :class:`.Connection` (can only be set on a connection, not a statement
  or query).   See :ref:`psycopg2_isolation_level`.

* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors -
  this feature makes use of "named" cursors in combination with special
  result handling methods so that result rows are not fully buffered.
  If ``None`` or not set, the ``server_side_cursors`` option of the
  :class:`.Engine` is used.

* ``max_row_buffer`` - when using ``stream_results``, an integer value that
  specifies the maximum number of rows to buffer at a time.  This is
  interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
  buffer will grow to ultimately store 1000 rows at a time.

  .. versionadded:: 1.0.6

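As a minimal sketch of the streaming options above, assuming an existing
``engine`` and a hypothetical ``big_table`` :class:`.Table` object::

    with engine.connect() as conn:
        result = conn.execution_options(
            stream_results=True, max_row_buffer=500
        ).execute(big_table.select())
        for row in result:
            # rows are fetched from the named cursor in batches as needed
            print(row)
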
.. _psycopg2_unicode:

Unicode with Psycopg2
----------------------

By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
Unicode objects directly - SQLAlchemy passes these values through without
change.   Psycopg2 here will encode/decode string values based on the
current "client encoding" setting; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf8``, as a more useful default::

    # postgresql.conf file

    # client_encoding = sql_ascii # actually, defaults to database
                                  # encoding
    client_encoding = utf8

A second way to affect the client encoding is to set it within Psycopg2
locally.   SQLAlchemy will call psycopg2's
:meth:`psycopg2:connection.set_client_encoding` method
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::

    # set_client_encoding() setting;
    # works for *all* PostgreSQL versions
    engine = create_engine("postgresql://user:pass@host/dbname",
                           client_encoding='utf8')

This overrides the encoding specified in the PostgreSQL client configuration.
When using the parameter in this way, the psycopg2 driver emits
``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
in all PostgreSQL versions.

Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
is **not the same** as the more recently added ``client_encoding`` parameter
now supported by libpq directly.   This is enabled when ``client_encoding``
is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
using the :paramref:`.create_engine.connect_args` parameter::

    # libpq direct parameter setting;
    # only works for PostgreSQL **9.1 and above**
    engine = create_engine("postgresql://user:pass@host/dbname",
                           connect_args={'client_encoding': 'utf8'})

    # using the query string is equivalent
    engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")

The above parameter was only added to libpq as of version 9.1 of PostgreSQL,
so using the previous method is better for cross-version support.

.. _psycopg2_disable_native_unicode:

Disabling Native Unicode
^^^^^^^^^^^^^^^^^^^^^^^^

SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
services, which are normally reserved only for those DBAPIs that don't
fully support unicode directly.  Passing ``use_native_unicode=False`` to
:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
SQLAlchemy will instead encode data itself into Python bytestrings on the way
in and coerce from bytes on the way back,
using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
obsolete as most DBAPIs now support unicode fully.
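
A minimal sketch of this configuration, with hypothetical connection
details::

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        use_native_unicode=False,
        encoding='utf-8'
    )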

Bound Parameter Styles
----------------------

The default parameter style for the psycopg2 dialect is "pyformat", where
SQL is rendered using ``%(paramname)s`` style.   This format has the limitation
that it does not accommodate the unusual case of parameter names that
actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
generates bound parameter names based on the name of a column, the presence
of these characters in a column name can lead to problems.

There are two solutions to the issue of a :class:`.schema.Column` that contains
one of these characters in its name.  One is to specify the
:paramref:`.schema.Column.key` for columns that have such names::

    measurement = Table('measurement', metadata,
        Column('Size (meters)', Integer, key='size_meters')
    )

Above, an INSERT statement such as ``measurement.insert()`` will use
``size_meters`` as the parameter name, and a SQL expression such as
``measurement.c.size_meters > 10`` will derive the bound parameter name
from the ``size_meters`` key as well.

.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
   as the source of naming when anonymous bound parameters are created
   in SQL expressions; previously, this behavior only applied to
   :meth:`.Table.insert` and :meth:`.Table.update` parameter names.

The other solution is to use a positional format; psycopg2 allows use of the
"format" paramstyle, which can be passed to
:paramref:`.create_engine.paramstyle`::

    engine = create_engine(
        'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')

With the above engine, instead of a statement like::

    INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
    {'Size (meters)': 1}

we instead see::

    INSERT INTO measurement ("Size (meters)") VALUES (%s)
    (1, )

Where above, the dictionary style is converted into a tuple with positional
style.


Transactions
------------

The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
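
For example, a minimal sketch of SAVEPOINT use via
:meth:`.Connection.begin_nested`, assuming an existing ``engine`` and a
hypothetical ``accounts`` table::

    with engine.connect() as conn:
        trans = conn.begin()
        savepoint = conn.begin_nested()   # emits SAVEPOINT
        try:
            conn.execute(accounts.update().values(balance=0))
            savepoint.commit()            # emits RELEASE SAVEPOINT
        except Exception:
            savepoint.rollback()          # emits ROLLBACK TO SAVEPOINT
        trans.commit()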

.. _psycopg2_isolation_level:

Psycopg2 Transaction Isolation Level
-------------------------------------

As discussed in :ref:`postgresql_isolation_level`,
all PostgreSQL dialects support setting of transaction isolation level
both via the ``isolation_level`` parameter passed to :func:`.create_engine`,
as well as the ``isolation_level`` argument used by
:meth:`.Connection.execution_options`.  When using the psycopg2 dialect, these
options make use of psycopg2's ``set_isolation_level()`` connection method,
rather than emitting a PostgreSQL directive; this is because psycopg2's
API-level setting is always emitted at the start of each transaction in any
case.

The psycopg2 dialect supports these constants for isolation level:

* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``

.. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using
    psycopg2.

.. seealso::

    :ref:`postgresql_isolation_level`

    :ref:`pg8000_isolation_level`

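A minimal sketch of both styles, using hypothetical connection details::

    # engine-wide isolation level
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        isolation_level="REPEATABLE READ"
    )

    # per-connection isolation level; AUTOCOMMIT allows statements such
    # as VACUUM that cannot run inside a transaction block
    with engine.connect() as conn:
        conn = conn.execution_options(isolation_level="AUTOCOMMIT")
        conn.execute("VACUUM")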

NOTICE logging
---------------

The psycopg2 dialect will log PostgreSQL NOTICE messages via the
``sqlalchemy.dialects.postgresql`` logger::

    import logging
    logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)

.. _psycopg2_hstore:

HSTORE type
------------

The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
the HSTORE type.   The SQLAlchemy psycopg2 dialect will enable this extension
by default when psycopg2 version 2.4 or greater is used, and
it is detected that the target database has the HSTORE type set up for use.
In other words, when the dialect makes the first
connection, a sequence like the following is performed:

1. Request the available HSTORE oids using
   ``psycopg2.extras.HstoreAdapter.get_oids()``.
   If this function returns a list of HSTORE identifiers, we then determine
   that the ``HSTORE`` extension is present.
   This function is **skipped** if the version of psycopg2 installed is
   less than version 2.4.

2. If the ``use_native_hstore`` flag is at its default of ``True``, and
   we've detected that ``HSTORE`` oids are available, the
   ``psycopg2.extras.register_hstore()`` extension is invoked for all
   connections.

The ``register_hstore()`` extension has the effect of **all Python
dictionaries being accepted as parameters regardless of the type of target
column in SQL**. The dictionaries are converted by this extension into a
textual HSTORE expression.  If this behavior is not desired, disable the
use of the hstore extension by setting ``use_native_hstore`` to ``False`` as
follows::

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
                use_native_hstore=False)

The ``HSTORE`` type is **still supported** when the
``psycopg2.extras.register_hstore()`` extension is not used.  It merely
means that the coercion between Python dictionaries and the HSTORE
string format, on both the parameter side and the result side, will take
place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2``,
which may be more performant.
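
To illustrate the dictionary marshalling described above, a minimal sketch
using a hypothetical ``data_table`` with an ``HSTORE`` column::

    from sqlalchemy.dialects.postgresql import HSTORE

    data_table = Table('data_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', HSTORE)
    )

    with engine.connect() as conn:
        conn.execute(
            data_table.insert(),
            data={"key1": "value1", "key2": "value2"}
        )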

"""
from __future__ import absolute_import

import re
import logging

from ... import util, exc
import decimal
from ... import processors
from ...engine import result as _result
from ...sql import expression
from ... import types as sqltypes
from .base import PGDialect, PGCompiler, \
    PGIdentifierPreparer, PGExecutionContext, \
    ENUM, _DECIMAL_TYPES, _FLOAT_TYPES,\
    _INT_TYPES, UUID
from .hstore import HSTORE
from .json import JSON, JSONB

try:
    from uuid import UUID as _python_UUID
except ImportError:
    _python_UUID = None


logger = logging.getLogger('sqlalchemy.dialects.postgresql')


class _PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)


class _PGEnum(ENUM):
    def result_processor(self, dialect, coltype):
        if self.native_enum and util.py2k and self.convert_unicode is True:
            # we can't easily use PG's extensions here because
            # the OID is on the fly, and we need to give it a python
            # function anyway - not really worth it.
            self.convert_unicode = "force_nocheck"
        return super(_PGEnum, self).result_processor(dialect, coltype)


class _PGHStore(HSTORE):
    def bind_processor(self, dialect):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).bind_processor(dialect)

    def result_processor(self, dialect, coltype):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).result_processor(dialect, coltype)


class _PGJSON(JSON):

    def result_processor(self, dialect, coltype):
        if dialect._has_native_json:
            return None
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class _PGJSONB(JSONB):

    def result_processor(self, dialect, coltype):
        if dialect._has_native_jsonb:
            return None
        else:
            return super(_PGJSONB, self).result_processor(dialect, coltype)


class _PGUUID(UUID):
    def bind_processor(self, dialect):
        if not self.as_uuid and dialect.use_native_uuid:
            nonetype = type(None)

            def process(value):
                if value is not None:
                    value = _python_UUID(value)
                return value
            return process

    def result_processor(self, dialect, coltype):
        if not self.as_uuid and dialect.use_native_uuid:
            def process(value):
                if value is not None:
                    value = str(value)
                return value
            return process


_server_side_id = util.counter()


class PGExecutionContext_psycopg2(PGExecutionContext):
    def create_server_side_cursor(self):
        # use server-side cursors:
        # http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
        ident = "c_%s_%s" % (hex(id(self))[2:],
                             hex(_server_side_id())[2:])
        return self._dbapi_connection.cursor(ident)

    def get_result_proxy(self):
        # TODO: ouch
        if logger.isEnabledFor(logging.INFO):
            self._log_notices(self.cursor)

        if self._is_server_side:
            return _result.BufferedRowResultProxy(self)
        else:
            return _result.ResultProxy(self)

    def _log_notices(self, cursor):
        for notice in cursor.connection.notices:
            # NOTICE messages have a
            # newline character at the end
            logger.info(notice.rstrip())

        cursor.connection.notices[:] = []


class PGCompiler_psycopg2(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        return text.replace('%', '%%')


class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace('%', '%%')


class PGDialect_psycopg2(PGDialect):
    driver = 'psycopg2'
    if util.py2k:
        supports_unicode_statements = False

    supports_server_side_cursors = True

    default_paramstyle = 'pyformat'
    # set to true based on psycopg2 version
    supports_sane_multi_rowcount = False
    execution_ctx_cls = PGExecutionContext_psycopg2
    statement_compiler = PGCompiler_psycopg2
    preparer = PGIdentifierPreparer_psycopg2
    psycopg2_version = (0, 0)

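    # minimum psycopg2 version at which each DBAPI-level feature
    # becomes available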
    FEATURE_VERSION_MAP = dict(
        native_json=(2, 5),
        native_jsonb=(2, 5, 4),
        sane_multi_rowcount=(2, 0, 9),
        array_oid=(2, 4, 3),
        hstore_adapter=(2, 4)
    )

    _has_native_hstore = False
    _has_native_json = False
    _has_native_jsonb = False

    engine_config_types = PGDialect.engine_config_types.union([
        ('use_native_unicode', util.asbool),
    ])

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: _PGNumeric,
            ENUM: _PGEnum,  # needs force_unicode
            sqltypes.Enum: _PGEnum,  # needs force_unicode
            HSTORE: _PGHStore,
            JSON: _PGJSON,
            sqltypes.JSON: _PGJSON,
            JSONB: _PGJSONB,
            UUID: _PGUUID
        }
    )

    def __init__(self, server_side_cursors=False, use_native_unicode=True,
                 client_encoding=None,
                 use_native_hstore=True, use_native_uuid=True,
                 **kwargs):
        PGDialect.__init__(self, **kwargs)
        self.server_side_cursors = server_side_cursors
        self.use_native_unicode = use_native_unicode
        self.use_native_hstore = use_native_hstore
        self.use_native_uuid = use_native_uuid
        self.supports_unicode_binds = use_native_unicode
        self.client_encoding = client_encoding
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                self.psycopg2_version = tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)

    def initialize(self, connection):
        super(PGDialect_psycopg2, self).initialize(connection)
        self._has_native_hstore = self.use_native_hstore and \
            self._hstore_oids(connection.connection) \
            is not None
        self._has_native_json = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json']
        self._has_native_jsonb = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb']

        # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
        self.supports_sane_multi_rowcount = \
            self.psycopg2_version >= \
            self.FEATURE_VERSION_MAP['sane_multi_rowcount']

    @classmethod
    def dbapi(cls):
        import psycopg2
        return psycopg2

    @classmethod
    def _psycopg2_extensions(cls):
        from psycopg2 import extensions
        return extensions

    @classmethod
    def _psycopg2_extras(cls):
        from psycopg2 import extras
        return extras

    @util.memoized_property
    def _isolation_lookup(self):
        extensions = self._psycopg2_extensions()
        return {
            'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
            'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
            'READ UNCOMMITTED': extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
            'REPEATABLE READ': extensions.ISOLATION_LEVEL_REPEATABLE_READ,
            'SERIALIZABLE': extensions.ISOLATION_LEVEL_SERIALIZABLE
        }

    def set_isolation_level(self, connection, level):
        try:
            level = self._isolation_lookup[level.replace('_', ' ')]
        except KeyError:
            raise exc.ArgumentError(
                "Invalid value '%s' for isolation_level. "
                "Valid isolation levels for %s are %s" %
                (level, self.name, ", ".join(self._isolation_lookup))
            )

        connection.set_isolation_level(level)

    def on_connect(self):
        extras = self._psycopg2_extras()
        extensions = self._psycopg2_extensions()

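        # assemble per-connection initializers; the callable returned
        # below runs each one against the raw DBAPI connection when the
        # pool makes a new connection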
        fns = []
        if self.client_encoding is not None:
            def on_connect(conn):
                conn.set_client_encoding(self.client_encoding)
            fns.append(on_connect)

        if self.isolation_level is not None:
            def on_connect(conn):
                self.set_isolation_level(conn, self.isolation_level)
            fns.append(on_connect)

        if self.dbapi and self.use_native_uuid:
            def on_connect(conn):
                extras.register_uuid(None, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_unicode:
            def on_connect(conn):
                extensions.register_type(extensions.UNICODE, conn)
                extensions.register_type(extensions.UNICODEARRAY, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_hstore:
            def on_connect(conn):
                hstore_oids = self._hstore_oids(conn)
                if hstore_oids is not None:
                    oid, array_oid = hstore_oids
                    kw = {'oid': oid}
                    if util.py2k:
                        kw['unicode'] = True
                    if self.psycopg2_version >= \
                            self.FEATURE_VERSION_MAP['array_oid']:
                        kw['array_oid'] = array_oid
                    extras.register_hstore(conn, **kw)
            fns.append(on_connect)

        if self.dbapi and self._json_deserializer:
            def on_connect(conn):
                if self._has_native_json:
                    extras.register_default_json(
                        conn, loads=self._json_deserializer)
                if self._has_native_jsonb:
                    extras.register_default_jsonb(
                        conn, loads=self._json_deserializer)
            fns.append(on_connect)

        if fns:
            def on_connect(conn):
                for fn in fns:
                    fn(conn)
            return on_connect
        else:
            return None

    @util.memoized_instancemethod
    def _hstore_oids(self, conn):
        if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']:
            extras = self._psycopg2_extras()
            oids = extras.HstoreAdapter.get_oids(conn)
            if oids is not None and oids[0]:
                return oids[0:2]
        return None

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.Error):
            # check the "closed" flag.  this might not be
            # present on old psycopg2 versions.   Also,
            # this flag doesn't actually help in a lot of disconnect
            # situations, so don't rely on it.
            if getattr(connection, 'closed', False):
                return True

            # checks based on strings.  in the case that .closed
            # didn't cut it, fall back onto these.
            str_e = str(e).partition("\n")[0]
            for msg in [
                # these error messages from libpq: interfaces/libpq/fe-misc.c
                # and interfaces/libpq/fe-secure.c.
                'terminating connection',
                'closed the connection',
                'connection not open',
                'could not receive data from server',
                'could not send data to server',
                # psycopg2 client errors, psycopg2/connection.h,
                # psycopg2/cursor.h
                'connection already closed',
                'cursor already closed',
                # not sure where this path is originally from, it may
                # be obsolete.   It really says "losed", not "closed".
                'losed the connection unexpectedly',
                # these can occur in newer SSL
                'connection has been closed unexpectedly',
                'SSL SYSCALL error: Bad file descriptor',
                'SSL SYSCALL error: EOF detected',
                'SSL error: decryption failed or bad record mac',
                'SSL SYSCALL error: Operation timed out',
            ]:
                idx = str_e.find(msg)
                if idx >= 0 and '"' not in str_e[:idx]:
                    return True
        return False

dialect = PGDialect_psycopg2