| 1 | """
|
|---|
| 2 | PostgreSQL database backend for Django.
|
|---|
| 3 |
|
|---|
| 4 | Requires psycopg 2: http://initd.org/projects/psycopg2
|
|---|
| 5 | """
|
|---|
| 6 |
|
|---|
| 7 | import threading
|
|---|
| 8 | import warnings
|
|---|
| 9 |
|
|---|
| 10 | from django.conf import settings
|
|---|
| 11 | from django.core.exceptions import ImproperlyConfigured
|
|---|
| 12 | from django.db import connections
|
|---|
| 13 | from django.db.backends.base.base import BaseDatabaseWrapper
|
|---|
| 14 | from django.db.utils import DatabaseError as WrappedDatabaseError
|
|---|
| 15 | from django.utils.functional import cached_property
|
|---|
| 16 | from django.utils.safestring import SafeText
|
|---|
| 17 | from django.utils.version import get_version_tuple
|
|---|
| 18 |
|
|---|
| 19 | try:
|
|---|
| 20 | import psycopg2 as Database
|
|---|
| 21 | import psycopg2.extensions
|
|---|
| 22 | import psycopg2.extras
|
|---|
| 23 | except ImportError as e:
|
|---|
| 24 | raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
|
|---|
| 25 |
|
|---|
| 26 |
|
|---|
| 27 | def psycopg2_version():
|
|---|
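    # psycopg2.__version__ can include a build suffix, e.g. "2.7.4 (dt dec pq3 ext lo64)";
    # keep only the leading version number before turning it into a tuple.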
    version = psycopg2.__version__.split(' ', 1)[0]
    return get_version_tuple(version)


PSYCOPG2_VERSION = psycopg2_version()

if PSYCOPG2_VERSION < (2, 5, 4):
    raise ImproperlyConfigured("psycopg2 version 2.5.4 or newer is required; you have %s" % psycopg2.__version__)


# Some of these import psycopg2, so import them after checking if it's installed.
from .client import DatabaseClient  # NOQA isort:skip
from .creation import DatabaseCreation  # NOQA isort:skip
from .features import DatabaseFeatures  # NOQA isort:skip
from .introspection import DatabaseIntrospection  # NOQA isort:skip
from .operations import DatabaseOperations  # NOQA isort:skip
from .schema import DatabaseSchemaEditor  # NOQA isort:skip
from .utils import utc_tzinfo_factory  # NOQA isort:skip

psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString)
psycopg2.extras.register_uuid()

# Register support for inet[] manually so we don't have to handle the Inet()
# object on load all the time.
INETARRAY_OID = 1041
INETARRAY = psycopg2.extensions.new_array_type(
    (INETARRAY_OID,),
    'INETARRAY',
    psycopg2.extensions.UNICODE,
)
psycopg2.extensions.register_type(INETARRAY)


class DatabaseWrapper(BaseDatabaseWrapper):
    vendor = 'postgresql'
    display_name = 'PostgreSQL'
    # This dictionary maps Field objects to their associated PostgreSQL column
    # types, as strings. Column-type strings can contain format strings; they'll
    # be interpolated against the values of Field.__dict__ before being output.
    # If a column type is set to None, it won't be included in the output.
    data_types = {
        'IdentityAutoField': 'integer',
        'IdentityBigAutoField': 'bigint',
        'AutoField': 'serial',
        'BigAutoField': 'bigserial',
        'BinaryField': 'bytea',
        'BooleanField': 'boolean',
        'CharField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'timestamp with time zone',
        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
        'DurationField': 'interval',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'double precision',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'inet',
        'GenericIPAddressField': 'inet',
        'NullBooleanField': 'boolean',
        'OneToOneField': 'integer',
        'PositiveIntegerField': 'integer',
        'PositiveSmallIntegerField': 'smallint',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallIntegerField': 'smallint',
        'TextField': 'text',
        'TimeField': 'time',
        'UUIDField': 'uuid',
    }
    data_type_check_constraints = {
        'PositiveIntegerField': '"%(column)s" >= 0',
        'PositiveSmallIntegerField': '"%(column)s" >= 0',
    }
    operators = {
        'exact': '= %s',
        'iexact': '= UPPER(%s)',
        'contains': 'LIKE %s',
        'icontains': 'LIKE UPPER(%s)',
        'regex': '~ %s',
        'iregex': '~* %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE %s',
        'endswith': 'LIKE %s',
        'istartswith': 'LIKE UPPER(%s)',
        'iendswith': 'LIKE UPPER(%s)',
    }

    # The patterns below are used to generate SQL pattern lookup clauses when
    # the right-hand side of the lookup isn't a raw string (it might be an expression
    # or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, %, _) should be
    # escaped on the database side.
    #
    # Note: we use str.format() here for readability as '%' is used as a wildcard for
    # the LIKE operator.
    pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
    pattern_ops = {
        'contains': "LIKE '%%' || {} || '%%'",
        'icontains': "LIKE '%%' || UPPER({}) || '%%'",
        'startswith': "LIKE {} || '%%'",
        'istartswith': "LIKE UPPER({}) || '%%'",
        'endswith': "LIKE '%%' || {}",
        'iendswith': "LIKE '%%' || UPPER({})",
    }

    Database = Database
    SchemaEditorClass = DatabaseSchemaEditor
    # Classes instantiated in __init__().
    client_class = DatabaseClient
    creation_class = DatabaseCreation
    features_class = DatabaseFeatures
    introspection_class = DatabaseIntrospection
    ops_class = DatabaseOperations
    # PostgreSQL backend-specific attributes.
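    # Counter used to generate unique names for the server-side (named)
    # cursors opened by chunked_cursor().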
    _named_cursor_idx = 0

    def get_connection_params(self):
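        """Build the keyword arguments passed to psycopg2.connect() from settings.DATABASES."""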
        settings_dict = self.settings_dict
        # None may be used to connect to the default 'postgres' db
        if settings_dict['NAME'] == '':
            raise ImproperlyConfigured(
                "settings.DATABASES is improperly configured. "
                "Please supply the NAME value.")
        if len(settings_dict['NAME'] or '') > self.ops.max_name_length():
            raise ImproperlyConfigured(
                "The database name '%s' (%d characters) is longer than "
                "PostgreSQL's limit of %d characters. Supply a shorter NAME "
                "in settings.DATABASES." % (
                    settings_dict['NAME'],
                    len(settings_dict['NAME']),
                    self.ops.max_name_length(),
                )
            )
        conn_params = {
            'database': settings_dict['NAME'] or 'postgres',
            **settings_dict['OPTIONS'],
        }
        conn_params.pop('isolation_level', None)
        if settings_dict['USER']:
            conn_params['user'] = settings_dict['USER']
        if settings_dict['PASSWORD']:
            conn_params['password'] = settings_dict['PASSWORD']
        if settings_dict['HOST']:
            conn_params['host'] = settings_dict['HOST']
        if settings_dict['PORT']:
            conn_params['port'] = settings_dict['PORT']
        return conn_params

    def get_new_connection(self, conn_params):
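        """Open a new psycopg2 connection and apply the isolation level from OPTIONS, if one is set."""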
        connection = Database.connect(**conn_params)

        # self.isolation_level must be set:
        # - after connecting to the database in order to obtain the database's
        #   default when no value is explicitly specified in options.
        # - before calling _set_autocommit() because if autocommit is on, that
        #   will set connection.isolation_level to ISOLATION_LEVEL_AUTOCOMMIT.
        options = self.settings_dict['OPTIONS']
        try:
            self.isolation_level = options['isolation_level']
        except KeyError:
            self.isolation_level = connection.isolation_level
        else:
            # Set the isolation level to the value from OPTIONS.
            if self.isolation_level != connection.isolation_level:
                connection.set_session(isolation_level=self.isolation_level)

        return connection

    def ensure_timezone(self):
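        """
        Ensure the connection's time zone matches self.timezone_name; return
        True if it had to be changed, False otherwise.
        """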
        self.ensure_connection()
        conn_timezone_name = self.connection.get_parameter_status('TimeZone')
        timezone_name = self.timezone_name
        if timezone_name and conn_timezone_name != timezone_name:
            with self.connection.cursor() as cursor:
                cursor.execute(self.ops.set_time_zone_sql(), [timezone_name])
            return True
        return False

    def init_connection_state(self):
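        # New connections always use UTF-8 as the client encoding and are
        # switched to the configured time zone if necessary.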
        self.connection.set_client_encoding('UTF8')

        timezone_changed = self.ensure_timezone()
        if timezone_changed:
            # Commit after setting the time zone (see #17062)
            if not self.get_autocommit():
                self.connection.commit()

    def create_cursor(self, name=None):
        if name:
            # In autocommit mode, the cursor will be used outside of a
            # transaction, hence use a holdable cursor.
            cursor = self.connection.cursor(name, scrollable=False, withhold=self.connection.autocommit)
        else:
            cursor = self.connection.cursor()
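        # utc_tzinfo_factory makes psycopg2 return timezone-aware (UTC)
        # datetimes when time zone support is enabled.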
        cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
        return cursor

    def chunked_cursor(self):
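        """
        Return a server-side (named) cursor so that large result sets can be
        streamed from PostgreSQL instead of fetched into memory all at once,
        e.g. by QuerySet.iterator().
        """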
        self._named_cursor_idx += 1
        return self._cursor(
            name='_django_curs_%d_%d' % (
                # Avoid reusing name in other threads
                threading.current_thread().ident,
                self._named_cursor_idx,
            )
        )

    def _set_autocommit(self, autocommit):
        with self.wrap_database_errors:
            self.connection.autocommit = autocommit

    def check_constraints(self, table_names=None):
        """
        Check constraints by setting them to immediate. Return them to deferred
        afterward.
        """
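        # Making all deferred constraints immediate forces them to be checked
        # right away; a violation raises an IntegrityError.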
        self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
        self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')

    def is_usable(self):
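        """Return True if a trivial query succeeds on the current connection, False otherwise."""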
        try:
            # Use a psycopg cursor directly, bypassing Django's utilities.
            self.connection.cursor().execute("SELECT 1")
        except Database.Error:
            return False
        else:
            return True

    @property
    def _nodb_connection(self):
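        """
        Return a connection to the 'postgres' database for operations that
        can't use the target database (such as creating or dropping it). Fall
        back to the first other PostgreSQL database in settings.DATABASES if
        the 'postgres' database isn't reachable.
        """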
        nodb_connection = super()._nodb_connection
        try:
            nodb_connection.ensure_connection()
        except (Database.DatabaseError, WrappedDatabaseError):
            warnings.warn(
                "Normally Django will use a connection to the 'postgres' database "
                "to avoid running initialization queries against the production "
                "database when it's not needed (for example, when running tests). "
                "Django was unable to create a connection to the 'postgres' database "
                "and will use the first PostgreSQL database instead.",
                RuntimeWarning
            )
            for connection in connections.all():
                if connection.vendor == 'postgresql' and connection.settings_dict['NAME'] != 'postgres':
                    return self.__class__(
                        {**self.settings_dict, 'NAME': connection.settings_dict['NAME']},
                        alias=self.alias,
                        allow_thread_sharing=False,
                    )
        return nodb_connection

    @cached_property
    def pg_version(self):
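        """
        Return the PostgreSQL server version as an integer, e.g. 90601 for
        9.6.1, as reported by psycopg2's connection.server_version.
        """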
        with self.temporary_connection():
            return self.connection.server_version