import logging

import psycopg2

from django.conf import settings
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
| 5 |
|
|---|
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
    """
    PostgreSQL-specific schema editor.

    Overrides the generic SQL templates of BaseDatabaseSchemaEditor with
    PostgreSQL dialect and adds handling for pattern-ops indexes and
    SERIAL-style sequences (see the methods below).
    """

    # USING %(column)s::%(type)s makes PostgreSQL cast existing row values,
    # so changing a column to an incompatible type converts data instead of
    # failing outright.
    sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s USING %(column)s::%(type)s"

    # Sequence management, used when a column is altered to serial/bigserial
    # (see _alter_column_type_sql below).
    sql_create_sequence = "CREATE SEQUENCE %(sequence)s"
    sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
    # Re-seeds the sequence from the current maximum of the column it feeds.
    sql_set_sequence_max = "SELECT setval('%(sequence)s', MAX(%(column)s)) FROM %(table)s"

    sql_create_index = "CREATE INDEX %(name)s ON %(table)s%(using)s (%(columns)s)%(extra)s"
    # Secondary indexes with an explicit operator class so that LIKE /
    # startswith queries can use an index outside the C locale
    # (see _create_like_index_sql).
    sql_create_varchar_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s varchar_pattern_ops)%(extra)s"
    sql_create_text_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s text_pattern_ops)%(extra)s"
    sql_delete_index = "DROP INDEX IF EXISTS %(name)s"

    # Setting the constraint to IMMEDIATE runs any deferred checks to allow
    # dropping it in the same transaction.
    sql_delete_fk = "SET CONSTRAINTS %(name)s IMMEDIATE; ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"

    # PostgreSQL requires the parameter signature to identify which overload
    # of the function to drop.
    sql_delete_procedure = 'DROP FUNCTION %(procedure)s(%(param_types)s)'
|
|---|
| 24 |
|
|---|
| 25 | def quote_value(self, value):
|
|---|
| 26 | return psycopg2.extensions.adapt(value)
|
|---|
| 27 |
|
|---|
| 28 | def _field_indexes_sql(self, model, field):
|
|---|
| 29 | output = super()._field_indexes_sql(model, field)
|
|---|
| 30 | like_index_statement = self._create_like_index_sql(model, field)
|
|---|
| 31 | if like_index_statement is not None:
|
|---|
| 32 | output.append(like_index_statement)
|
|---|
| 33 | return output
|
|---|
| 34 |
|
|---|
| 35 | def _create_like_index_sql(self, model, field):
|
|---|
| 36 | """
|
|---|
| 37 | Return the statement to create an index with varchar operator pattern
|
|---|
| 38 | when the column type is 'varchar' or 'text', otherwise return None.
|
|---|
| 39 | """
|
|---|
| 40 | db_type = field.db_type(connection=self.connection)
|
|---|
| 41 | if db_type is not None and (field.db_index or field.unique):
|
|---|
| 42 | # Fields with database column types of `varchar` and `text` need
|
|---|
| 43 | # a second index that specifies their operator class, which is
|
|---|
| 44 | # needed when performing correct LIKE queries outside the
|
|---|
| 45 | # C locale. See #12234.
|
|---|
| 46 | #
|
|---|
| 47 | # The same doesn't apply to array fields such as varchar[size]
|
|---|
| 48 | # and text[size], so skip them.
|
|---|
| 49 | if '[' in db_type:
|
|---|
| 50 | return None
|
|---|
| 51 | if db_type.startswith('varchar'):
|
|---|
| 52 | return self._create_index_sql(model, [field], suffix='_like', sql=self.sql_create_varchar_index)
|
|---|
| 53 | elif db_type.startswith('text'):
|
|---|
| 54 | return self._create_index_sql(model, [field], suffix='_like', sql=self.sql_create_text_index)
|
|---|
| 55 | return None
|
|---|
| 56 |
|
|---|
    def _alter_column_type_sql(self, model, old_field, new_field, new_type):
        """
        Make ALTER TYPE with SERIAL make sense.

        PostgreSQL has no real SERIAL column type: "serial"/"bigserial" are
        shorthand for an integer/bigint column backed by a sequence.  When
        altering a column to one of those pseudo-types, translate the change
        into the underlying integer type plus explicit sequence management.

        Returns the same shape as the base implementation: a 2-tuple of
        ((main_sql, params), list_of_followup_(sql, params)_tuples).
        """
        table = model._meta.db_table
        if new_type.lower() in ("serial", "bigserial"):
            column = new_field.column
            # PostgreSQL's implicit naming convention for serial sequences.
            sequence_name = "%s_%s_seq" % (table, column)
            col_type = "integer" if new_type.lower() == "serial" else "bigint"
            return (
                (
                    # Main change: alter to the concrete integer type.
                    self.sql_alter_column_type % {
                        "column": self.quote_name(column),
                        "type": col_type,
                    },
                    [],
                ),
                [
                    # Follow-up statements, executed in order:
                    # 1. drop any stale sequence of the same name,
                    (
                        self.sql_delete_sequence % {
                            "sequence": self.quote_name(sequence_name),
                        },
                        [],
                    ),
                    # 2. create a fresh sequence,
                    (
                        self.sql_create_sequence % {
                            "sequence": self.quote_name(sequence_name),
                        },
                        [],
                    ),
                    # 3. point the column's DEFAULT at the sequence.
                    # NOTE(review): alternative forms that were considered:
                    #   GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 5000 CACHE 1 )
                    #   DEFAULT nextval('catalog_data_federalstate_id_seq'::regclass) ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 2147483647 CACHE 1 )
                    (
                        self.sql_alter_column % {
                            "table": self.quote_name(table),
                            "changes": self.sql_alter_column_default % {
                                "column": self.quote_name(column),
                                "default": "nextval('%s')" % self.quote_name(sequence_name),
                            }
                        },
                        [],
                    ),
                    # 4. seed the sequence from the column's current maximum
                    #    so the next insert doesn't collide with existing rows.
                    (
                        self.sql_set_sequence_max % {
                            "table": self.quote_name(table),
                            "column": self.quote_name(column),
                            "sequence": self.quote_name(sequence_name),
                        },
                        [],
                    ),
                ],
            )
        else:
            # Not a serial pseudo-type: defer to the generic implementation.
            return super()._alter_column_type_sql(model, old_field, new_field, new_type)
|
|---|
| 110 |
|
|---|
    def _alter_field(self, model, old_field, new_field, old_type, new_type,
                     old_db_params, new_db_params, strict=False):
        """
        Perform the generic field alteration, additionally maintaining the
        PostgreSQL-specific "_like" (pattern-ops) indexes around it: drop a
        stale one before the change, create/drop one afterwards depending on
        the new field's index/unique flags.
        """
        # Drop indexes on varchar/text/citext columns that are changing to a
        # different type.
        if (old_field.db_index or old_field.unique) and (
            (old_type.startswith('varchar') and not new_type.startswith('varchar')) or
            (old_type.startswith('text') and not new_type.startswith('text')) or
            (old_type.startswith('citext') and not new_type.startswith('citext'))
        ):
            index_name = self._create_index_name(model._meta.db_table, [old_field.column], suffix='_like')
            self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))

        # Generic alteration (type change, null/default changes, etc.).
        super()._alter_field(
            model, old_field, new_field, old_type, new_type, old_db_params,
            new_db_params, strict,
        )
        # Added an index? Create any PostgreSQL-specific indexes.
        if ((not (old_field.db_index or old_field.unique) and new_field.db_index) or
                (not old_field.unique and new_field.unique)):
            like_index_statement = self._create_like_index_sql(model, new_field)
            if like_index_statement is not None:
                self.execute(like_index_statement)

        # Removed an index? Drop any PostgreSQL-specific indexes.
        if old_field.unique and not (new_field.db_index or new_field.unique):
            index_to_remove = self._create_index_name(model._meta.db_table, [old_field.column], suffix='_like')
            self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_to_remove))
|
|---|
| 138 |
|
|---|
| 139 | def column_sql(self, model, field, include_default=False):
|
|---|
| 140 | """
|
|---|
| 141 | Take a field and return its column definition.
|
|---|
| 142 | The field must already have had set_attributes_from_name() called.
|
|---|
| 143 | """
|
|---|
| 144 | # Get the column's type and use that as the basis of the SQL
|
|---|
| 145 | db_params = field.db_parameters(connection=self.connection)
|
|---|
| 146 | # Autoincrement SQL (for backends with post table definition variant)
|
|---|
| 147 | if settings.DEBUG: print("=> " + field.get_internal_type(), end='')
|
|---|
| 148 | sql = db_params['type']
|
|---|
| 149 | if settings.DEBUG: print(" type: " + sql)
|
|---|
| 150 | params = []
|
|---|
| 151 | # Check for fields that aren't actually columns (e.g. M2M)
|
|---|
| 152 | if sql is None:
|
|---|
| 153 | return None, None
|
|---|
| 154 | # Work out nullability
|
|---|
| 155 | null = field.null
|
|---|
| 156 | # If we were told to include a default value, do so
|
|---|
| 157 | include_default = include_default and not self.skip_default(field)
|
|---|
| 158 | if include_default:
|
|---|
| 159 | default_value = self.effective_default(field)
|
|---|
| 160 | if default_value is not None:
|
|---|
| 161 | if self.connection.features.requires_literal_defaults:
|
|---|
| 162 | # Some databases can't take defaults as a parameter (oracle)
|
|---|
| 163 | # If this is the case, the individual schema backend should
|
|---|
| 164 | # implement prepare_default
|
|---|
| 165 | sql += " DEFAULT %s" % self.prepare_default(default_value)
|
|---|
| 166 | else:
|
|---|
| 167 | sql += " DEFAULT %s"
|
|---|
| 168 | params += [default_value]
|
|---|
| 169 | # Oracle treats the empty string ('') as null, so coerce the null
|
|---|
| 170 | # option whenever '' is a possible value.
|
|---|
| 171 | if (field.empty_strings_allowed and not field.primary_key and
|
|---|
| 172 | self.connection.features.interprets_empty_strings_as_nulls):
|
|---|
| 173 | null = True
|
|---|
| 174 | if null and not self.connection.features.implied_column_null:
|
|---|
| 175 | sql += " NULL"
|
|---|
| 176 | elif not null:
|
|---|
| 177 | sql += " NOT NULL"
|
|---|
| 178 | # Primary key/unique outputs
|
|---|
| 179 | if field.primary_key:
|
|---|
| 180 | sql += " PRIMARY KEY"
|
|---|
| 181 | if field.get_internal_type() in ("IdentityAutoField", "IdentityBigAutoField"):
|
|---|
| 182 | sql += " GENERATED ALWAYS AS IDENTITY"
|
|---|
| 183 | elif field.unique:
|
|---|
| 184 | sql += " UNIQUE"
|
|---|
| 185 | # Optionally add the tablespace if it's an implicitly indexed column
|
|---|
| 186 | tablespace = field.db_tablespace or model._meta.db_tablespace
|
|---|
| 187 | if tablespace and self.connection.features.supports_tablespaces and field.unique:
|
|---|
| 188 | sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True)
|
|---|
| 189 | # Return the sql
|
|---|
| 190 | return sql, params
|
|---|
| 191 |
|
|---|
| 192 | def create_model(self, model):
|
|---|
| 193 | """
|
|---|
| 194 | Create a table and any accompanying indexes or unique constraints for
|
|---|
| 195 | the given `model`.
|
|---|
| 196 | """
|
|---|
| 197 | # Create column SQL, add FK deferreds if needed
|
|---|
| 198 | column_sqls = []
|
|---|
| 199 | params = []
|
|---|
| 200 | if settings.DEBUG: print('')
|
|---|
| 201 | for field in model._meta.local_fields:
|
|---|
| 202 | # SQL
|
|---|
| 203 | definition, extra_params = self.column_sql(model, field)
|
|---|
| 204 | if definition is None:
|
|---|
| 205 | continue
|
|---|
| 206 | # Check constraints can go on the column SQL here
|
|---|
| 207 | db_params = field.db_parameters(connection=self.connection)
|
|---|
| 208 | if db_params['check']:
|
|---|
| 209 | definition += " CHECK (%s)" % db_params['check']
|
|---|
| 210 | # Autoincrement SQL (for backends with inline variant)
|
|---|
| 211 | col_type_suffix = field.db_type_suffix(connection=self.connection)
|
|---|
| 212 | if col_type_suffix:
|
|---|
| 213 | definition += " %s" % col_type_suffix
|
|---|
| 214 | params.extend(extra_params)
|
|---|
| 215 | # FK
|
|---|
| 216 | if field.remote_field and field.db_constraint:
|
|---|
| 217 | to_table = field.remote_field.model._meta.db_table
|
|---|
| 218 | to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
|
|---|
| 219 | if self.sql_create_inline_fk:
|
|---|
| 220 | definition += " " + self.sql_create_inline_fk % {
|
|---|
| 221 | "to_table": self.quote_name(to_table),
|
|---|
| 222 | "to_column": self.quote_name(to_column),
|
|---|
| 223 | }
|
|---|
| 224 | elif self.connection.features.supports_foreign_keys:
|
|---|
| 225 | self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
|
|---|
| 226 | # Add the SQL to our big list
|
|---|
| 227 | column_sqls.append("%s %s" % (
|
|---|
| 228 | self.quote_name(field.column),
|
|---|
| 229 | definition,
|
|---|
| 230 | ))
|
|---|
| 231 | # Autoincrement SQL (for backends with post table definition variant)
|
|---|
| 232 | if field.get_internal_type() in ("AutoField", "BigAutoField"):
|
|---|
| 233 | autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
|
|---|
| 234 | if autoinc_sql:
|
|---|
| 235 | self.deferred_sql.extend(autoinc_sql)
|
|---|
| 236 |
|
|---|
| 237 | # Add any unique_togethers (always deferred, as some fields might be
|
|---|
| 238 | # created afterwards, like geometry fields with some backends)
|
|---|
| 239 | for fields in model._meta.unique_together:
|
|---|
| 240 | columns = [model._meta.get_field(field).column for field in fields]
|
|---|
| 241 | self.deferred_sql.append(self._create_unique_sql(model, columns))
|
|---|
| 242 | # Make the table
|
|---|
| 243 | sql = self.sql_create_table % {
|
|---|
| 244 | "table": self.quote_name(model._meta.db_table),
|
|---|
| 245 | "definition": ", ".join(column_sqls)
|
|---|
| 246 | }
|
|---|
| 247 | if model._meta.db_tablespace:
|
|---|
| 248 | tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
|
|---|
| 249 | if tablespace_sql:
|
|---|
| 250 | sql += ' ' + tablespace_sql
|
|---|
| 251 | # Prevent using [] as params, in the case a literal '%' is used in the definition
|
|---|
| 252 | self.execute(sql, params or None)
|
|---|
| 253 | if settings.DEBUG: print("=> "+ str(sql))
|
|---|
| 254 |
|
|---|
| 255 | # Add any field index and index_together's (deferred as SQLite3 _remake_table needs it)
|
|---|
| 256 | self.deferred_sql.extend(self._model_indexes_sql(model))
|
|---|
| 257 |
|
|---|
| 258 | # Make M2M tables
|
|---|
| 259 | for field in model._meta.local_many_to_many:
|
|---|
| 260 | if field.remote_field.through._meta.auto_created:
|
|---|
| 261 | self.create_model(field.remote_field.through)
|
|---|