Ticket #28668: on_conflict_postgresql.patch
File on_conflict_postgresql.patch, 6.5 KB (added by , 7 years ago)
new file django/db/backends/postgresql/compiler.py
commit 0c7e582a90fe026b964311a773832c083417ae3c
Author: Дилян Палаузов <dpa@mail.lab>
Date:   Thu Jan 25 18:20:18 2018 +0000

    Add bulk_create on_conflict='ignore'

diff --git a/django/db/backends/postgresql/compiler.py b/django/db/backends/postgresql/compiler.py
new file mode 100644
index 0000000..a562e34
from django.db.models.sql import compiler


SQLCompiler = compiler.SQLCompiler
SQLDeleteCompiler = compiler.SQLDeleteCompiler
SQLUpdateCompiler = compiler.SQLUpdateCompiler
SQLAggregateCompiler = compiler.SQLAggregateCompiler


class SQLInsertCompiler(compiler.SQLInsertCompiler):
    def as_sql(self):
        """
        Create queries that work like "INSERT INTO .. ON CONFLICT DO NOTHING RETURNING *"
        but return the same number of rows as the input, yielding NULL for rows that
        already existed. The cited query on its own returns nothing for rows that were
        already in the database. The drawback is that the pg sequence counter increases
        every time by the number of rows in the input, irrespective of how many rows
        were actually inserted. Works only with PostgreSQL >= 9.5.
        """
        fields = self.query.fields
        if fields and self.connection.pg_version >= 90500 and getattr(self.query, 'on_conflict', '') == 'ignore':
            qn = self.quote_name_unless_alias
            opts = self.query.get_meta()
            return [("WITH r AS (SELECT * FROM(VALUES (" + "),(".join(
                ",".join("%s" for f in fields) for obj in self.query.objs
            ) + ")) AS g(" + ",".join(qn(field.column) for field in fields) + "))," +
                " s AS (INSERT INTO " + qn(opts.db_table) + " (" + ", ".join(
                    qn(field.column) for field in fields) +
                ") SELECT * FROM r ON CONFLICT DO NOTHING RETURNING *) SELECT s." +
                qn(opts.pk.column) + " FROM r LEFT JOIN s USING (" + ", ".join(
                    qn(field.column) for field in fields) + ")",
                tuple(p for ps in self.assemble_as_sql(fields, [
                    [self.prepare_value(field, self.pre_save_val(
                        field, obj)) for field in fields] for obj in self.query.objs
                ])[1] for p in ps))]
        return super().as_sql()
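For readers following the string concatenation above, this is roughly the statement it assembles for a hypothetical model with a unique "name" column and a "value" column when two objects are passed in. The table and column names are made up for illustration; this is derived from the code, not output captured from the patch.

# Roughly the SQL built by the compiler above for a hypothetical table
# "app_item" with columns "name" and "value" and two input rows. The %s
# placeholders are bound to the flattened per-row parameters returned by
# assemble_as_sql().
EXAMPLE_SQL = (
    'WITH r AS (SELECT * FROM(VALUES (%s,%s),(%s,%s)) AS g("name","value")),'
    ' s AS (INSERT INTO "app_item" ("name", "value")'
    ' SELECT * FROM r ON CONFLICT DO NOTHING RETURNING *)'
    ' SELECT s."id" FROM r LEFT JOIN s USING ("name", "value")'
)
# Rows that hit a conflict never reach s, so the LEFT JOIN yields NULL for
# their primary key while still producing one output row per input row.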
django/db/backends/postgresql/operations.py
diff --git a/django/db/backends/postgresql/operations.py b/django/db/backends/postgresql/operations.py
index 3b71cd4..31230d3 100644
@@ -7,6 +7,7 @@ from django.db.backends.base.operations import BaseDatabaseOperations
 
 class DatabaseOperations(BaseDatabaseOperations):
     cast_char_field_without_max_length = 'varchar'
+    compiler_module = "django.db.backends.postgresql.compiler"
 
     def unification_cast_sql(self, output_field):
         internal_type = output_field.get_internal_type()
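This single attribute is what makes the PostgreSQL backend pick up the new compiler: Django resolves compiler classes by importing compiler_module and looking the class name up on it. The sketch below is a simplified paraphrase of that lookup, not the actual BaseDatabaseOperations code (the real method also caches the imported module).

from importlib import import_module

def resolve_compiler(ops, compiler_name):
    # With the override above, ops.compiler_module is
    # "django.db.backends.postgresql.compiler", so 'SQLInsertCompiler'
    # resolves to the subclass in the new file instead of the default
    # one in django.db.models.sql.compiler.
    module = import_module(ops.compiler_module)
    return getattr(module, compiler_name)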
django/db/models/query.py
diff --git a/django/db/models/query.py b/django/db/models/query.py
index 44bc51d..f65c09f 100644
@@ -419,7 +419,7 @@ class QuerySet:
             if obj.pk is None:
                 obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
 
-    def bulk_create(self, objs, batch_size=None):
+    def bulk_create(self, objs, batch_size=None, on_conflict=None):
         """
         Insert each of the instances into the database. Do *not* call
         save() on each of the instances, do not send any pre/post_save
@@ -447,6 +447,8 @@ class QuerySet:
         for parent in self.model._meta.get_parent_list():
             if parent._meta.concrete_model is not self.model._meta.concrete_model:
                 raise ValueError("Can't bulk create a multi-table inherited model")
+        if on_conflict and on_conflict.lower() != 'ignore':
+            raise ValueError("'%s' is an invalid value for on_conflict. Allowed values: 'ignore'" % on_conflict)
         if not objs:
             return objs
         self._for_write = True
@@ -457,10 +459,10 @@ class QuerySet:
         with transaction.atomic(using=self.db, savepoint=False):
             objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
             if objs_with_pk:
-                self._batched_insert(objs_with_pk, fields, batch_size)
+                self._batched_insert(objs_with_pk, fields, batch_size, on_conflict=on_conflict)
             if objs_without_pk:
                 fields = [f for f in fields if not isinstance(f, AutoField)]
-                ids = self._batched_insert(objs_without_pk, fields, batch_size)
+                ids = self._batched_insert(objs_without_pk, fields, batch_size, on_conflict=on_conflict)
                 if connection.features.can_return_ids_from_bulk_insert:
                     assert len(ids) == len(objs_without_pk)
                     for obj_without_pk, pk in zip(objs_without_pk, ids):
@@ -1110,7 +1112,7 @@ class QuerySet:
     # PRIVATE METHODS #
     ###################
 
-    def _insert(self, objs, fields, return_id=False, raw=False, using=None):
+    def _insert(self, objs, fields, return_id=False, raw=False, using=None, on_conflict=None):
         """
         Insert a new record for the given model. This provides an interface to
         the InsertQuery class and is how Model.save() is implemented.
@@ -1119,12 +1121,14 @@ class QuerySet:
         if using is None:
             using = self.db
         query = sql.InsertQuery(self.model)
+        if on_conflict:
+            query.on_conflict = on_conflict.lower()
         query.insert_values(fields, objs, raw=raw)
         return query.get_compiler(using=using).execute_sql(return_id)
     _insert.alters_data = True
     _insert.queryset_only = False
 
-    def _batched_insert(self, objs, fields, batch_size):
+    def _batched_insert(self, objs, fields, batch_size, on_conflict=None):
         """
         Helper method for bulk_create() to insert objs one batch at a time.
         """
@@ -1133,7 +1137,7 @@ class QuerySet:
         inserted_ids = []
         for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
             if connections[self.db].features.can_return_ids_from_bulk_insert:
-                inserted_id = self._insert(item, fields=fields, using=self.db, return_id=True)
+                inserted_id = self._insert(item, fields=fields, using=self.db, return_id=True, on_conflict=on_conflict)
                 if isinstance(inserted_id, list):
                     inserted_ids.extend(inserted_id)
                 else:
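With the three files together, the feature is used as in the minimal sketch below. It assumes a hypothetical Tag model with a unique name field on a PostgreSQL >= 9.5 database; only the on_conflict argument itself comes from the patch.

# Hypothetical model; assumed: class Tag(models.Model): name = models.CharField(max_length=50, unique=True)
from myapp.models import Tag

Tag.objects.create(name='red')            # 'red' already exists in the table
Tag.objects.bulk_create(
    [Tag(name='red'), Tag(name='blue')],  # the conflicting 'red' row is skipped
    on_conflict='ignore',                 # instead of raising IntegrityError
)
# Any other value is rejected up front by the validation added in bulk_create():
# ValueError: "'replace' is an invalid value for on_conflict. Allowed values: 'ignore'"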