commit 0c7e582a90fe026b964311a773832c083417ae3c
Author: Дилян Палаузов <dpa@mail.lab>
Date:   Thu Jan 25 18:20:18 2018 +0000

    Add bulk_create on_conflict='ignore'

diff --git a/django/db/backends/postgresql/compiler.py b/django/db/backends/postgresql/compiler.py
new file mode 100644
index 0000000..a562e34
--- /dev/null
+++ b/django/db/backends/postgresql/compiler.py
@@ -0,0 +1,36 @@
+from django.db.models.sql import compiler
+
+
+SQLCompiler = compiler.SQLCompiler
+SQLDeleteCompiler = compiler.SQLDeleteCompiler
+SQLUpdateCompiler = compiler.SQLUpdateCompiler
+SQLAggregateCompiler = compiler.SQLAggregateCompiler
+
+
+class SQLInsertCompiler(compiler.SQLInsertCompiler):
+    def as_sql(self):
+        """
+        Create queries that work like "INSERT INTO .. ON CONFLICT DO NOTHING RETURNING *"
+        but return the same number of rows as in the input, returning NULL for already existing
+        rows.  The cited query does not return anything for rows that were already in the
+        database.  The drawback is that the pg-sequence counter increases every time by
+        the number of rows in the input, irrespective of the actually inserted rows.
+        Works only with PostgreSQL >= 9.5.
+        """
+        fields = self.query.fields
+        if fields and self.connection.pg_version >= 90500 and getattr(self.query, 'on_conflict', '') == 'ignore':
+            qn = self.quote_name_unless_alias
+            opts = self.query.get_meta()
+            return [("WITH r AS (SELECT * FROM(VALUES (" + "),(".join(
+                ",".join("%s" for f in fields) for obj in self.query.objs
+            ) + ")) AS g(" + ",".join(qn(field.column) for field in fields) + "))," +
+                " s AS (INSERT INTO " + qn(opts.db_table) + " (" + ", ".join(
+                    qn(field.column) for field in fields) +
+                ") SELECT * FROM r ON CONFLICT DO NOTHING RETURNING *) SELECT s." +
+                qn(opts.pk.column) + " FROM r LEFT JOIN s USING (" + ", ".join(
+                    qn(field.column) for field in fields) + ")",
+                tuple(p for ps in self.assemble_as_sql(fields, [
+                    [self.prepare_value(field, self.pre_save_val(
+                        field, obj)) for field in fields] for obj in self.query.objs
+                ])[1] for p in ps))]
+        return super().as_sql()
diff --git a/django/db/backends/postgresql/operations.py b/django/db/backends/postgresql/operations.py
index 3b71cd4..31230d3 100644
--- a/django/db/backends/postgresql/operations.py
+++ b/django/db/backends/postgresql/operations.py
@@ -7,6 +7,7 @@ from django.db.backends.base.operations import BaseDatabaseOperations
 
 class DatabaseOperations(BaseDatabaseOperations):
     cast_char_field_without_max_length = 'varchar'
+    compiler_module = "django.db.backends.postgresql.compiler"
 
     def unification_cast_sql(self, output_field):
         internal_type = output_field.get_internal_type()
diff --git a/django/db/models/query.py b/django/db/models/query.py
index 44bc51d..f65c09f 100644
--- a/django/db/models/query.py
+++ b/django/db/models/query.py
@@ -419,7 +419,7 @@ class QuerySet:
             if obj.pk is None:
                 obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
 
-    def bulk_create(self, objs, batch_size=None):
+    def bulk_create(self, objs, batch_size=None, on_conflict=None):
         """
         Insert each of the instances into the database. Do *not* call
         save() on each of the instances, do not send any pre/post_save
@@ -447,6 +447,8 @@ class QuerySet:
         for parent in self.model._meta.get_parent_list():
             if parent._meta.concrete_model is not self.model._meta.concrete_model:
                 raise ValueError("Can't bulk create a multi-table inherited model")
+        if on_conflict and on_conflict.lower() != 'ignore':
+            raise ValueError("'%s' is an invalid value for on_conflict. Allowed values: 'ignore'" % on_conflict)
         if not objs:
             return objs
         self._for_write = True
@@ -457,10 +459,10 @@ class QuerySet:
         with transaction.atomic(using=self.db, savepoint=False):
             objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
             if objs_with_pk:
-                self._batched_insert(objs_with_pk, fields, batch_size)
+                self._batched_insert(objs_with_pk, fields, batch_size, on_conflict=on_conflict)
             if objs_without_pk:
                 fields = [f for f in fields if not isinstance(f, AutoField)]
-                ids = self._batched_insert(objs_without_pk, fields, batch_size)
+                ids = self._batched_insert(objs_without_pk, fields, batch_size, on_conflict=on_conflict)
                 if connection.features.can_return_ids_from_bulk_insert:
                     assert len(ids) == len(objs_without_pk)
                 for obj_without_pk, pk in zip(objs_without_pk, ids):
@@ -1110,7 +1112,7 @@ class QuerySet:
     # PRIVATE METHODS #
     ###################
 
-    def _insert(self, objs, fields, return_id=False, raw=False, using=None):
+    def _insert(self, objs, fields, return_id=False, raw=False, using=None, on_conflict=None):
         """
         Insert a new record for the given model. This provides an interface to
         the InsertQuery class and is how Model.save() is implemented.
@@ -1119,12 +1121,14 @@ class QuerySet:
         if using is None:
             using = self.db
         query = sql.InsertQuery(self.model)
+        if on_conflict:
+            query.on_conflict = on_conflict.lower()
         query.insert_values(fields, objs, raw=raw)
         return query.get_compiler(using=using).execute_sql(return_id)
     _insert.alters_data = True
     _insert.queryset_only = False
 
-    def _batched_insert(self, objs, fields, batch_size):
+    def _batched_insert(self, objs, fields, batch_size, on_conflict=None):
         """
         Helper method for bulk_create() to insert objs one batch at a time.
         """
@@ -1133,7 +1137,7 @@ class QuerySet:
         inserted_ids = []
         for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
             if connections[self.db].features.can_return_ids_from_bulk_insert:
-                inserted_id = self._insert(item, fields=fields, using=self.db, return_id=True)
+                inserted_id = self._insert(item, fields=fields, using=self.db, return_id=True, on_conflict=on_conflict)
                 if isinstance(inserted_id, list):
                     inserted_ids.extend(inserted_id)
                 else:
