Ticket #17025: wherenode_refactor.diff
File wherenode_refactor.diff, 68.3 KB (added by , 13 years ago)
django/db/models/query.py
diff --git a/django/db/models/query.py b/django/db/models/query.py
index be42d02..3070915 100644
@@ -883 +883 @@ class QuerySet(object):
         Prepare the query for computing a result that contains aggregate annotations.
         """
         opts = self.model._meta
-        if self.query.group_by is None:
+        if not self.query.group_by:
             field_names = [f.attname for f in opts.fields]
             self.query.add_fields(field_names, False)
-            self.query.set_group_by()
+            self.query.group_by = True
 
     def _prepare(self):
         return self
@@ -938 +938 @@ class ValuesQuerySet(QuerySet):
 
         if self._fields:
             self.extra_names = []
+            # We collect the aggregate names here from the backing query's
+            # aggregates. These are those aggregates that will be in the
+            # values list. We set the list here to [], so that in the end
+            # of this method we can check "is None" and set the aggregate
+            # mask of the backing query to empty indicating that we aren't
+            # actually interested in any fields. Naturally if we collect
+            # some aggregates, then the aggregate mask will be set to that.
             self.aggregate_names = []
             if not self.query.extra and not self.query.aggregates:
                 # Short cut - if there are no extra or aggregates, then
@@ -946 +953 @@ class ValuesQuerySet(QuerySet):
             else:
                 self.query.default_cols = False
                 self.field_names = []
+                # OK, we have a list of fields - now we split them into
+                # fields which are aggregates, those which are from extra
+                # and normal fields. Why?
                 for f in self._fields:
                     # we inspect the full extra_select list since we might
                     # be adding back an extra select item that we hadn't
@@ -962 +972 @@ class ValuesQuerySet(QuerySet):
             self.field_names = [f.attname for f in self.model._meta.fields]
             self.aggregate_names = None
 
+        # Why can't we just keep the values we are interested in, pass that
+        # into compiler, and let it do the final pruning?
         self.query.select = []
         if self.extra_names is not None:
             self.query.set_extra_mask(self.extra_names)
         self.query.add_fields(self.field_names, True)
+        # Ok, if we are called without fields, this means we do keep the
+        # aggregates.
         if self.aggregate_names is not None:
             self.query.set_aggregate_mask(self.aggregate_names)
 
@@ -997 +1011 @@ class ValuesQuerySet(QuerySet):
         """
         Prepare the query for computing a result that contains aggregate annotations.
         """
-        self.query.set_group_by()
+        # This super call will add all the fields in the model into the query,
+        # or do nothing if group_by is set. We call it, but it will not do
+        # anything.
+        self.query.group_by = True
+        super(ValuesQuerySet, self)._setup_aggregate_query(aggregates)
 
+        # Set the new additional aggregates into the aggregate mask.
         if self.aggregate_names is not None:
             self.aggregate_names.extend(aggregates)
             self.query.set_aggregate_mask(self.aggregate_names)
 
-        super(ValuesQuerySet, self)._setup_aggregate_query(aggregates)
+
 
     def _as_sql(self, connection):
         """
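A note on the group_by change above: with this patch the query no longer carries a list of grouping columns, only a flag saying whether grouping is wanted, and the compiler collects the actual columns while building the SQL. A short usage sketch against the Book model used by this patch's aggregation_regress tests; the printed SQL is illustrative, not exact backend output:

    from django.db.models import Max
    from regressiontests.aggregation_regress.models import Book

    # values() + annotate() marks the query as grouped; the GROUP BY columns
    # themselves are only collected when the compiler builds the SQL.
    qs = Book.objects.values('publisher').annotate(max_pages=Max('pages'))
    print(qs.query.group_by)   # True with this patch applied (a list before it)
    print(qs.query)            # SELECT publisher_id, MAX(pages) ... GROUP BY publisher_id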
django/db/models/query_utils.py
diff --git a/django/db/models/query_utils.py b/django/db/models/query_utils.py
index a56ab5c..10e532d 100644
@@ -43 +43 @@ class Q(tree.Node):
     def __init__(self, *args, **kwargs):
         super(Q, self).__init__(children=list(args) + kwargs.items())
 
+    def _new_instance(cls, children=None, connector=None, negated=False):
+        obj = tree.Node(children, connector, negated)
+        obj.__class__ = cls
+        return obj
+    _new_instance = classmethod(_new_instance)
+
     def _combine(self, other, conn):
         if not isinstance(other, Q):
             raise TypeError(other)
         obj = type(self)()
-        obj.add(self, conn)
-        obj.add(other, conn)
+        obj.connector = conn
+        if len(self) == 1 and not self.negated:
+            obj.add(self.children[0], conn)
+        else:
+            obj.add(self, conn)
+        if len(other) == 1 and not other.negated:
+            obj.add(other.children[0], conn)
+        else:
+            obj.add(other, conn)
         return obj
 
     def __or__(self, other):
@@ -58 +71 @@ class Q(tree.Node):
         return self._combine(other, self.AND)
 
     def __invert__(self):
-        obj = type(self)()
-        obj.add(self, self.AND)
+        obj = self.clone()
         obj.negate()
         return obj
 
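The _combine() change above flattens single-child, non-negated operands so that Q(a=1) & Q(b=2) produces one AND node holding two leaves instead of nesting whole Q objects. A minimal standalone sketch of that flattening rule, using a toy node class rather than Django's actual tree.Node:

    class ToyNode(object):
        def __init__(self, children=None, connector='AND', negated=False):
            self.children = list(children or [])
            self.connector = connector
            self.negated = negated

        def combine(self, other, conn):
            obj = ToyNode(connector=conn)
            for operand in (self, other):
                # Hoist the single child of a plain one-child operand so the
                # result stays a flat two-leaf node instead of nesting trees.
                if len(operand.children) == 1 and not operand.negated:
                    obj.children.append(operand.children[0])
                else:
                    obj.children.append(operand)
            return obj

    a = ToyNode([('name__contains', 'fred')])
    b = ToyNode([('age__gt', 30)])
    combined = a.combine(b, 'AND')
    print(combined.connector)   # AND
    print(combined.children)    # two tuple leaves, no nested nodes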
django/db/models/sql/aggregates.py
diff --git a/django/db/models/sql/aggregates.py b/django/db/models/sql/aggregates.py
index 207bc0c..aef8483 100644
@@ -1 +1 @@
+import copy
 """
 Classes to represent the default SQL aggregate functions
 """
@@ -69 +70 @@ class Aggregate(object):
 
         self.field = tmp
 
+    def clone(self):
+        clone = copy.copy(self)
+        clone.col = self.col[:]
+        return clone
+
     def relabel_aliases(self, change_map):
         if isinstance(self.col, (list, tuple)):
             self.col = (change_map.get(self.col[0], self.col[0]), self.col[1])
django/db/models/sql/compiler.py
diff --git a/django/db/models/sql/compiler.py b/django/db/models/sql/compiler.py
index 6bf7de2..174cfd1 100644
@@ -48 +48 @@ class SQLCompiler(object):
             self.quote_cache[name] = r
             return r
 
+    def where_to_sql(self):
+        """
+        This method is responsible for:
+          - Removing always True / always False parts of the tree
+          - Splitting the tree into having and where
+          - Getting the group by columns from the having part of the query
+          - And finally turning the remaining trees into SQL and params
+
+        Returns 3-tuple of the form:
+            ((where, w_params), (having, h_params), having_group_by)
+
+        Where the having_group_by is a set of SQL snippets to add into the
+        group by, for example ["T1".some_field]
+        """
+        # Prune the tree. If we are left with a tree that matches nothing
+        # this EmptyResultSet will be risen.
+        where = self.query.where.clone_internal()
+        where.final_prune(self.quote_name_unless_alias, self.connection)
+        if where.match_nothing:
+            raise EmptyResultSet
+        if self.query.aggregates:
+            having = self.query.where_class()
+            where.split_aggregates(having)
+            where.prune_tree(); having.prune_tree()
+            group_by = set(); having.get_group_by(group_by)
+            return (where.as_sql(), having.as_sql(), group_by)
+        else:
+            return (where and where.as_sql() or ('', []), ('', []), set())
+
     def as_sql(self, with_limits=True, with_col_aliases=False):
         """
         Creates the SQL for this query. Returns the SQL string and list of
@@ -68 +97 @@ class SQLCompiler(object):
         from_, f_params = self.get_from_clause()
 
         qn = self.quote_name_unless_alias
-
-        where, w_params = self.query.where.as_sql(qn=qn, connection=self.connection)
-        having, h_params = self.query.having.as_sql(qn=qn, connection=self.connection)
+        where_tpl, having_tpl, having_group_by = self.where_to_sql()
+        having, h_params = having_tpl
+        where, w_params = where_tpl
+
         params = []
         for val in self.query.extra_select.itervalues():
             params.extend(val[1])
@@ -88 +118 @@ class SQLCompiler(object):
             result.append('WHERE %s' % where)
             params.extend(w_params)
 
-        grouping, gb_params = self.get_grouping()
-        if grouping:
+        grouping, gb_params = self.get_grouping(having_group_by)
+        if self.query.group_by:
             if ordering:
                 # If the backend can't group by PK (i.e., any database
                 # other than MySQL), then any fields mentioned in the
@@ -101 +131 @@ class SQLCompiler(object):
                     gb_params.extend(col_params)
                 else:
                     ordering = self.connection.ops.force_no_ordering()
-            result.append('GROUP BY %s' % ', '.join(grouping))
-            params.extend(gb_params)
+            if grouping:
+                result.append('GROUP BY %s' % ', '.join(grouping))
+                params.extend(gb_params)
 
         if having:
             result.append('HAVING %s' % having)
             params.extend(h_params)
 
-        if ordering:
+        # This is a hack: we rely on the ordering for GROUP BY. Subqueries do
+        # not use ordering, so instead of clearing the ordering, subqueries
+        # flag the query as not using the ordering there is defined. This is
+        # sure to bite us, and should be fixed. The real fix might be that
+        # relying on doing .order_by() to get the wanted GROUP BY might just
+        # need to be deprecated. Or maybe we should have a variable
+        # ordering_group_by, making it explicit that we collect the order_by
+        # GROUP BY clauses in different scope than the actual order by. But
+        # that just sounds hacky. Or maybe just resurrect the query.group_by
+        # set.
+        if ordering and self.query.use_ordering:
             result.append('ORDER BY %s' % ', '.join(ordering))
 
         if with_limits:
@@ -142 +183 @@ class SQLCompiler(object):
         """
         obj = self.query.clone()
         if obj.low_mark == 0 and obj.high_mark is None:
-            # If there is no slicing in use, then we can safely drop all ordering
-            obj.clear_ordering(True)
+            # If there is no slicing in use, then we can safely drop all
+            # ordering.
+            # TODO: We rely on ordering to determine the GROUP BY clause.
+            # So we keep the ordering, but tell the compiler not to append
+            # it to the query, just to group by it. Refactor.
+            obj.use_ordering = False
+            obj.order_by = [f for f in self.query.order_by if f not in self.query.aggregates]
+            # We essentially defined a group_by variable above. It seems clear
+            # that we need a custom group_by variable, which we can then use
+            # properly. This was in the original code.
         obj.bump_prefix()
         return obj.get_compiler(connection=self.connection).as_sql()
@@ -474 +523 @@ class SQLCompiler(object):
                 first = False
         return result, []
 
-    def get_grouping(self):
+    def get_grouping(self, where_group_by):
         """
         Returns a tuple representing the SQL elements in the "group by" clause.
         """
+        if not self.query.group_by:
+            return [], []
         qn = self.quote_name_unless_alias
         result, params = [], []
-        if self.query.group_by is not None:
-            if (len(self.query.model._meta.fields) == len(self.query.select) and
-                self.connection.features.allows_group_by_pk):
-                self.query.group_by = [
+        group_by = where_group_by
+        if (len(self.query.model._meta.fields) == len(self.query.select) and
+            self.connection.features.allows_group_by_pk):
+            group_by = set([
                 (self.query.model._meta.db_table, self.query.model._meta.pk.column)
-            ]
-
-            group_by = self.query.group_by or []
-
-            extra_selects = []
-            for extra_select, extra_params in self.query.extra_select.itervalues():
-                extra_selects.append(extra_select)
-                params.extend(extra_params)
-            cols = (group_by + self.query.select +
-                self.query.related_select_cols + extra_selects)
-            seen = set()
-            for col in cols:
-                if col in seen:
-                    continue
-                seen.add(col)
-                if isinstance(col, (list, tuple)):
-                    result.append('%s.%s' % (qn(col[0]), qn(col[1])))
-                elif hasattr(col, 'as_sql'):
-                    result.append(col.as_sql(qn, self.connection))
-                else:
-                    result.append('(%s)' % str(col))
+            ])
+
+        extra_selects = []
+        for extra_select, extra_params in self.query.extra_select.itervalues():
+            extra_selects.append(extra_select)
+            params.extend(extra_params)
+
+        cols = group_by.union(self.query.select +
+            self.query.related_select_cols + extra_selects)
+        for col in cols:
+            if isinstance(col, (list, tuple)):
+                result.append('%s.%s' % (qn(col[0]), qn(col[1])))
+            elif hasattr(col, 'as_sql'):
+                result.append(col.as_sql(qn, self.connection))
+            else:
+                result.append('(%s)' % str(col))
         return result, params
 
     def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
@@ -864 +910 @@ class SQLDeleteCompiler(SQLCompiler):
             "Can only delete from one table at a time."
         qn = self.quote_name_unless_alias
         result = ['DELETE FROM %s' % qn(self.query.tables[0])]
-        where, params = self.query.where.as_sql(qn=qn, connection=self.connection)
+        where_tpl, _, _ = self.where_to_sql()
+        where, params = where_tpl
         result.append('WHERE %s' % where)
         return ' '.join(result), tuple(params)
 
@@ -909 +956 @@ class SQLUpdateCompiler(SQLCompiler):
         if not values:
             return '', ()
         result.append(', '.join(values))
-        where, params = self.query.where.as_sql(qn=qn, connection=self.connection)
+        where_tpl, _, _ = self.where_to_sql()
+        where, params = where_tpl
         if where:
             result.append('WHERE %s' % where)
         return ' '.join(result), tuple(update_params + params)
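Since where_to_sql() is new in this patch, a rough standalone sketch of the split it performs may help: conditions that reference an aggregate go to HAVING, everything else stays in WHERE. The helper below is illustrative only (the real method works on the whole where tree, prunes it, and also collects GROUP BY columns); the condition tuples and the 'n_authors' alias are just example data:

    def split_where_having(conditions, aggregate_aliases):
        """Split flat AND-ed conditions into (where, having) lists.

        A condition is a (column, lookup, value) tuple; it belongs in HAVING
        when its column is one of the query's aggregate aliases.
        """
        where, having = [], []
        for cond in conditions:
            target = having if cond[0] in aggregate_aliases else where
            target.append(cond)
        return where, having

    conds = [('pages', 'gt', 300), ('n_authors', 'gte', 2)]
    print(split_where_having(conds, {'n_authors'}))
    # ([('pages', 'gt', 300)], [('n_authors', 'gte', 2)])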
django/db/models/sql/datastructures.py
diff --git a/django/db/models/sql/datastructures.py b/django/db/models/sql/datastructures.py
index 92d64e1..b8e06da 100644
@@ -6 +6 @@ the SQL domain.
 class EmptyResultSet(Exception):
     pass
 
-class FullResultSet(Exception):
-    pass
-
 class MultiJoin(Exception):
     """
     Used by join construction code to indicate the point at which a
django/db/models/sql/query.py
diff --git a/django/db/models/sql/query.py b/django/db/models/sql/query.py
index 61fd2be..453df65 100644
@@ -20 +20 @@ from django.db.models.sql import aggregates as base_aggregates_module
 from django.db.models.sql.constants import *
 from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin
 from django.db.models.sql.expressions import SQLEvaluator
-from django.db.models.sql.where import (WhereNode, Constraint, EverythingNode,
-    ExtraWhere, AND, OR)
+from django.db.models.sql.where import (WhereNode, Constraint, ExtraWhere,
+    AND, OR)
 from django.core.exceptions import FieldError
 
 __all__ = ['Query', 'RawQuery']
@@ -47 +47 @@ class RawQuery(object):
         return RawQuery(self.sql, using, params=self.params)
 
     def convert_values(self, value, field, connection):
-        """Convert the database-returned value into a type that is consistent
+        """
+        Convert the database-returned value into a type that is consistent
         across database backends.
 
         By default, this defers to the underlying backend operations, but
@@ -81 +82 @@ class RawQuery(object):
         self.cursor = connections[self.using].cursor()
         self.cursor.execute(self.sql, self.params)
 
-
 class Query(object):
     """
     A single SQL query.
@@ -121 +121 @@ class Query(object):
         self.tables = []    # Aliases in the order they are created.
         self.where = where()
         self.where_class = where
-        self.group_by = None
-        self.having = where()
+        self.use_ordering = True
         self.order_by = []
         self.low_mark, self.high_mark = 0, None  # Used for offset/limit
         self.distinct = False
@@ -131 +130 @@ class Query(object):
         self.select_related = False
         self.related_select_cols = []
 
-        # SQL aggregate-related attributes
+        # Here is some random rambling about aggregates. First, the current
+        # implementation is pretty darned hard to understand. There is little
+        # to no documentation, and there is usage of these variables all over
+        # the place.
+        #
+        # So, first, here is a list of what we will need for successful
+        # aggregate queries. First we will naturally need the actual
+        # aggregates, these are stored in self.aggregates, and this is
+        # pretty much clear thing.
+        #
+        # Next, we will need the fields to group by with. This we shouldn't
+        # keep record of, as the set of fields to group by is the wanted
+        # select fields, having fields, order fields, and extra select fields.
+        # We can and do compute these when the query gets executed. They will
+        # come as a side product when preparing other parts of the query for
+        # execution.
+
+        # Still, we need the having clause. This is under control now, as
+        # we have gotten rid of the query.having. We split the query.where
+        # into having and where based on the actual need.
+
+        # The current implementation of blindly adding the fields to the query
+        # is a bit dangerous - it leads to potential multijoins which will
+        # result in duplicate rows for aggregation. This is a hard problem to
+        # solve correctly.
+
+        # So, what do these variables below represent? aggregates is clear,
+        # it represents the aggregates in the query. Next comes group_by, this
+        # is a variable that represents if we should do a GROUP BY at all.
+
+        # Then we have self.aggregate_select_mask. This is the fields actually
+        # present in the query. Why self.aggregates do not get changed when we
+        # change the aggregates actually in the query is unknown. The actual
+        # fields in the query can be accessed through self.aggregate_select,
+        # which is a property showing only the fields in the aggregate select
+        # mask.
+
+        # To make things more complicated, db/query.py keeps its own variable
+        # aggregate_names. It seems this is collected from the aggregate_select
+        # property, and then used to add fields to the query. This is just
+        # speculation, I do not understand completely what it does.
+
         self.aggregates = SortedDict() # Maps alias -> SQL aggregate function
+        self.group_by = False
         self.aggregate_select_mask = None
         self._aggregate_select_cache = None
@@ -254 +295 @@ class Query(object):
         obj.dupe_avoidance = self.dupe_avoidance.copy()
         obj.select = self.select[:]
         obj.tables = self.tables[:]
-        obj.where = copy.deepcopy(self.where, memo=memo)
+        # We do not need to clone the leaf nodes - they are immutable until
+        # the query is executed, or relabel_alias is called. In either case
+        # we will take care of the copying where needed. This can be a major
+        # speed optimization when the where three has a lot of leaf nodes.
+        obj.where = self.where.clone_internal()
         obj.where_class = self.where_class
-        if self.group_by is None:
-            obj.group_by = None
-        else:
-            obj.group_by = self.group_by[:]
-        obj.having = copy.deepcopy(self.having, memo=memo)
         obj.order_by = self.order_by[:]
+        obj.use_ordering = self.use_ordering
         obj.low_mark, obj.high_mark = self.low_mark, self.high_mark
         obj.distinct = self.distinct
         obj.select_for_update = self.select_for_update
         obj.select_for_update_nowait = self.select_for_update_nowait
         obj.select_related = self.select_related
         obj.related_select_cols = []
-        obj.aggregates = copy.deepcopy(self.aggregates, memo=memo)
+        if self.aggregates:
+            obj.aggregates = copy.deepcopy(self.aggregates, memo=memo)
+        else:
+            obj.aggregates = SortedDict()
         if self.aggregate_select_mask is None:
             obj.aggregate_select_mask = None
         else:
@@ -279 +323 @@ class Query(object):
         # It will get re-populated in the cloned queryset the next time it's
         # used.
         obj._aggregate_select_cache = None
+        obj.group_by = self.group_by
         obj.max_depth = self.max_depth
         obj.extra = self.extra.copy()
         if self.extra_select_mask is None:
@@ -291 +336 @@ class Query(object):
             obj._extra_select_cache = self._extra_select_cache.copy()
         obj.extra_tables = self.extra_tables
         obj.extra_order_by = self.extra_order_by
-        obj.deferred_loading = copy.deepcopy(self.deferred_loading, memo=memo)
+        obj.deferred_loading = self.deferred_loading[0].copy(), self.deferred_loading[1]
         if self.filter_is_sticky and self.used_aliases:
             obj.used_aliases = self.used_aliases.copy()
         else:
@@ -343 +388 @@ class Query(object):
         # If there is a group by clause, aggregating does not add useful
         # information but retrieves only the first row. Aggregate
         # over the subquery instead.
-        if self.group_by is not None:
+        if self.group_by:
             from django.db.models.sql.subqueries import AggregateQuery
             query = AggregateQuery(self.model)
 
@@ -406 +451 @@ class Query(object):
                 obj.add_subquery(subquery, using=using)
             except EmptyResultSet:
                 # add_subquery evaluates the query, if it's an EmptyResultSet
-                # then there are can be no results, and therefore there the
-                # count is obviously 0
+                # then there can be no results. Therefore the count is 0.
                 return 0
 
         obj.add_count_column()
@@ -499 +543 @@ class Query(object):
             if self.alias_refcount.get(alias) or rhs.alias_refcount.get(alias):
                 self.promote_alias(alias, True)
 
-        # Now relabel a copy of the rhs where-clause and add it to the current
-        # one.
-        if rhs.where:
-            w = copy.deepcopy(rhs.where)
-            w.relabel_aliases(change_map)
-            if not self.where:
-                # Since 'self' matches everything, add an explicit "include
-                # everything" where-constraint so that connections between the
-                # where clauses won't exclude valid results.
-                self.where.add(EverythingNode(), AND)
-        elif self.where:
-            # rhs has an empty where clause.
-            w = self.where_class()
-            w.add(EverythingNode(), AND)
+        if connector == OR and (not self.where or not rhs.where):
+            # One of the two sides matches everything and the connector is OR.
+            # This means the new where condition must match everything.
+            self.where = self.where_class()
         else:
-            w = self.where_class()
-        self.where.add(w, connector)
+            rhs_where = rhs.where.clone()
+            rhs_where.relabel_aliases(change_map)
+            self.where = self.where_class([self.where, rhs_where], connector)
+            # the root node's connector must always be AND
+            if self.where.connector == OR:
+                self.where = self.where_class([self.where])
+        self.where.prune_tree()
 
         # Selection columns and extra extensions are those provided by 'rhs'.
         self.select = []
@@ -735 +774 @@ class Query(object):
         assert set(change_map.keys()).intersection(set(change_map.values())) == set()
 
         # 1. Update references in "select" (normal columns plus aliases),
-        # "group by", "where" and "having".
+        # "group by" and "where"
         self.where.relabel_aliases(change_map)
-        self.having.relabel_aliases(change_map)
-        for columns in [self.select, self.group_by or []]:
+        for columns in [self.select]:
             for pos, col in enumerate(columns):
                 if isinstance(col, (list, tuple)):
                     old_alias = col[0]
@@ -803 +841 @@ class Query(object):
         The 'exceptions' parameter is a container that holds alias names which
         should not be changed.
         """
+        # We must make sure the leaf nodes of the where tree will be cloned,
+        # as they will be relabeled.
+        self.where = self.where.clone()
+
         current = ord(self.alias_prefix)
         assert current < ord('Z')
         prefix = chr(current + 1)
@@ -952 +994 @@ class Query(object):
             self.unref_alias(alias)
         self.included_inherited_models = {}
 
-    def need_force_having(self, q_object):
-        """
-        Returns whether or not all elements of this q_object need to be put
-        together in the HAVING clause.
-        """
-        for child in q_object.children:
-            if isinstance(child, Node):
-                if self.need_force_having(child):
-                    return True
-            else:
-                if child[0].split(LOOKUP_SEP)[0] in self.aggregates:
-                    return True
-        return False
-
     def add_aggregate(self, aggregate, model, alias, is_summary):
         """
         Adds a single aggregate expression to the Query
@@ -982 +1010 @@ class Query(object):
                 aggregate.name, field_name, field_name))
         elif ((len(field_list) > 1) or
             (field_list[0] not in [i.name for i in opts.fields]) or
-            self.group_by is None or
+            not self.group_by or
             not is_summary):
             # If:
             # - the field descriptor has more than one part (foo__bar), or
@@ -1014 +1042 @@ class Query(object):
         # Add the aggregate to the query
         aggregate.add_to_query(self, alias, col=col, source=source, is_summary=is_summary)
 
+    def add_where_leaf(self, data, negated=False):
+        leaf_class = self.where.leaf_class()
+        self.where.add(leaf_class(data, negated), AND)
+
     def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
-            can_reuse=None, process_extras=True, force_having=False):
+            can_reuse=None, process_extras=True):
         """
         Add a single filter to the query. The 'filter_expr' is a pair:
         (filter_string, value). E.g. ('name__contains', 'fred')
@@ -1053 +1085 @@ class Query(object):
 
         # By default, this is a WHERE clause. If an aggregate is referenced
         # in the value, the filter will be promoted to a HAVING
-        having_clause = False
 
         # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
         # uses of None as a query value.
@@ -1067 +1098 @@ class Query(object):
         elif hasattr(value, 'evaluate'):
             # If value is a query expression, evaluate it
             value = SQLEvaluator(value, self)
-            having_clause = value.contains_aggregate
-
         for alias, aggregate in self.aggregates.items():
             if alias in (parts[0], LOOKUP_SEP.join(parts)):
-                entry = self.where_class()
-                entry.add((aggregate, lookup_type, value), AND)
-                if negate:
-                    entry.negate()
-                self.having.add(entry, connector)
+                self.add_where_leaf((aggregate, lookup_type, value))
                 return
 
         opts = self.get_meta()
@@ -1142 +1167 @@ class Query(object):
             self.promote_alias_chain(join_it, join_promote)
             self.promote_alias_chain(table_it, table_promote or join_promote)
 
-        if having_clause or force_having:
-            if (alias, col) not in self.group_by:
-                self.group_by.append((alias, col))
-            self.having.add((Constraint(alias, col, field), lookup_type, value),
-                connector)
-        else:
-            self.where.add((Constraint(alias, col, field), lookup_type, value),
-                connector)
+        self.add_where_leaf((Constraint(alias, col, field), lookup_type, value))
 
         if negate:
             self.promote_alias_chain(join_list)
@@ -1158 +1176 @@ class Query(object):
                 for alias in join_list:
                     if self.alias_map[alias][JOIN_TYPE] == self.LOUTER:
                         j_col = self.alias_map[alias][RHS_JOIN_COL]
-                        entry = self.where_class()
-                        entry.add(
+                        self.add_where_leaf(
                             (Constraint(alias, j_col, None), 'isnull', True),
-                            AND
+                            negated=True
                         )
-                        entry.negate()
-                        self.where.add(entry, AND)
                         break
             if not (lookup_type == 'in'
                     and not hasattr(value, 'as_sql')
@@ -1174 +1189 @@ class Query(object):
                 # exclude the "foo__in=[]" case from this handling, because
                 # it's short-circuited in the Where class.
                 # We also need to handle the case where a subquery is provided
-                self.where.add((Constraint(alias, col, None), 'isnull', False), AND)
+                self.add_where_leaf((Constraint(alias, col, None), 'isnull', False))
 
         if can_reuse is not None:
             can_reuse.update(join_list)
@@ -1183 +1198 @@ class Query(object):
                 self.add_filter(filter, negate=negate, can_reuse=can_reuse,
                         process_extras=False)
 
-    def add_q(self, q_object, used_aliases=None, force_having=False):
+    def add_q(self, q_object):
         """
         Adds a Q-object to the current filter.
 
         Can also be used to add anything that has an 'add_to_query()' method.
+
+        In case add_to_query path is not executed, this method's main purpose
+        is to walk the q_object's internal nodes and manage the state of the
+        self.where. Leaf nodes will be handled by add_filter.
+
+        The self.where tree is managed by pushing new nodes to the tree. This
+        way self.where is always at the right node when add_filter adds items
+        to it.
+
+        We need to start a new subtree when:
+          - The connector of the q_object is different than the connector of
+            the where tree.
+          - The q_object is negated.
+
+        After call of this function with q_object=~Q(pk=1)&~Q(Q(pk=3)|Q(pk=2))
+        we should have the following tree:
+                 AND
+                /   \
+              NOT   NOT
+               |      \
+              pk=1     OR
+                      /  \
+                   pk=3  pk=2
+
+        This method will call recursively itself for those childrens of the
+        q_object which are Q-objs, and call add_filter for the leaf nodes.
+
+        We will add all filters to self.where. When the query is executed, the
+        tree is splitted into where and having clauses.
         """
-        if used_aliases is None:
-            used_aliases = self.used_aliases
+
+        # Complex custom objects are responsible for adding themselves.
         if hasattr(q_object, 'add_to_query'):
-            # Complex custom objects are responsible for adding themselves.
-            q_object.add_to_query(self, used_aliases)
-        else:
-            if self.where and q_object.connector != AND and len(q_object) > 1:
-                self.where.start_subtree(AND)
-                subtree = True
+            q_object.add_to_query(self, self.used_aliases)
+            return
+
+        # Start subtree if needed. At the end we check if anything got added
+        # into the subtrees. If not, prune em.
+        connector = q_object.connector
+        subtree_parent = None
+        if self.where.connector <> connector or q_object.negated:
+            subtree = self.where_class(connector=connector)
+            subtree_parent = self.where
+            self.where.add(subtree, self.where.connector)
+            self.where = subtree
+            if q_object.negated:
+                self.where.negate()
+
+        # Aliases that were newly added or not used at all need to
+        # be promoted to outer joins if they are nullable relations.
+        # (they shouldn't turn the whole conditional into the empty
+        # set just because they don't match anything). Take the
+        # before snapshot of the aliases.
+        if connector == OR:
+            refcounts_before = self.alias_refcount.copy()
+
+        for child in q_object.children:
+            if isinstance(child, Node):
+                self.add_q(child)
             else:
-                subtree = False
-            connector = AND
-            if q_object.connector == OR and not force_having:
-                force_having = self.need_force_having(q_object)
-            for child in q_object.children:
-                if connector == OR:
-                    refcounts_before = self.alias_refcount.copy()
-                if force_having:
-                    self.having.start_subtree(connector)
-                else:
-                    self.where.start_subtree(connector)
-                if isinstance(child, Node):
-                    self.add_q(child, used_aliases, force_having=force_having)
-                else:
-                    self.add_filter(child, connector, q_object.negated,
-                            can_reuse=used_aliases, force_having=force_having)
-                if force_having:
-                    self.having.end_subtree()
-                else:
-                    self.where.end_subtree()
-
-                if connector == OR:
-                    # Aliases that were newly added or not used at all need to
-                    # be promoted to outer joins if they are nullable relations.
-                    # (they shouldn't turn the whole conditional into the empty
-                    # set just because they don't match anything).
-                    self.promote_unused_aliases(refcounts_before, used_aliases)
-                connector = q_object.connector
-            if q_object.negated:
-                self.where.negate()
-            if subtree:
-                self.where.end_subtree()
-        if self.filter_is_sticky:
-            self.used_aliases = used_aliases
+                self.add_filter(child, connector, q_object.negated,
+                        can_reuse=self.used_aliases)
+
+        if connector == OR:
+            self.promote_unused_aliases(refcounts_before, self.used_aliases)
+        if subtree_parent:
+            self.where = subtree_parent
+        self.where.prune_tree()
 
     def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
                     allow_explicit_fk=False, can_reuse=None, negate=False,
@@ -1254 +1292 @@ class Query(object):
         column (used for any 'where' constraint), the final 'opts' value and the
         list of tables joined.
         """
+
         joins = [alias]
         last = [0]
         dupe_set = set()
@@ -1533 +1572 @@ class Query(object):
             # database from tripping over IN (...,NULL,...) selects and returning
             # nothing
             alias, col = query.select[0]
-            query.where.add((Constraint(alias, col, None), 'isnull', False), AND)
+            query.add_where_leaf((Constraint(alias, col, None), 'isnull', False))
 
         self.add_filter(('%s__in' % prefix, query), negate=True, trim=True,
                 can_reuse=can_reuse)
@@ -1659 +1698 @@ class Query(object):
         if force_empty:
             self.default_ordering = False
 
-    def set_group_by(self):
-        """
-        Expands the GROUP BY clause required by the query.
-
-        This will usually be the set of all non-aggregate fields in the
-        return data. If the database backend supports grouping by the
-        primary key, and the query would be equivalent, the optimization
-        will be made automatically.
-        """
-        self.group_by = []
-
-        for sel in self.select:
-            self.group_by.append(sel)
-
     def add_count_column(self):
         """
         Converts the query to do count(...) or count(distinct(pk)) in order to
@@ -1705 +1730 @@ class Query(object):
         # Clear out the select cache to reflect the new unmasked aggregates.
         self.aggregates = {None: count}
         self.set_aggregate_mask(None)
-        self.group_by = None
+        self.group_by = False
 
     def add_select_related(self, fields):
         """
@@ -1748 +1773 @@ class Query(object):
             # This is order preserving, since self.extra_select is a SortedDict.
             self.extra.update(select_pairs)
         if where or params:
-            self.where.add(ExtraWhere(where, params), AND)
+            self.add_where_leaf(ExtraWhere(where, params))
         if tables:
             self.extra_tables += tuple(tables)
         if order_by:
@@ -1824 +1849 @@ class Query(object):
             target[model] = set([f.name for f in fields])
 
     def set_aggregate_mask(self, names):
+        if 'n_authors' in self.aggregate_select and names is None:
+            import ipdb; ipdb.set_trace()
         "Set the mask of aggregates that will actually be returned by the SELECT"
         if names is None:
             self.aggregate_select_mask = None
django/db/models/sql/subqueries.py
diff --git a/django/db/models/sql/subqueries.py b/django/db/models/sql/subqueries.py
index 1b03647..3a7774c 100644
@@ -37 +37 @@ class DeleteQuery(Query):
         field = self.model._meta.pk
         for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
             where = self.where_class()
-            where.add((Constraint(None, field.column, field), 'in',
-                    pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]), AND)
+            leaf = where.leaf_class()
+            where.add(leaf((Constraint(None, field.column, field), 'in',
+                    pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE])), AND)
             self.do_query(self.model._meta.db_table, where, using=using)
 
 class UpdateQuery(Query):
@@ -73 +74 @@ class UpdateQuery(Query):
         self.add_update_values(values)
         for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
             self.where = self.where_class()
-            self.where.add((Constraint(None, pk_field.column, pk_field), 'in',
-                    pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]),
-                    AND)
+            self.add_where_leaf((Constraint(None, pk_field.column, pk_field), 'in',
+                    pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]))
             self.get_compiler(using).execute_sql(None)
 
     def add_update_values(self, values):
django/db/models/sql/where.py
diff --git a/django/db/models/sql/where.py b/django/db/models/sql/where.py
index 3e9dbf0..ea26f62 100644
@@ -6 +6 @@ from itertools import repeat
 
 from django.utils import tree
 from django.db.models.fields import Field
-from datastructures import EmptyResultSet, FullResultSet
+from django.db.models.sql.aggregates import Aggregate
 
 # Connection types
 AND = 'AND'
 OR = 'OR'
 
-class EmptyShortCircuit(Exception):
+class WhereLeaf(object):
     """
-    Internal exception used to indicate that a "matches nothing" node should be
-    added to the where-clause.
-    """
-    pass
+    Represents a leaf node in a where tree. Contains single constraint,
+    and knows how to turn it into sql and params.
 
-class WhereNode(tree.Node):
-    """
-    Used to represent the SQL where-clause.
-
-    The class is tied to the Query class that created it (in order to create
-    the correct SQL).
-
-    The children in this tree are usually either Q-like objects or lists of
-    [table_alias, field_name, db_type, lookup_type, value_annotation,
-    params]. However, a child could also be any class with as_sql() and
-    relabel_aliases() methods.
+    This implements many of the WhereNode's methods. Here the methods
+    will do the terminal work, while WhereNode's methods will be mostly
+    recursive in nature.
     """
-    default = AND
+    # Fast and pretty way to test if the node is a leaf node.
+    is_leaf = True
 
-    def add(self, data, connector):
-        """
-        Add a node to the where-tree. If the data is a list or tuple, it is
-        expected to be of the form (obj, lookup_type, value), where obj is
-        a Constraint object, and is then slightly munged before being stored
-        (to avoid storing any reference to field objects). Otherwise, the 'data'
-        is stored unchanged and can be any class with an 'as_sql()' method.
-        """
+    def __init__(self, data, negated=False):
+        self.sql = ''
+        self.negated = negated
+        self.params = []
+        self.match_all = False
+        self.match_nothing = False
         if not isinstance(data, (list, tuple)):
-            super(WhereNode, self).add(data, connector)
-            return
-
-        obj, lookup_type, value = data
-        if hasattr(value, '__iter__') and hasattr(value, 'next'):
-            # Consume any generators immediately, so that we can determine
-            # emptiness and transform any non-empty values correctly.
-            value = list(value)
-
-        # The "annotation" parameter is used to pass auxilliary information
-        # about the value(s) to the query construction. Specifically, datetime
-        # and empty values need special handling. Other types could be used
-        # here in the future (using Python types is suggested for consistency).
-        if isinstance(value, datetime.datetime):
-            annotation = datetime.datetime
-        elif hasattr(value, 'value_annotation'):
-            annotation = value.value_annotation
+            self.data = data
         else:
-            annotation = bool(value)
-
-        if hasattr(obj, "prepare"):
-            value = obj.prepare(lookup_type, value)
-            super(WhereNode, self).add((obj, lookup_type, annotation, value),
-                connector)
-            return
-
-        super(WhereNode, self).add((obj, lookup_type, annotation, value),
-            connector)
+            # Preprocess the data
+            obj, lookup_type, value = data
+
+            if hasattr(value, '__iter__') and hasattr(value, 'next'):
+                # Consume any generators immediately, so that we can determine
+                # emptiness and transform any non-empty values correctly.
+                value = list(value)
+
+            # The "annotation" parameter is used to pass auxilliary information
+            # about the value(s) to the query construction. Specifically, datetime
+            # and empty values need special handling. Other types could be used
+            # here in the future (using Python types is suggested for consistency).
+            if isinstance(value, datetime.datetime):
+                annotation = datetime.datetime
+            elif hasattr(value, 'value_annotation'):
+                annotation = value.value_annotation
+            else:
+                annotation = bool(value)
 
-    def as_sql(self, qn, connection):
-        """
-        Returns the SQL version of the where clause and the value to be
-        substituted in. Returns None, None if this node is empty.
+            if hasattr(obj, "prepare"):
+                value = obj.prepare(lookup_type, value)
+            self.data = (obj, lookup_type, annotation, value)
 
-        If 'node' is provided, that is the root of the SQL generation
-        (generally not needed except by the internal implementation for
-        recursion).
-        """
-        if not self.children:
-            return None, []
-        result = []
-        result_params = []
-        empty = True
-        for child in self.children:
-            try:
-                if hasattr(child, 'as_sql'):
-                    sql, params = child.as_sql(qn=qn, connection=connection)
-                else:
-                    # A leaf node in the tree.
-                    sql, params = self.make_atom(child, qn, connection)
-
-            except EmptyResultSet:
-                if self.connector == AND and not self.negated:
-                    # We can bail out early in this particular case (only).
-                    raise
-                elif self.negated:
-                    empty = False
-                continue
-            except FullResultSet:
-                if self.connector == OR:
-                    if self.negated:
-                        empty = True
-                        break
-                    # We match everything. No need for any constraints.
-                    return '', []
-                if self.negated:
-                    empty = True
-                continue
-
-            empty = False
-            if sql:
-                result.append(sql)
-                result_params.extend(params)
-        if empty:
-            raise EmptyResultSet
+    def create_sql(self, qn, connection):
+        if hasattr(self.data, 'as_sql'):
+            self.sql, self.params = self.data.as_sql(qn, connection)
+        else:
+            self.sql, self.params = self.make_atom(qn, connection)
+        if self.negated and self.sql:
+            self.sql = 'NOT ' + self.sql
 
-        conn = ' %s ' % self.connector
-        sql_string = conn.join(result)
-        if sql_string:
-            if self.negated:
-                sql_string = 'NOT (%s)' % sql_string
-            elif len(self.children) != 1:
-                sql_string = '(%s)' % sql_string
-        return sql_string, result_params
+    def as_sql(self):
+        return self.sql, self.params
 
-    def make_atom(self, child, qn, connection):
+    def make_atom(self, qn, connection):
         """
         Turn a tuple (table_alias, column_name, db_type, lookup_type,
         value_annot, params) into valid SQL.
@@ -135 +76 @@ class WhereNode(tree.Node):
         Returns the string for the SQL fragment and the parameters to use for
         it.
         """
-        lvalue, lookup_type, value_annot, params_or_value = child
+        lvalue, lookup_type, value_annot, params_or_value = self.data
         if hasattr(lvalue, 'process'):
+            from django.db.models.base import ObjectDoesNotExist
             try:
                 lvalue, params = lvalue.process(lookup_type, params_or_value, connection)
-            except EmptyShortCircuit:
-                raise EmptyResultSet
+            except ObjectDoesNotExist:
+                self.set_sql_matches_nothing()
+                return '', []
         else:
             params = Field().get_db_prep_lookup(lookup_type, params_or_value,
                     connection=connection, prepared=True)
@@ -175 +118 @@ class WhereNode(tree.Node):
 
         if lookup_type == 'in':
             if not value_annot:
-                raise EmptyResultSet
+                self.set_sql_matches_nothing()
+                return '', []
             if extra:
                 return ('%s IN %s' % (field_sql, extra), params)
             max_in_list_size = connection.ops.max_in_list_size()
@@ -210 +154 @@ class WhereNode(tree.Node):
             return connection.ops.regex_lookup(lookup_type) % (field_sql, cast_sql), params
 
         raise TypeError('Invalid lookup_type: %r' % lookup_type)
+
+
+    def set_sql_matches_nothing(self):
+        if self.negated:
+            self.match_everything = True
+        else:
+            self.match_nothing = True
 
+    def subtree_contains_aggregate(self):
+        """
+        The leaf node contains aggregate if it has an aggregate in it, or it
+        contains a subquery which contains an aggregate as a value.
+        """
+        return (isinstance(self.data[0], Aggregate) or
+            (len(self.data) == 4 and
+             hasattr(self.data[3], 'contains_aggregate') and
+             self.data[3].contains_aggregate))
+
     def sql_for_columns(self, data, qn, connection):
         """
         Returns the SQL fragment used for the left-hand side of a column
@@ -224 +185 @@ class WhereNode(tree.Node):
             lhs = qn(name)
         return connection.ops.field_cast_sql(db_type) % lhs
 
-    def relabel_aliases(self, change_map, node=None):
+    def relabel_aliases(self, change_map):
+        if hasattr(self.data, 'relabel_aliases'):
+            self.data.relabel_aliases(change_map)
+        elif isinstance(self.data[0], (list, tuple)):
+            elt = list(self.data[0])
+            if elt[0] in change_map:
+                elt[0] = change_map[elt[0]]
+            self.data = (tuple(elt),) + self.data[1:]
+        else:
+            self.data[0].relabel_aliases(change_map)
+
+        # Check if the query value also requires relabelling
+        if hasattr(self.data[3], 'relabel_aliases'):
+            self.data[3].relabel_aliases(change_map)
+
+    def get_group_by(self, group_by):
+        if isinstance(self.data, tuple) and not isinstance(self.data[0], Aggregate):
+            group_by.add((self.data[0].alias, self.data[0].col))
+
+    def clone(self):
         """
-        Relabels the alias values of any children. 'change_map' is a dictionary
-        mapping old (current) alias values to the new values.
+        TODO: It is unfortunate that the data can be all sorts of things. It
+        would be a good idea to make the Constraint a bit larger class, so
+        that it could hold also the lookup type and value. Then we would
+        always have something implementing similar interface in Data.
         """
-        if not node:
-            node = self
-        for pos, child in enumerate(node.children):
-            if hasattr(child, 'relabel_aliases'):
-                child.relabel_aliases(change_map)
-            elif isinstance(child, tree.Node):
-                self.relabel_aliases(change_map, child)
-            elif isinstance(child, (list, tuple)):
-                if isinstance(child[0], (list, tuple)):
-                    elt = list(child[0])
-                    if elt[0] in change_map:
-                        elt[0] = change_map[elt[0]]
-                    node.children[pos] = (tuple(elt),) + child[1:]
-                else:
-                    child[0].relabel_aliases(change_map)
+        clone = self.__class__(None, self.negated)
+        if hasattr(self.data, 'clone'):
+            clone.data = self.data.clone()
+
+        else:
+            if hasattr(self.data[3], 'clone'):
+                new_data3 = self.data[3].clone()
+            else:
+                new_data3 = self.data[3]
+            clone.data = (self.data[0].clone(), self.data[1], self.data[2], new_data3)
+        return clone
+
+    def negate(self):
+        self.negated = not self.negated
 
-            # Check if the query value also requires relabelling
-            if hasattr(child[3], 'relabel_aliases'):
-                child[3].relabel_aliases(change_map)
+    def __str__(self):
+        return "%s%s, %s, %s" % (self.negated and 'NOT: ' or '',
+            self.data[0], self.data[1], self.data[3])
 
-class EverythingNode(object):
+class WhereNode(tree.Node):
     """
-    A node that matches everything.
+    Used to represent the SQL where-clause.
+
+    The class is tied to the Query class that created it (in order to create
+    the correct SQL).
+
+    The children in this tree are usually either Q-like objects or lists of
+    [table_alias, field_name, db_type, lookup_type, value_annotation,
+    params]. However, a child could also be any class with as_sql() and
+    relabel_aliases() methods.
     """
 
-    def as_sql(self, qn=None, connection=None):
-        raise FullResultSet
+    default = AND
+    is_leaf = False
 
-    def relabel_aliases(self, change_map, node=None):
-        return
+    def leaf_class(cls):
+        # Subclass hook
+        return WhereLeaf
+    leaf_class = classmethod(leaf_class)
 
-class NothingNode(object):
-    """
-    A node that matches nothing.
-    """
-    def as_sql(self, qn=None, connection=None):
-        raise EmptyResultSet
+    def clone_internal(self):
+        clone = self._new_instance()
+        clone.negated = self.negated; clone.connector = self.connector
+        clone.children = [c.is_leaf and c or c.clone() for c in self.children]
+        return clone
+
+
+    def final_prune(self, qn, connection):
+        """
+        This will do the final pruning of the tree, that is, removing parts
+        of the tree that must match everything / nothing.
+
+        Due to the fact that the only way to get to know that is calling
+        as_sql(), we will at the same time turn the leaf nodes into sql.
+        """
+        # There variables make sense only in the context of the final prune.
+        # There is no need to clone them, and there is no need to have them
+        # elsewhere. So, define them here instead of __init__.
+        self.match_all = False
+        self.match_nothing = False
+        for child in self.children[:]:
+            if child.is_leaf:
+                child.create_sql(qn, connection)
+            else:
+                child.final_prune(qn, connection)
+            if child.match_all:
+                if self.connector == OR:
+                    self.match_all = True
+                    break
+                self.children.remove(child)
+            if child.match_nothing:
+                if self.connector == AND:
+                    self.match_nothing = True
+                    break
+                self.children.remove(child)
+        else:
+            # We got through the loop without a break. Check if there are any
+            # children left. If not, this node must be a match_all node.
+            if not self.children:
+                self.match_all = True
+        if self.negated:
+            # If the node is negated, then turn the tables around.
+            self.match_all, self.match_nothing = self.match_nothing, self.match_all
+
+    def split_aggregates(self, having, parent=None):
+        """
+        Remove those parts of self that must go into the having clause. Part
+        must go into having if:
+          - It is connected to parent with OR and the subtree contains
+            aggregate
+          - The node is a leaf node and it contains aggregate
+        """
+        from django.conf import settings
+        if self.connector == OR:
+            if self.subtree_contains_aggregate():
+                having.add(self, AND)
+                # Note that OR cannot be the highest node in the tree, a where
+                # tree must always contain AND as root, and as such parent
+                # can't be None here.
+                parent.children.remove(self)
+        else:
+            if self.negated:
+                # TODO: I believe this might be broken. If in fact it isn't,
+                # we need a comment why it isn't so.
+                neg_node = having._new_instance(negated=True)
+                having.add(neg_node, AND)
+                having = neg_node
+            for child in self.children[:]:
+                if child.is_leaf:
+                    if child.subtree_contains_aggregate():
+                        having.add(child, AND)
+                        self.children.remove(child)
+                else:
+                    child.split_aggregates(having, self)
+
+    def subtree_contains_aggregate(self):
+        """
+        Returns whether or not all elements of this q_object need to be put
+        together in the HAVING clause.
+        """
+        for child in self.children:
+            if child.subtree_contains_aggregate():
+                return True
+        return False
+
+    def as_sql(self):
+        """
+        Turns this tree into SQL and params. It is assumed that leaf nodes are already
+        TODO: rename, and have as_sql implement the normal as_sql(qn, connection)
+        interface.
+        """
+        if not self:
+            return '', []
+        sql_snippets, params = [], []
+        for child in self.children:
+            child_sql, child_params = child.as_sql()
+            sql_snippets.append(child_sql); params.extend(child_params)
+
+        conn = ' %s ' % self.connector
+        sql_string = conn.join(sql_snippets)
+        if self.negated and sql_string:
+            sql_string = 'NOT (%s)' % sql_string
+        elif len(self.children) != 1:
+            sql_string = '(%s)' % sql_string
+        return sql_string, params
+
+    def get_group_by(self, group_by):
+        for child in self.children:
+            child.get_group_by(group_by)
 
     def relabel_aliases(self, change_map, node=None):
-        return
+        """
+        Relabels the alias values of any children. 'change_map' is a dictionary
+        mapping old (current) alias values to the new values.
+        """
+        for child in self.children:
+            child.relabel_aliases(change_map)
 
 class ExtraWhere(object):
     def __init__(self, sqls, params):
         self.sqls = sqls
         self.params = params
 
+    def relabel_aliases(self, change_map):
+        return
+
     def as_sql(self, qn=None, connection=None):
         return " AND ".join(self.sqls), tuple(self.params or ())
 
+    def clone(self):
+        return self
+
 class Constraint(object):
     """
     An object that can be passed to WhereNode.add() and knows how to
     pre-process itself prior to including in the WhereNode.
     """
+
     def __init__(self, alias, col, field):
         self.alias, self.col, self.field = alias, col, field
 
@@ -318 +424 @@ class Constraint(object):
     def process(self, lookup_type, value, connection):
         """
         Returns a tuple of data suitable for inclusion in a WhereNode
-        instance.
+        instance. Can raise ObjectDoesNotExist
         """
-        # Because of circular imports, we need to import this here.
-        from django.db.models.base import ObjectDoesNotExist
-        try:
-            if self.field:
-                params = self.field.get_db_prep_lookup(lookup_type, value,
-                    connection=connection, prepared=True)
-                db_type = self.field.db_type(connection=connection)
-            else:
-                # This branch is used at times when we add a comparison to NULL
-                # (we don't really want to waste time looking up the associated
-                # field object at the calling location).
-                params = Field().get_db_prep_lookup(lookup_type, value,
-                    connection=connection, prepared=True)
-                db_type = None
-        except ObjectDoesNotExist:
-            raise EmptyShortCircuit
-
+        if self.field:
+            params = self.field.get_db_prep_lookup(lookup_type, value,
+                connection=connection, prepared=True)
+            db_type = self.field.db_type(connection=connection)
+        else:
+            # This branch is used at times when we add a comparison to NULL
+            # (we don't really want to waste time looking up the associated
+            # field object at the calling location).
+            params = Field().get_db_prep_lookup(lookup_type, value,
+                connection=connection, prepared=True)
+            db_type = None
         return (self.alias, self.col, db_type), params
 
     def relabel_aliases(self, change_map):
         if self.alias in change_map:
             self.alias = change_map[self.alias]
+
+    def clone(self):
+        return Constraint(self.alias, self.col, self.field)
+
+    def __str__(self):
+        return "%s.%s" % (self.alias, self.col)
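For readers new to the match_all / match_nothing flags used by final_prune() above, here is a small standalone model of the pruning rule (a hypothetical Leaf/Tree pair, not the patch's classes): under AND a child that can never match makes the whole node unmatchable, under OR a child that always matches makes the node always match, and harmless children are simply dropped.

    class Leaf(object):
        def __init__(self, match_all=False, match_nothing=False):
            self.match_all = match_all
            self.match_nothing = match_nothing
            self.children = []

    class Tree(Leaf):
        def __init__(self, connector, children):
            Leaf.__init__(self)
            self.connector = connector
            self.children = list(children)

        def final_prune(self):
            for child in self.children[:]:
                if child.children:
                    child.final_prune()
                if child.match_all:
                    if self.connector == 'OR':
                        self.match_all = True
                        break
                    self.children.remove(child)   # TRUE AND x  ==  x
                elif child.match_nothing:
                    if self.connector == 'AND':
                        self.match_nothing = True
                        break
                    self.children.remove(child)   # FALSE OR x  ==  x
            else:
                if not self.children:
                    self.match_all = True         # everything was pruned away

    t = Tree('AND', [Leaf(match_all=True), Leaf(match_nothing=True)])
    t.final_prune()
    print(t.match_nothing)   # True - the whole tree can never match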
django/utils/tree.py
diff --git a/django/utils/tree.py b/django/utils/tree.py
index 36b5977..f733d1b 100644
@@ -19 +19 @@ class Node(object):
         """
         Constructs a new Node. If no connector is given, the default will be
         used.
-
-        Warning: You probably don't want to pass in the 'negated' parameter. It
-        is NOT the same as constructing a node and calling negate() on the
-        result.
         """
         self.children = children and children[:] or []
         self.connector = connector or self.default
-        self.subtree_parents = []
+        self.parent = None
         self.negated = negated
 
     # We need this because of django.db.models.query_utils.Q. Q.__init__() is
     # problematic, but it is a natural Node subclass in all other respects.
+    # The __init__ of Q has different signature, and thus _new_instance of Q
+    # does call Q's version of __init__.
     def _new_instance(cls, children=None, connector=None, negated=False):
+        return cls(children, connector, negated)
+    _new_instance = classmethod(_new_instance)
+
+    def clone(self):
         """
-        This is called to create a new instance of this class when we need new
-        Nodes (or subclasses) in the internal code in this class. Normally, it
-        just shadows __init__(). However, subclasses with an __init__ signature
-        that is not an extension of Node.__init__ might need to implement this
-        method to allow a Node to create a new instance of them (if they have
-        any extra setting up to do).
+        Clones the internal nodes of the tree. If also_leafs is False, does
+        not copy leaf nodes. This is a useful optimization for WhereNode
+        because WhereLeaf nodes do not need copying except when relabel_aliases
+        is called.
         """
-        obj = Node(children, connector, negated)
-        obj.__class__ = cls
+        obj = self._new_instance()
+        obj.children = [
+            isinstance(c, tuple) and c or c.clone() for c in self.children
+        ]
+        obj.connector = self.connector
+        obj.negated = self.negated
         return obj
-    _new_instance = classmethod(_new_instance)
+
+    def __repr__(self):
+        return self.as_subtree
 
     def __str__(self):
         if self.negated:
@@ -52 +58 @@ class Node(object):
         return '(%s: %s)' % (self.connector, ', '.join([str(c) for c in
             self.children]))
 
-    def __deepcopy__(self, memodict):
-        """
-        Utility method used by copy.deepcopy().
-        """
-        obj = Node(connector=self.connector, negated=self.negated)
-        obj.__class__ = self.__class__
-        obj.children = copy.deepcopy(self.children, memodict)
-        obj.subtree_parents = copy.deepcopy(self.subtree_parents, memodict)
-        return obj
+    def _as_subtree(self, indent=0):
+        buf = []
+        if self.negated:
+            buf.append(" " * indent + "NOT")
+        buf.append((" " * indent) + self.connector + ":")
+        indent += 2
+        for child in self.children:
+            if isinstance(child, Node):
+                buf.append(child._as_subtree(indent=indent))
+            else:
+                buf.append((" " * indent) + str(child))
+        return "\n".join(buf)
+    as_subtree = property(_as_subtree)
 
     def __len__(self):
         """
@@ -82 +92 @@ class Node(object):
 
     def add(self, node, conn_type):
         """
-        Adds a new node to the tree. If the conn_type is the same as the root's
-        current connector type, the node is added to the first level.
+        Adds a new node to the tree. If the conn_type is the same as the
+        root's current connector type, the node is added to the first level.
         Otherwise, the whole tree is pushed down one level and a new root
-        connector is created, connecting the existing tree and the new node.
+        connector is created, connecting the existing tree and the added node.
         """
         if node in self.children and conn_type == self.connector:
             return
-        if len(self.children) < 2:
-            self.connector = conn_type
         if self.connector == conn_type:
-            if isinstance(node, Node) and (node.connector == conn_type or
-                len(node) == 1):
-                self.children.extend(node.children)
-            else:
-                self.children.append(node)
+            self.children.append(node)
         else:
-            obj = self._new_instance(self.children, self.connector,
-                self.negated)
-            self.connector = conn_type
-            self.children = [obj, node]
+            obj = self._new_instance([node], conn_type)
+            self.children.append(obj)
 
     def negate(self):
         """
-        Negate the sense of the root connector. This reorganises the children
-        so that the current node has a single child: a negated node containing
-        all the previous children. This slightly odd construction makes adding
-        new children behave more intuitively.
-
-        Interpreting the meaning of this negate is up to client code. This
-        method is useful for implementing "not" arrangements.
-        """
-        self.children = [self._new_instance(self.children, self.connector,
-            not self.negated)]
-        self.connector = self.default
-
-    def start_subtree(self, conn_type):
-        """
-        Sets up internal state so that new nodes are added to a subtree of the
-        current node. The conn_type specifies how the sub-tree is joined to the
-        existing children.
-        """
-        if len(self.children) == 1:
-            self.connector = conn_type
-        elif self.connector != conn_type:
-            self.children = [self._new_instance(self.children, self.connector,
-                self.negated)]
-            self.connector = conn_type
-            self.negated = False
-
-        self.subtree_parents.append(self.__class__(self.children,
-            self.connector, self.negated))
-        self.connector = self.default
-        self.negated = False
-        self.children = []
-
-    def end_subtree(self):
-        """
-        Closes off the most recently unmatched start_subtree() call.
-
-        This puts the current state into a node of the parent tree and returns
-        the current instances state to be the parent.
-        """
-        obj = self.subtree_parents.pop()
-        node = self.__class__(self.children, self.connector)
-        self.connector = obj.connector
-        self.negated = obj.negated
-        self.children = obj.children
-        self.children.append(node)
-
+        Negate the sense of this node.
+        """
+        self.negated = not self.negated
+
+    def prune_tree(self):
+        """
+        Removes empty children nodes, and non-necessary intermediatry
+        nodes from this node.
+        """
+        for child in self.children[:]:
+            if not child:
+                self.children.remove(child)
+            elif not child.is_leaf:
+                child.prune_tree()
+                if len(child) == 1:
+                    # There is no need for this node.we can prune internal
+                    # nodes with just on child
+                    grandchild = child.children[0]
+                    if child.negated:
+                        grandchild.negate()
+                    self.children.remove(child)
+                    self.children.append(grandchild)
+                elif not child:
+                    self.children.remove(child)
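prune_tree() above cleans up after add_q(): empty subtrees are dropped and an internal node with a single child is replaced by that child, pushing any negation down. A compact standalone illustration with a toy node class (not Django's tree.Node):

    class N(object):
        def __init__(self, children=None, connector='AND', negated=False):
            self.children = list(children or [])
            self.connector = connector
            self.negated = negated

        def __len__(self):
            return len(self.children)

        def prune(self):
            for child in self.children[:]:
                if isinstance(child, N):
                    if not child.children:
                        self.children.remove(child)        # drop empty subtree
                        continue
                    child.prune()
                    if len(child) == 1:
                        # Hoist the lone grandchild; a one-child connector node
                        # adds nothing, but its negation must be preserved.
                        grandchild = child.children[0]
                        if child.negated and isinstance(grandchild, N):
                            grandchild.negated = not grandchild.negated
                        self.children.remove(child)
                        self.children.append(grandchild)

    root = N([N([], 'OR'), N([('pk', 1)], 'OR')])
    root.prune()
    print(root.children)   # [('pk', 1)] - both useless OR wrappers are gone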
tests/regressiontests/aggregation_regress/tests.py
diff --git a/tests/regressiontests/aggregation_regress/tests.py b/tests/regressiontests/aggregation_regress/tests.py
index acdc59a..badc1cb 100644
@@ -465 +465 @@ class AggregationTests(TestCase):
         # Regression for #15709 - Ensure each group_by field only exists once
         # per query
         qs = Book.objects.values('publisher').annotate(max_pages=Max('pages')).order_by()
-        grouping, gb_params = qs.query.get_compiler(qs.db).get_grouping()
+        grouping, gb_params = qs.query.get_compiler(qs.db).get_grouping(set())
         self.assertEqual(len(grouping), 1)
 
     def test_duplicate_alias(self):
tests/regressiontests/queries/tests.py
diff --git a/tests/regressiontests/queries/tests.py b/tests/regressiontests/queries/tests.py
index d8fd5bc..4f505a3 100644
@@ -820 +820 @@ class Queries1Tests(BaseQuerysetTest):
         q = Note.objects.filter(Q(extrainfo__author=self.a1)|Q(extrainfo=xx)).query
         self.assertEqual(
             len([x[2] for x in q.alias_map.values() if x[2] == q.LOUTER and q.alias_refcount[x[1]]]),
-            1
+            2
         )