You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 
 

219 lines
7.8 KiB

  1. """
  2. Query subclasses which provide extra functionality beyond simple data retrieval.
  3. """
  4. from django.core.exceptions import FieldError
  5. from django.db import connections
  6. from django.db.models.query_utils import Q
  7. from django.db.models.sql.constants import (
  8. CURSOR, GET_ITERATOR_CHUNK_SIZE, NO_RESULTS,
  9. )
  10. from django.db.models.sql.query import Query
  11. from django.utils import six
  12. __all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'AggregateQuery']
class DeleteQuery(Query):
    """
    Delete queries are done through this class, since they are more constrained
    than general queries.
    """
    compiler = 'SQLDeleteCompiler'

    def do_query(self, table, where, using):
        """
        Execute a single DELETE against ``table`` restricted by ``where``.

        Returns the number of rows deleted, or 0 when the backend returned
        no cursor.
        """
        self.tables = [table]
        self.where = where
        cursor = self.get_compiler(using).execute_sql(CURSOR)
        return cursor.rowcount if cursor else 0

    def delete_batch(self, pk_list, using, field=None):
        """
        Set up and execute delete queries for all the objects in pk_list.

        More than one physical query may be executed if there are a
        lot of values in pk_list.  ``field`` defaults to the model's
        primary key and is the column matched against ``pk_list``.
        Returns the total number of rows deleted.
        """
        # number of objects deleted
        num_deleted = 0
        if not field:
            field = self.get_meta().pk
        # Delete in chunks of GET_ITERATOR_CHUNK_SIZE so the IN (...) clause
        # stays within backend limits on query parameters.
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            # Reset the WHERE clause for each chunk; otherwise the __in
            # filters would accumulate across iterations.
            self.where = self.where_class()
            self.add_q(Q(
                **{field.attname + '__in': pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]}))
            num_deleted += self.do_query(self.get_meta().db_table, self.where, using=using)
        return num_deleted

    def delete_qs(self, query, using):
        """
        Delete the queryset in one SQL query (if possible). For simple queries
        this is done by copying the query.query.where to self.query, for
        complex queries by using subquery.
        """
        innerq = query.query
        # Make sure the inner query has at least one table in use.
        innerq.get_initial_alias()
        # The same for our new query.
        self.get_initial_alias()
        innerq_used_tables = [t for t in innerq.tables
                              if innerq.alias_refcount[t]]
        if not innerq_used_tables or innerq_used_tables == self.tables:
            # There is only the base table in use in the query.
            self.where = innerq.where
        else:
            pk = query.model._meta.pk
            if not connections[using].features.update_can_self_select:
                # We can't do the delete using subquery.
                # Fall back to fetching the PKs and deleting them in batches.
                values = list(query.values_list('pk', flat=True))
                if not values:
                    return 0
                return self.delete_batch(values, using)
            else:
                # Backend supports self-select: turn the inner query into a
                # SELECT of just the primary key to use as a subquery.
                innerq.clear_select_clause()
                innerq.select = [
                    pk.get_col(self.get_initial_alias())
                ]
                values = innerq
            self.where = self.where_class()
            self.add_q(Q(pk__in=values))
        cursor = self.get_compiler(using).execute_sql(CURSOR)
        return cursor.rowcount if cursor else 0
class UpdateQuery(Query):
    """
    Represents an "update" SQL query.
    """
    compiler = 'SQLUpdateCompiler'

    def __init__(self, *args, **kwargs):
        super(UpdateQuery, self).__init__(*args, **kwargs)
        self._setup_query()

    def _setup_query(self):
        """
        Runs on initialization and after cloning. Any attributes that would
        normally be set in __init__ should go in here, instead, so that they
        are also set up after a clone() call.
        """
        # (field, model, value) triples for the SET clause.
        self.values = []
        # PKs used to filter related (ancestor-model) updates, if any.
        self.related_ids = None
        # Preserve any related_updates carried over by clone().
        if not hasattr(self, 'related_updates'):
            self.related_updates = {}

    def clone(self, klass=None, **kwargs):
        # Copy related_updates so the clone does not share the mapping.
        return super(UpdateQuery, self).clone(klass,
                related_updates=self.related_updates.copy(), **kwargs)

    def update_batch(self, pk_list, values, using):
        """
        Apply ``values`` to the rows whose PKs are in ``pk_list``, issuing
        one UPDATE per chunk of GET_ITERATOR_CHUNK_SIZE primary keys.
        """
        self.add_update_values(values)
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            # Reset the WHERE clause for each chunk.
            self.where = self.where_class()
            self.add_q(Q(pk__in=pk_list[offset: offset + GET_ITERATOR_CHUNK_SIZE]))
            self.get_compiler(using).execute_sql(NO_RESULTS)

    def add_update_values(self, values):
        """
        Convert a dictionary of field name to value mappings into an update
        query. This is the entry point for the public update() method on
        querysets.

        Raises FieldError for fields that cannot be updated directly
        (relations other than foreign keys, e.g. many-to-many fields).
        """
        values_seq = []
        for name, val in six.iteritems(values):
            field = self.get_meta().get_field(name)
            # NOTE(review): appears to treat auto-created non-concrete
            # fields (reverse relations) as non-direct — confirm against
            # the Meta field-flag semantics of this Django version.
            direct = not (field.auto_created and not field.concrete) or not field.concrete
            model = field.model._meta.concrete_model
            if not direct or (field.is_relation and field.many_to_many):
                raise FieldError(
                    'Cannot update model field %r (only non-relations and '
                    'foreign keys permitted).' % field
                )
            if model is not self.get_meta().model:
                # Field lives on an ancestor model: defer to a separate
                # per-ancestor UPDATE (see get_related_updates()).
                self.add_related_update(model, field, val)
                continue
            values_seq.append((field, model, val))
        return self.add_update_fields(values_seq)

    def add_update_fields(self, values_seq):
        """
        Append a sequence of (field, model, value) triples to the internal list
        that will be used to generate the UPDATE query. Might be more usefully
        called add_update_targets() to hint at the extra information here.
        """
        self.values.extend(values_seq)

    def add_related_update(self, model, field, value):
        """
        Adds (name, value) to an update query for an ancestor model.

        Updates are coalesced so that we only run one update query per
        ancestor.
        """
        self.related_updates.setdefault(model, []).append((field, None, value))

    def get_related_updates(self):
        """
        Returns a list of query objects: one for each update required to an
        ancestor model. Each query will have the same filtering conditions as
        the current query but will only update a single table.
        """
        if not self.related_updates:
            return []
        result = []
        for model, values in six.iteritems(self.related_updates):
            query = UpdateQuery(model)
            query.values = values
            if self.related_ids is not None:
                # Restrict the ancestor update to the affected rows.
                query.add_filter(('pk__in', self.related_ids))
            result.append(query)
        return result
  151. class InsertQuery(Query):
  152. compiler = 'SQLInsertCompiler'
  153. def __init__(self, *args, **kwargs):
  154. super(InsertQuery, self).__init__(*args, **kwargs)
  155. self.fields = []
  156. self.objs = []
  157. def clone(self, klass=None, **kwargs):
  158. extras = {
  159. 'fields': self.fields[:],
  160. 'objs': self.objs[:],
  161. 'raw': self.raw,
  162. }
  163. extras.update(kwargs)
  164. return super(InsertQuery, self).clone(klass, **extras)
  165. def insert_values(self, fields, objs, raw=False):
  166. """
  167. Set up the insert query from the 'insert_values' dictionary. The
  168. dictionary gives the model field names and their target values.
  169. If 'raw_values' is True, the values in the 'insert_values' dictionary
  170. are inserted directly into the query, rather than passed as SQL
  171. parameters. This provides a way to insert NULL and DEFAULT keywords
  172. into the query, for example.
  173. """
  174. self.fields = fields
  175. self.objs = objs
  176. self.raw = raw
  177. class AggregateQuery(Query):
  178. """
  179. An AggregateQuery takes another query as a parameter to the FROM
  180. clause and only selects the elements in the provided list.
  181. """
  182. compiler = 'SQLAggregateCompiler'
  183. def add_subquery(self, query, using):
  184. self.subquery, self.sub_params = query.get_compiler(using).as_sql(
  185. with_col_aliases=True,
  186. subquery=True,
  187. )