subqueries.py 10 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285
  1. """
  2. Query subclasses which provide extra functionality beyond simple data retrieval.
  3. """
  4. from django.conf import settings
  5. from django.core.exceptions import FieldError
  6. from django.db import connections
  7. from django.db.models.query_utils import Q
  8. from django.db.models.constants import LOOKUP_SEP
  9. from django.db.models.fields import DateField, DateTimeField, FieldDoesNotExist
  10. from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE, NO_RESULTS, SelectInfo
  11. from django.db.models.sql.datastructures import Date, DateTime
  12. from django.db.models.sql.query import Query
  13. from django.utils import six
  14. from django.utils import timezone
  15. __all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'DateQuery',
  16. 'DateTimeQuery', 'AggregateQuery']
class DeleteQuery(Query):
    """
    Delete queries are done through this class, since they are more constrained
    than general queries.
    """
    compiler = 'SQLDeleteCompiler'

    def do_query(self, table, where, using):
        # Point the query at exactly one table with the given where clause and
        # run it, discarding results (a DELETE produces no rows).
        self.tables = [table]
        self.where = where
        self.get_compiler(using).execute_sql(NO_RESULTS)

    def delete_batch(self, pk_list, using, field=None):
        """
        Set up and execute delete queries for all the objects in pk_list.

        More than one physical query may be executed if there are a
        lot of values in pk_list.
        """
        if not field:
            field = self.get_meta().pk
        # Chunk the pk list so a single "IN (...)" clause never exceeds
        # backend limits on the number of query parameters.
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            # Fresh where clause per chunk — each iteration is its own query.
            self.where = self.where_class()
            self.add_q(Q(
                **{field.attname + '__in': pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]}))
            self.do_query(self.get_meta().db_table, self.where, using=using)

    def delete_qs(self, query, using):
        """
        Delete the queryset in one SQL query (if possible). For simple queries
        this is done by copying the query.query.where to self.query, for
        complex queries by using subquery.
        """
        innerq = query.query
        # Make sure the inner query has at least one table in use.
        innerq.get_initial_alias()
        # The same for our new query.
        self.get_initial_alias()
        # Only aliases that are actually referenced count as "in use".
        innerq_used_tables = [t for t in innerq.tables
                              if innerq.alias_refcount[t]]
        if ((not innerq_used_tables or innerq_used_tables == self.tables)
                and not len(innerq.having)):
            # There is only the base table in use in the query, and there is
            # no aggregate filtering going on.
            self.where = innerq.where
        else:
            pk = query.model._meta.pk
            if not connections[using].features.update_can_self_select:
                # We can't do the delete using subquery.
                values = list(query.values_list('pk', flat=True))
                if not values:
                    return
                self.delete_batch(values, using)
                return
            else:
                # Rewrite the inner query to select only the pk column, then
                # use it as the subquery for a "pk IN (...)" filter.
                innerq.clear_select_clause()
                innerq.select = [
                    SelectInfo((self.get_initial_alias(), pk.column), None)
                ]
                values = innerq
            self.where = self.where_class()
            self.add_q(Q(pk__in=values))
        self.get_compiler(using).execute_sql(NO_RESULTS)
  76. class UpdateQuery(Query):
  77. """
  78. Represents an "update" SQL query.
  79. """
  80. compiler = 'SQLUpdateCompiler'
  81. def __init__(self, *args, **kwargs):
  82. super(UpdateQuery, self).__init__(*args, **kwargs)
  83. self._setup_query()
  84. def _setup_query(self):
  85. """
  86. Runs on initialization and after cloning. Any attributes that would
  87. normally be set in __init__ should go in here, instead, so that they
  88. are also set up after a clone() call.
  89. """
  90. self.values = []
  91. self.related_ids = None
  92. if not hasattr(self, 'related_updates'):
  93. self.related_updates = {}
  94. def clone(self, klass=None, **kwargs):
  95. return super(UpdateQuery, self).clone(klass,
  96. related_updates=self.related_updates.copy(), **kwargs)
  97. def update_batch(self, pk_list, values, using):
  98. self.add_update_values(values)
  99. for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
  100. self.where = self.where_class()
  101. self.add_q(Q(pk__in=pk_list[offset: offset + GET_ITERATOR_CHUNK_SIZE]))
  102. self.get_compiler(using).execute_sql(NO_RESULTS)
  103. def add_update_values(self, values):
  104. """
  105. Convert a dictionary of field name to value mappings into an update
  106. query. This is the entry point for the public update() method on
  107. querysets.
  108. """
  109. values_seq = []
  110. for name, val in six.iteritems(values):
  111. field, model, direct, m2m = self.get_meta().get_field_by_name(name)
  112. if not direct or m2m:
  113. raise FieldError('Cannot update model field %r (only non-relations and foreign keys permitted).' % field)
  114. if model:
  115. self.add_related_update(model, field, val)
  116. continue
  117. values_seq.append((field, model, val))
  118. return self.add_update_fields(values_seq)
  119. def add_update_fields(self, values_seq):
  120. """
  121. Turn a sequence of (field, model, value) triples into an update query.
  122. Used by add_update_values() as well as the "fast" update path when
  123. saving models.
  124. """
  125. self.values.extend(values_seq)
  126. def add_related_update(self, model, field, value):
  127. """
  128. Adds (name, value) to an update query for an ancestor model.
  129. Updates are coalesced so that we only run one update query per ancestor.
  130. """
  131. self.related_updates.setdefault(model, []).append((field, None, value))
  132. def get_related_updates(self):
  133. """
  134. Returns a list of query objects: one for each update required to an
  135. ancestor model. Each query will have the same filtering conditions as
  136. the current query but will only update a single table.
  137. """
  138. if not self.related_updates:
  139. return []
  140. result = []
  141. for model, values in six.iteritems(self.related_updates):
  142. query = UpdateQuery(model)
  143. query.values = values
  144. if self.related_ids is not None:
  145. query.add_filter(('pk__in', self.related_ids))
  146. result.append(query)
  147. return result
  148. class InsertQuery(Query):
  149. compiler = 'SQLInsertCompiler'
  150. def __init__(self, *args, **kwargs):
  151. super(InsertQuery, self).__init__(*args, **kwargs)
  152. self.fields = []
  153. self.objs = []
  154. def clone(self, klass=None, **kwargs):
  155. extras = {
  156. 'fields': self.fields[:],
  157. 'objs': self.objs[:],
  158. 'raw': self.raw,
  159. }
  160. extras.update(kwargs)
  161. return super(InsertQuery, self).clone(klass, **extras)
  162. def insert_values(self, fields, objs, raw=False):
  163. """
  164. Set up the insert query from the 'insert_values' dictionary. The
  165. dictionary gives the model field names and their target values.
  166. If 'raw_values' is True, the values in the 'insert_values' dictionary
  167. are inserted directly into the query, rather than passed as SQL
  168. parameters. This provides a way to insert NULL and DEFAULT keywords
  169. into the query, for example.
  170. """
  171. self.fields = fields
  172. self.objs = objs
  173. self.raw = raw
  174. class DateQuery(Query):
  175. """
  176. A DateQuery is a normal query, except that it specifically selects a single
  177. date field. This requires some special handling when converting the results
  178. back to Python objects, so we put it in a separate class.
  179. """
  180. compiler = 'SQLDateCompiler'
  181. def add_select(self, field_name, lookup_type, order='ASC'):
  182. """
  183. Converts the query into an extraction query.
  184. """
  185. try:
  186. result = self.setup_joins(
  187. field_name.split(LOOKUP_SEP),
  188. self.get_meta(),
  189. self.get_initial_alias(),
  190. )
  191. except FieldError:
  192. raise FieldDoesNotExist("%s has no field named '%s'" % (
  193. self.get_meta().object_name, field_name
  194. ))
  195. field = result[0]
  196. self._check_field(field) # overridden in DateTimeQuery
  197. alias = result[3][-1]
  198. select = self._get_select((alias, field.column), lookup_type)
  199. self.clear_select_clause()
  200. self.select = [SelectInfo(select, None)]
  201. self.distinct = True
  202. self.order_by = [1] if order == 'ASC' else [-1]
  203. if field.null:
  204. self.add_filter(("%s__isnull" % field_name, False))
  205. def _check_field(self, field):
  206. assert isinstance(field, DateField), \
  207. "%r isn't a DateField." % field.name
  208. if settings.USE_TZ:
  209. assert not isinstance(field, DateTimeField), \
  210. "%r is a DateTimeField, not a DateField." % field.name
  211. def _get_select(self, col, lookup_type):
  212. return Date(col, lookup_type)
  213. class DateTimeQuery(DateQuery):
  214. """
  215. A DateTimeQuery is like a DateQuery but for a datetime field. If time zone
  216. support is active, the tzinfo attribute contains the time zone to use for
  217. converting the values before truncating them. Otherwise it's set to None.
  218. """
  219. compiler = 'SQLDateTimeCompiler'
  220. def clone(self, klass=None, memo=None, **kwargs):
  221. if 'tzinfo' not in kwargs and hasattr(self, 'tzinfo'):
  222. kwargs['tzinfo'] = self.tzinfo
  223. return super(DateTimeQuery, self).clone(klass, memo, **kwargs)
  224. def _check_field(self, field):
  225. assert isinstance(field, DateTimeField), \
  226. "%r isn't a DateTimeField." % field.name
  227. def _get_select(self, col, lookup_type):
  228. if self.tzinfo is None:
  229. tzname = None
  230. else:
  231. tzname = timezone._get_timezone_name(self.tzinfo)
  232. return DateTime(col, lookup_type, tzname)
  233. class AggregateQuery(Query):
  234. """
  235. An AggregateQuery takes another query as a parameter to the FROM
  236. clause and only selects the elements in the provided list.
  237. """
  238. compiler = 'SQLAggregateCompiler'
  239. def add_subquery(self, query, using):
  240. self.subquery, self.sub_params = query.get_compiler(using).as_sql(with_col_aliases=True)