query.py 76 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928
  1. """
  2. The main QuerySet implementation. This provides the public API for the ORM.
  3. """
  4. from collections import deque
  5. import copy
  6. import sys
  7. from django.conf import settings
  8. from django.core import exceptions
  9. from django.db import connections, router, transaction, IntegrityError
  10. from django.db.models.constants import LOOKUP_SEP
  11. from django.db.models.fields import AutoField, Empty
  12. from django.db.models.query_utils import (Q, select_related_descend,
  13. deferred_class_factory, InvalidQuery)
  14. from django.db.models.deletion import Collector
  15. from django.db.models.sql.constants import CURSOR
  16. from django.db.models import sql
  17. from django.utils.functional import partition
  18. from django.utils import six
  19. from django.utils import timezone
  20. # The maximum number (one less than the max to be precise) of results to fetch
  21. # in a get() query
  22. MAX_GET_RESULTS = 20
  23. # The maximum number of items to display in a QuerySet.__repr__
  24. REPR_OUTPUT_SIZE = 20
  25. # Pull into this namespace for backwards compatibility.
  26. EmptyResultSet = sql.EmptyResultSet
  27. def _pickle_queryset(class_bases, class_dict):
  28. """
  29. Used by `__reduce__` to create the initial version of the `QuerySet` class
  30. onto which the output of `__getstate__` will be applied.
  31. See `__reduce__` for more details.
  32. """
  33. new = Empty()
  34. new.__class__ = type(class_bases[0].__name__, class_bases, class_dict)
  35. return new
  36. class QuerySet(object):
  37. """
  38. Represents a lazy database lookup for a set of objects.
  39. """
    def __init__(self, model=None, query=None, using=None, hints=None):
        """
        Initialize a lazy queryset over `model`.

        :param model: the model class the queryset operates on.
        :param query: an existing sql.Query to reuse; a fresh one is built
            from `model` when omitted.
        :param using: database alias; None defers the choice to the router.
        :param hints: routing hints dict passed along to the db router.
        """
        self.model = model
        self._db = using
        self._hints = hints or {}
        self.query = query or sql.Query(self.model)
        # Populated lazily when the queryset is evaluated.
        self._result_cache = None
        self._sticky_filter = False
        self._for_write = False
        self._prefetch_related_lookups = []
        self._prefetch_done = False
        self._known_related_objects = {}  # {rel_field, {pk: rel_obj}}
    def as_manager(cls):
        """Return a Manager instance whose get_queryset() uses this class."""
        # Address the circular dependency between `Queryset` and `Manager`.
        from django.db.models.manager import Manager
        return Manager.from_queryset(cls)()
    # Marker read by Manager.from_queryset(): this method must not be copied
    # onto manager classes.
    as_manager.queryset_only = True
    # Applied manually (instead of @classmethod) so the attribute above can
    # be set on the plain function first.
    as_manager = classmethod(as_manager)
  57. ########################
  58. # PYTHON MAGIC METHODS #
  59. ########################
  60. def __deepcopy__(self, memo):
  61. """
  62. Deep copy of a QuerySet doesn't populate the cache
  63. """
  64. obj = self.__class__()
  65. for k, v in self.__dict__.items():
  66. if k == '_result_cache':
  67. obj.__dict__[k] = None
  68. else:
  69. obj.__dict__[k] = copy.deepcopy(v, memo)
  70. return obj
  71. def __getstate__(self):
  72. """
  73. Allows the QuerySet to be pickled.
  74. """
  75. # Force the cache to be fully populated.
  76. self._fetch_all()
  77. obj_dict = self.__dict__.copy()
  78. return obj_dict
    def __reduce__(self):
        """
        Used by pickle to deal with the types that we create dynamically when
        specialized queryset such as `ValuesQuerySet` are used in conjunction
        with querysets that are *subclasses* of `QuerySet`.

        See `_clone` implementation for more details.
        """
        if hasattr(self, '_specialized_queryset_class'):
            # This instance's class was built dynamically by _clone();
            # describe how to rebuild it at unpickle time.
            class_bases = (
                self._specialized_queryset_class,
                self._base_queryset_class,
            )
            class_dict = {
                '_specialized_queryset_class': self._specialized_queryset_class,
                '_base_queryset_class': self._base_queryset_class,
            }
            # _pickle_queryset() recreates the class; __getstate__'s dict is
            # then applied to the new instance by pickle.
            return _pickle_queryset, (class_bases, class_dict), self.__getstate__()
        return super(QuerySet, self).__reduce__()
  97. def __repr__(self):
  98. data = list(self[:REPR_OUTPUT_SIZE + 1])
  99. if len(data) > REPR_OUTPUT_SIZE:
  100. data[-1] = "...(remaining elements truncated)..."
  101. return repr(data)
  102. def __len__(self):
  103. self._fetch_all()
  104. return len(self._result_cache)
    def __iter__(self):
        """
        The queryset iterator protocol uses three nested iterators in the
        default case:
            1. sql.compiler:execute_sql()
               - Returns 100 rows at time (constants.GET_ITERATOR_CHUNK_SIZE)
                 using cursor.fetchmany(). This part is responsible for
                 doing some column masking, and returning the rows in chunks.
            2. sql/compiler.results_iter()
               - Returns one row at time. At this point the rows are still just
                 tuples. In some cases the return values are converted to
                 Python values at this location (see resolve_columns(),
                 resolve_aggregate()).
            3. self.iterator()
               - Responsible for turning the rows into model objects.
        """
        # Fully materialize the result cache, then iterate the cached list.
        self._fetch_all()
        return iter(self._result_cache)
  123. def __nonzero__(self):
  124. self._fetch_all()
  125. return bool(self._result_cache)
    def __getitem__(self, k):
        """
        Retrieves an item or slice from the set of results.
        """
        if not isinstance(k, (slice,) + six.integer_types):
            raise TypeError
        assert ((not isinstance(k, slice) and (k >= 0)) or
                (isinstance(k, slice) and (k.start is None or k.start >= 0) and
                 (k.stop is None or k.stop >= 0))), \
            "Negative indexing is not supported."

        if self._result_cache is not None:
            # Already evaluated; index into the cached results directly.
            return self._result_cache[k]

        if isinstance(k, slice):
            qs = self._clone()
            if k.start is not None:
                start = int(k.start)
            else:
                start = None
            if k.stop is not None:
                stop = int(k.stop)
            else:
                stop = None
            qs.query.set_limits(start, stop)
            # A step can't be expressed as SQL LIMIT/OFFSET, so it forces
            # evaluation; otherwise the sliced clone stays lazy.
            return list(qs)[::k.step] if k.step else qs

        # Plain integer index: fetch exactly one row.
        qs = self._clone()
        qs.query.set_limits(k, k + 1)
        return list(qs)[0]
  153. def __and__(self, other):
  154. self._merge_sanity_check(other)
  155. if isinstance(other, EmptyQuerySet):
  156. return other
  157. if isinstance(self, EmptyQuerySet):
  158. return self
  159. combined = self._clone()
  160. combined._merge_known_related_objects(other)
  161. combined.query.combine(other.query, sql.AND)
  162. return combined
  163. def __or__(self, other):
  164. self._merge_sanity_check(other)
  165. if isinstance(self, EmptyQuerySet):
  166. return other
  167. if isinstance(other, EmptyQuerySet):
  168. return self
  169. combined = self._clone()
  170. combined._merge_known_related_objects(other)
  171. combined.query.combine(other.query, sql.OR)
  172. return combined
  173. ####################################
  174. # METHODS THAT DO DATABASE QUERIES #
  175. ####################################
    def iterator(self):
        """
        An iterator over the results from applying this QuerySet to the
        database.

        Yields fully-constructed model instances one at a time, without
        populating this queryset's result cache.
        """
        # fill_cache is either False, True, or a dict of requested relations
        # (the raw value of query.select_related).
        fill_cache = False
        if connections[self.db].features.supports_select_related:
            fill_cache = self.query.select_related
        if isinstance(fill_cache, dict):
            requested = fill_cache
        else:
            requested = None
        max_depth = self.query.max_depth

        extra_select = list(self.query.extra_select)
        aggregate_select = list(self.query.aggregate_select)

        only_load = self.query.get_loaded_field_names()
        if not fill_cache:
            fields = self.model._meta.concrete_fields

        load_fields = []
        # If only/defer clauses have been specified,
        # build the list of fields that are to be loaded.
        if only_load:
            for field, model in self.model._meta.get_concrete_fields_with_model():
                if model is None:
                    model = self.model
                try:
                    if field.name in only_load[model]:
                        # Add a field that has been explicitly included
                        load_fields.append(field.name)
                except KeyError:
                    # Model wasn't explicitly listed in the only_load table
                    # Therefore, we need to load all fields from this model
                    load_fields.append(field.name)

        # Row layout: [extra columns][model columns][aggregate columns].
        index_start = len(extra_select)
        aggregate_start = index_start + len(load_fields or self.model._meta.concrete_fields)

        skip = None
        if load_fields and not fill_cache:
            # Some fields have been deferred, so we have to initialize
            # via keyword arguments.
            skip = set()
            init_list = []
            for field in fields:
                if field.name not in load_fields:
                    skip.add(field.attname)
                else:
                    init_list.append(field.attname)
            model_cls = deferred_class_factory(self.model, skip)

        # Cache db and model outside the loop
        db = self.db
        model = self.model
        compiler = self.query.get_compiler(using=db)
        if fill_cache:
            klass_info = get_klass_info(model, max_depth=max_depth,
                                        requested=requested, only_load=only_load)
        for row in compiler.results_iter():
            if fill_cache:
                obj, _ = get_cached_row(row, index_start, db, klass_info,
                                        offset=len(aggregate_select))
            else:
                # Omit aggregates in object creation.
                row_data = row[index_start:aggregate_start]
                if skip:
                    obj = model_cls(**dict(zip(init_list, row_data)))
                else:
                    obj = model(*row_data)

                # Store the source database of the object
                obj._state.db = db
                # This object came from the database; it's not being added.
                obj._state.adding = False

            if extra_select:
                for i, k in enumerate(extra_select):
                    setattr(obj, k, row[i])

            # Add the aggregates to the model
            if aggregate_select:
                for i, aggregate in enumerate(aggregate_select):
                    setattr(obj, aggregate, row[i + aggregate_start])

            # Add the known related objects to the model, if there are any
            if self._known_related_objects:
                for field, rel_objs in self._known_related_objects.items():
                    # Avoid overwriting objects loaded e.g. by select_related
                    if hasattr(obj, field.get_cache_name()):
                        continue
                    pk = getattr(obj, field.get_attname())
                    try:
                        rel_obj = rel_objs[pk]
                    except KeyError:
                        pass  # may happen in qs1 | qs2 scenarios
                    else:
                        setattr(obj, field.name, rel_obj)

            yield obj
    def aggregate(self, *args, **kwargs):
        """
        Returns a dictionary containing the calculations (aggregation)
        over the current queryset

        If args is present the expression is passed as a kwarg using
        the Aggregate object's default alias.
        """
        if self.query.distinct_fields:
            raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
        for arg in args:
            # Positional aggregates are keyed by their default alias.
            kwargs[arg.default_alias] = arg
        query = self.query.clone()
        # A sliced queryset must be aggregated as a subquery so that the
        # LIMIT/OFFSET are honoured.
        force_subq = query.low_mark != 0 or query.high_mark is not None
        for (alias, aggregate_expr) in kwargs.items():
            query.add_aggregate(aggregate_expr, self.model, alias,
                                is_summary=True)
        return query.get_aggregation(using=self.db, force_subq=force_subq)
  283. def count(self):
  284. """
  285. Performs a SELECT COUNT() and returns the number of records as an
  286. integer.
  287. If the QuerySet is already fully cached this simply returns the length
  288. of the cached results set to avoid multiple SELECT COUNT(*) calls.
  289. """
  290. if self._result_cache is not None:
  291. return len(self._result_cache)
  292. return self.query.get_count(using=self.db)
    def get(self, *args, **kwargs):
        """
        Performs the query and returns a single object matching the given
        keyword arguments.

        Raises the model's DoesNotExist when no row matches and
        MultipleObjectsReturned when more than one does.
        """
        clone = self.filter(*args, **kwargs)
        if self.query.can_filter():
            # Ordering is irrelevant when fetching a single object; drop it.
            clone = clone.order_by()
        if (not clone.query.select_for_update or
                connections[self.db].features.supports_select_for_update_with_limit):
            # Fetch one row past the cap so the error message below can tell
            # an exact count from "more than MAX_GET_RESULTS".
            clone = clone[:MAX_GET_RESULTS + 1]
        num = len(clone)
        if num == 1:
            return clone._result_cache[0]
        if not num:
            raise self.model.DoesNotExist(
                "%s matching query does not exist." %
                self.model._meta.object_name)
        raise self.model.MultipleObjectsReturned(
            "get() returned more than one %s -- it returned %s!" % (
                self.model._meta.object_name,
                num if num <= MAX_GET_RESULTS else 'more than %s' % MAX_GET_RESULTS
            )
        )
  317. def create(self, **kwargs):
  318. """
  319. Creates a new object with the given kwargs, saving it to the database
  320. and returning the created object.
  321. """
  322. obj = self.model(**kwargs)
  323. self._for_write = True
  324. obj.save(force_insert=True, using=self.db)
  325. return obj
    def bulk_create(self, objs, batch_size=None):
        """
        Inserts each of the instances into the database. This does *not* call
        save() on each of the instances, does not send any pre/post save
        signals, and does not set the primary key attribute if it is an
        autoincrement field.

        :param objs: iterable of unsaved model instances.
        :param batch_size: max rows per INSERT; None means a single batch.
        :raises ValueError: for models using multi-table inheritance.
        """
        # So this case is fun. When you bulk insert you don't get the primary
        # keys back (if it's an autoincrement), so you can't insert into the
        # child tables which references this. There are two workarounds, 1)
        # this could be implemented if you didn't have an autoincrement pk,
        # and 2) you could do it by doing O(n) normal inserts into the parent
        # tables to get the primary keys back, and then doing a single bulk
        # insert into the childmost table. Some databases might allow doing
        # this by using RETURNING clause for the insert query. We're punting
        # on these for now because they are relatively rare cases.
        assert batch_size is None or batch_size > 0
        if self.model._meta.parents:
            raise ValueError("Can't bulk create an inherited model")
        if not objs:
            return objs
        self._for_write = True
        connection = connections[self.db]
        fields = self.model._meta.local_concrete_fields
        with transaction.commit_on_success_unless_managed(using=self.db):
            if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk
                    and self.model._meta.has_auto_field):
                self._batched_insert(objs, fields, batch_size)
            else:
                # Split objects that already have a pk from those that don't,
                # so the AutoField column can be omitted for the latter.
                objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
                if objs_with_pk:
                    self._batched_insert(objs_with_pk, fields, batch_size)
                if objs_without_pk:
                    fields = [f for f in fields if not isinstance(f, AutoField)]
                    self._batched_insert(objs_without_pk, fields, batch_size)
        return objs
  362. def get_or_create(self, defaults=None, **kwargs):
  363. """
  364. Looks up an object with the given kwargs, creating one if necessary.
  365. Returns a tuple of (object, created), where created is a boolean
  366. specifying whether an object was created.
  367. """
  368. lookup, params = self._extract_model_params(defaults, **kwargs)
  369. self._for_write = True
  370. try:
  371. return self.get(**lookup), False
  372. except self.model.DoesNotExist:
  373. return self._create_object_from_params(lookup, params)
    def update_or_create(self, defaults=None, **kwargs):
        """
        Looks up an object with the given kwargs, updating one with defaults
        if it exists, otherwise creates a new one.

        Returns a tuple (object, created), where created is a boolean
        specifying whether an object was created.
        """
        defaults = defaults or {}
        lookup, params = self._extract_model_params(defaults, **kwargs)
        self._for_write = True
        try:
            obj = self.get(**lookup)
        except self.model.DoesNotExist:
            obj, created = self._create_object_from_params(lookup, params)
            if created:
                return obj, created
            # A concurrent insert won the race; fall through and update the
            # row that _create_object_from_params fetched instead.
        for k, v in six.iteritems(defaults):
            setattr(obj, k, v)
        with transaction.atomic(using=self.db):
            obj.save(using=self.db)
        return obj, False
    def _create_object_from_params(self, lookup, params):
        """
        Tries to create an object using passed params.
        Used by get_or_create and update_or_create

        Returns (object, created). On an IntegrityError it retries the
        lookup (a concurrent insert may have created the row); if that also
        fails, the original IntegrityError is re-raised.
        """
        obj = self.model(**params)
        try:
            with transaction.atomic(using=self.db):
                obj.save(force_insert=True, using=self.db)
            return obj, True
        except IntegrityError:
            # Capture before the nested get() can overwrite the exception.
            exc_info = sys.exc_info()
            try:
                return self.get(**lookup), False
            except self.model.DoesNotExist:
                pass
            six.reraise(*exc_info)
  412. def _extract_model_params(self, defaults, **kwargs):
  413. """
  414. Prepares `lookup` (kwargs that are valid model attributes), `params`
  415. (for creating a model instance) based on given kwargs; for use by
  416. get_or_create and update_or_create.
  417. """
  418. defaults = defaults or {}
  419. lookup = kwargs.copy()
  420. for f in self.model._meta.fields:
  421. if f.attname in lookup:
  422. lookup[f.name] = lookup.pop(f.attname)
  423. params = dict((k, v) for k, v in kwargs.items() if LOOKUP_SEP not in k)
  424. params.update(defaults)
  425. return lookup, params
    def _earliest_or_latest(self, field_name=None, direction="-"):
        """
        Returns the latest object, according to the model's
        'get_latest_by' option or optional given field_name.

        `direction` is "" for earliest() and "-" for latest().
        """
        order_by = field_name or getattr(self.model._meta, 'get_latest_by')
        assert bool(order_by), "earliest() and latest() require either a "\
            "field_name parameter or 'get_latest_by' in the model"
        assert self.query.can_filter(), \
            "Cannot change a query once a slice has been taken."
        obj = self._clone()
        # Exactly one row, ordered solely by the chosen field/direction.
        obj.query.set_limits(high=1)
        obj.query.clear_ordering(force_empty=True)
        obj.query.add_ordering('%s%s' % (direction, order_by))
        return obj.get()
  441. def earliest(self, field_name=None):
  442. return self._earliest_or_latest(field_name=field_name, direction="")
  443. def latest(self, field_name=None):
  444. return self._earliest_or_latest(field_name=field_name, direction="-")
  445. def first(self):
  446. """
  447. Returns the first object of a query, returns None if no match is found.
  448. """
  449. qs = self if self.ordered else self.order_by('pk')
  450. try:
  451. return qs[0]
  452. except IndexError:
  453. return None
  454. def last(self):
  455. """
  456. Returns the last object of a query, returns None if no match is found.
  457. """
  458. qs = self.reverse() if self.ordered else self.order_by('-pk')
  459. try:
  460. return qs[0]
  461. except IndexError:
  462. return None
  463. def in_bulk(self, id_list):
  464. """
  465. Returns a dictionary mapping each of the given IDs to the object with
  466. that ID.
  467. """
  468. assert self.query.can_filter(), \
  469. "Cannot use 'limit' or 'offset' with in_bulk"
  470. if not id_list:
  471. return {}
  472. qs = self.filter(pk__in=id_list).order_by()
  473. return dict((obj._get_pk_val(), obj) for obj in qs)
    def delete(self):
        """
        Deletes the records in the current QuerySet.
        """
        assert self.query.can_filter(), \
            "Cannot use 'limit' or 'offset' with delete."

        del_query = self._clone()

        # The delete is actually 2 queries - one to find related objects,
        # and one to delete. Make sure that the discovery of related
        # objects is performed on the same database as the deletion.
        del_query._for_write = True

        # Disable non-supported fields.
        del_query.query.select_for_update = False
        del_query.query.select_related = False
        del_query.query.clear_ordering(force_empty=True)

        # The Collector handles cascades and signals for related objects.
        collector = Collector(using=del_query.db)
        collector.collect(del_query)
        collector.delete()

        # Clear the result cache, in case this QuerySet gets reused.
        self._result_cache = None
    # Flags consumed by Manager.from_queryset().
    delete.alters_data = True
    delete.queryset_only = True
    def _raw_delete(self, using):
        """
        Deletes objects found from the given queryset in single direct SQL
        query. No signals are sent, and there is no protection for cascades.
        """
        sql.DeleteQuery(self.model).delete_qs(self, using)
    _raw_delete.alters_data = True
    def update(self, **kwargs):
        """
        Updates all elements in the current QuerySet, setting all the given
        fields to the appropriate values.

        Returns the number of rows matched by the UPDATE.
        """
        assert self.query.can_filter(), \
            "Cannot update a query once a slice has been taken."
        self._for_write = True
        query = self.query.clone(sql.UpdateQuery)
        query.add_update_values(kwargs)
        with transaction.commit_on_success_unless_managed(using=self.db):
            rows = query.get_compiler(self.db).execute_sql(CURSOR)
        # The cached results no longer reflect the database.
        self._result_cache = None
        return rows
    update.alters_data = True
    def _update(self, values):
        """
        A version of update that accepts field objects instead of field names.
        Used primarily for model saving and not intended for use by general
        code (it requires too much poking around at model internals to be
        useful at that level).
        """
        assert self.query.can_filter(), \
            "Cannot update a query once a slice has been taken."
        query = self.query.clone(sql.UpdateQuery)
        query.add_update_fields(values)
        self._result_cache = None
        return query.get_compiler(self.db).execute_sql(CURSOR)
    _update.alters_data = True
    # Unlike update(), this IS copied onto manager classes (model saving
    # goes through it).
    _update.queryset_only = False
  533. def exists(self):
  534. if self._result_cache is None:
  535. return self.query.has_results(using=self.db)
  536. return bool(self._result_cache)
    def _prefetch_related_objects(self):
        """Run the queued prefetch_related() lookups over the cached results."""
        # This method can only be called once the result cache has been filled.
        prefetch_related_objects(self._result_cache, self._prefetch_related_lookups)
        self._prefetch_done = True
  541. ##################################################
  542. # PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
  543. ##################################################
  544. def raw(self, raw_query, params=None, translations=None, using=None):
  545. if using is None:
  546. using = self.db
  547. return RawQuerySet(raw_query, model=self.model,
  548. params=params, translations=translations,
  549. using=using)
    def values(self, *fields):
        """Return a ValuesQuerySet clone restricted to the given fields."""
        return self._clone(klass=ValuesQuerySet, setup=True, _fields=fields)
    def values_list(self, *fields, **kwargs):
        """
        Return a ValuesListQuerySet clone over the given fields.

        `flat=True` is only valid with a single field.
        """
        flat = kwargs.pop('flat', False)
        if kwargs:
            raise TypeError('Unexpected keyword arguments to values_list: %s'
                            % (list(kwargs),))
        if flat and len(fields) > 1:
            raise TypeError("'flat' is not valid when values_list is called with more than one field.")
        return self._clone(klass=ValuesListQuerySet, setup=True, flat=flat,
                           _fields=fields)
    def dates(self, field_name, kind, order='ASC'):
        """
        Returns a list of date objects representing all available dates for
        the given field_name, scoped to 'kind'.
        """
        assert kind in ("year", "month", "day"), \
            "'kind' must be one of 'year', 'month' or 'day'."
        assert order in ('ASC', 'DESC'), \
            "'order' must be either 'ASC' or 'DESC'."
        return self._clone(klass=DateQuerySet, setup=True,
                           _field_name=field_name, _kind=kind, _order=order)
    def datetimes(self, field_name, kind, order='ASC', tzinfo=None):
        """
        Returns a list of datetime objects representing all available
        datetimes for the given field_name, scoped to 'kind'.
        """
        assert kind in ("year", "month", "day", "hour", "minute", "second"), \
            "'kind' must be one of 'year', 'month', 'day', 'hour', 'minute' or 'second'."
        assert order in ('ASC', 'DESC'), \
            "'order' must be either 'ASC' or 'DESC'."
        if settings.USE_TZ:
            if tzinfo is None:
                # Default to the active timezone when USE_TZ is enabled.
                tzinfo = timezone.get_current_timezone()
        else:
            # Naive datetimes: discard any tzinfo the caller supplied.
            tzinfo = None
        return self._clone(klass=DateTimeQuerySet, setup=True,
                           _field_name=field_name, _kind=kind, _order=order, _tzinfo=tzinfo)
  588. def none(self):
  589. """
  590. Returns an empty QuerySet.
  591. """
  592. clone = self._clone()
  593. clone.query.set_empty()
  594. return clone
  595. ##################################################################
  596. # PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
  597. ##################################################################
    def all(self):
        """
        Returns a new QuerySet that is a copy of the current one. This allows a
        QuerySet to proxy for a model manager in some cases.
        """
        return self._clone()
    def filter(self, *args, **kwargs):
        """
        Returns a new QuerySet instance with the args ANDed to the existing
        set.
        """
        return self._filter_or_exclude(False, *args, **kwargs)
    def exclude(self, *args, **kwargs):
        """
        Returns a new QuerySet instance with NOT (args) ANDed to the existing
        set.
        """
        return self._filter_or_exclude(True, *args, **kwargs)
  616. def _filter_or_exclude(self, negate, *args, **kwargs):
  617. if args or kwargs:
  618. assert self.query.can_filter(), \
  619. "Cannot filter a query once a slice has been taken."
  620. clone = self._clone()
  621. if negate:
  622. clone.query.add_q(~Q(*args, **kwargs))
  623. else:
  624. clone.query.add_q(Q(*args, **kwargs))
  625. return clone
  626. def complex_filter(self, filter_obj):
  627. """
  628. Returns a new QuerySet instance with filter_obj added to the filters.
  629. filter_obj can be a Q object (or anything with an add_to_query()
  630. method) or a dictionary of keyword lookup arguments.
  631. This exists to support framework features such as 'limit_choices_to',
  632. and usually it will be more natural to use other methods.
  633. """
  634. if isinstance(filter_obj, Q) or hasattr(filter_obj, 'add_to_query'):
  635. clone = self._clone()
  636. clone.query.add_q(filter_obj)
  637. return clone
  638. else:
  639. return self._filter_or_exclude(None, **filter_obj)
  640. def select_for_update(self, nowait=False):
  641. """
  642. Returns a new QuerySet instance that will select objects with a
  643. FOR UPDATE lock.
  644. """
  645. obj = self._clone()
  646. obj._for_write = True
  647. obj.query.select_for_update = True
  648. obj.query.select_for_update_nowait = nowait
  649. return obj
  650. def select_related(self, *fields):
  651. """
  652. Returns a new QuerySet instance that will select related objects.
  653. If fields are specified, they must be ForeignKey fields and only those
  654. related objects are included in the selection.
  655. If select_related(None) is called, the list is cleared.
  656. """
  657. obj = self._clone()
  658. if fields == (None,):
  659. obj.query.select_related = False
  660. elif fields:
  661. obj.query.add_select_related(fields)
  662. else:
  663. obj.query.select_related = True
  664. return obj
  665. def prefetch_related(self, *lookups):
  666. """
  667. Returns a new QuerySet instance that will prefetch the specified
  668. Many-To-One and Many-To-Many related objects when the QuerySet is
  669. evaluated.
  670. When prefetch_related() is called more than once, the list of lookups to
  671. prefetch is appended to. If prefetch_related(None) is called, the list
  672. is cleared.
  673. """
  674. clone = self._clone()
  675. if lookups == (None,):
  676. clone._prefetch_related_lookups = []
  677. else:
  678. clone._prefetch_related_lookups.extend(lookups)
  679. return clone
  680. def annotate(self, *args, **kwargs):
  681. """
  682. Return a query set in which the returned objects have been annotated
  683. with data aggregated from related fields.
  684. """
  685. for arg in args:
  686. if arg.default_alias in kwargs:
  687. raise ValueError("The named annotation '%s' conflicts with the "
  688. "default name for another annotation."
  689. % arg.default_alias)
  690. kwargs[arg.default_alias] = arg
  691. names = getattr(self, '_fields', None)
  692. if names is None:
  693. names = set(self.model._meta.get_all_field_names())
  694. for aggregate in kwargs:
  695. if aggregate in names:
  696. raise ValueError("The annotation '%s' conflicts with a field on "
  697. "the model." % aggregate)
  698. obj = self._clone()
  699. obj._setup_aggregate_query(list(kwargs))
  700. # Add the aggregates to the query
  701. for (alias, aggregate_expr) in kwargs.items():
  702. obj.query.add_aggregate(aggregate_expr, self.model, alias,
  703. is_summary=False)
  704. return obj
  705. def order_by(self, *field_names):
  706. """
  707. Returns a new QuerySet instance with the ordering changed.
  708. """
  709. assert self.query.can_filter(), \
  710. "Cannot reorder a query once a slice has been taken."
  711. obj = self._clone()
  712. obj.query.clear_ordering(force_empty=False)
  713. obj.query.add_ordering(*field_names)
  714. return obj
  715. def distinct(self, *field_names):
  716. """
  717. Returns a new QuerySet instance that will select only distinct results.
  718. """
  719. assert self.query.can_filter(), \
  720. "Cannot create distinct fields once a slice has been taken."
  721. obj = self._clone()
  722. obj.query.add_distinct_fields(*field_names)
  723. return obj
  724. def extra(self, select=None, where=None, params=None, tables=None,
  725. order_by=None, select_params=None):
  726. """
  727. Adds extra SQL fragments to the query.
  728. """
  729. assert self.query.can_filter(), \
  730. "Cannot change a query once a slice has been taken"
  731. clone = self._clone()
  732. clone.query.add_extra(select, select_params, where, params, tables, order_by)
  733. return clone
  734. def reverse(self):
  735. """
  736. Reverses the ordering of the QuerySet.
  737. """
  738. clone = self._clone()
  739. clone.query.standard_ordering = not clone.query.standard_ordering
  740. return clone
  741. def defer(self, *fields):
  742. """
  743. Defers the loading of data for certain fields until they are accessed.
  744. The set of fields to defer is added to any existing set of deferred
  745. fields. The only exception to this is if None is passed in as the only
  746. parameter, in which case all deferrals are removed (None acts as a
  747. reset option).
  748. """
  749. clone = self._clone()
  750. if fields == (None,):
  751. clone.query.clear_deferred_loading()
  752. else:
  753. clone.query.add_deferred_loading(fields)
  754. return clone
  755. def only(self, *fields):
  756. """
  757. Essentially, the opposite of defer. Only the fields passed into this
  758. method and that are not already specified as deferred are loaded
  759. immediately when the queryset is evaluated.
  760. """
  761. if fields == (None,):
  762. # Can only pass None to defer(), not only(), as the rest option.
  763. # That won't stop people trying to do this, so let's be explicit.
  764. raise TypeError("Cannot pass None as an argument to only().")
  765. clone = self._clone()
  766. clone.query.add_immediate_loading(fields)
  767. return clone
  768. def using(self, alias):
  769. """
  770. Selects which database this QuerySet should execute its query against.
  771. """
  772. clone = self._clone()
  773. clone._db = alias
  774. return clone
  775. ###################################
  776. # PUBLIC INTROSPECTION ATTRIBUTES #
  777. ###################################
  778. def ordered(self):
  779. """
  780. Returns True if the QuerySet is ordered -- i.e. has an order_by()
  781. clause or a default ordering on the model.
  782. """
  783. if self.query.extra_order_by or self.query.order_by:
  784. return True
  785. elif self.query.default_ordering and self.query.get_meta().ordering:
  786. return True
  787. else:
  788. return False
  789. ordered = property(ordered)
  790. @property
  791. def db(self):
  792. "Return the database that will be used if this query is executed now"
  793. if self._for_write:
  794. return self._db or router.db_for_write(self.model, **self._hints)
  795. return self._db or router.db_for_read(self.model, **self._hints)
  796. ###################
  797. # PRIVATE METHODS #
  798. ###################
  799. def _insert(self, objs, fields, return_id=False, raw=False, using=None):
  800. """
  801. Inserts a new record for the given model. This provides an interface to
  802. the InsertQuery class and is how Model.save() is implemented.
  803. """
  804. self._for_write = True
  805. if using is None:
  806. using = self.db
  807. query = sql.InsertQuery(self.model)
  808. query.insert_values(fields, objs, raw=raw)
  809. return query.get_compiler(using=using).execute_sql(return_id)
  810. _insert.alters_data = True
  811. _insert.queryset_only = False
  812. def _batched_insert(self, objs, fields, batch_size):
  813. """
  814. A little helper method for bulk_insert to insert the bulk one batch
  815. at a time. Inserts recursively a batch from the front of the bulk and
  816. then _batched_insert() the remaining objects again.
  817. """
  818. if not objs:
  819. return
  820. ops = connections[self.db].ops
  821. batch_size = (batch_size or max(ops.bulk_batch_size(fields, objs), 1))
  822. for batch in [objs[i:i + batch_size]
  823. for i in range(0, len(objs), batch_size)]:
  824. self.model._base_manager._insert(batch, fields=fields,
  825. using=self.db)
def _clone(self, klass=None, setup=False, **kwargs):
    """
    Return a copy of this queryset, optionally as an instance of *klass*.

    Extra *kwargs* are written straight into the clone's __dict__; when
    *setup* is true and the clone defines _setup_query(), it is called
    after the copy is built.
    """
    if klass is None:
        klass = self.__class__
    elif not issubclass(self.__class__, klass):
        # The requested class isn't already in our MRO (e.g. turning a
        # custom queryset into a ValuesQuerySet): build a hybrid subclass
        # combining both, and remember the originals so later clones can
        # repeat the trick.
        base_queryset_class = getattr(self, '_base_queryset_class', self.__class__)
        class_bases = (klass, base_queryset_class)
        class_dict = {
            '_base_queryset_class': base_queryset_class,
            '_specialized_queryset_class': klass,
        }
        klass = type(klass.__name__, class_bases, class_dict)

    query = self.query.clone()
    if self._sticky_filter:
        # Propagate the one-shot flag set by _next_is_sticky().
        query.filter_is_sticky = True
    c = klass(model=self.model, query=query, using=self._db, hints=self._hints)
    c._for_write = self._for_write
    c._prefetch_related_lookups = self._prefetch_related_lookups[:]
    # NOTE: shared by reference, not copied — clones see the same mapping.
    c._known_related_objects = self._known_related_objects
    c.__dict__.update(kwargs)
    if setup and hasattr(c, '_setup_query'):
        c._setup_query()
    return c
  848. def _fetch_all(self):
  849. if self._result_cache is None:
  850. self._result_cache = list(self.iterator())
  851. if self._prefetch_related_lookups and not self._prefetch_done:
  852. self._prefetch_related_objects()
  853. def _next_is_sticky(self):
  854. """
  855. Indicates that the next filter call and the one following that should
  856. be treated as a single filter. This is only important when it comes to
  857. determining when to reuse tables for many-to-many filters. Required so
  858. that we can filter naturally on the results of related managers.
  859. This doesn't return a clone of the current QuerySet (it returns
  860. "self"). The method is only used internally and should be immediately
  861. followed by a filter() that does create a clone.
  862. """
  863. self._sticky_filter = True
  864. return self
def _merge_sanity_check(self, other):
    """
    Checks that we are merging two comparable QuerySet classes. By default
    this does nothing (any pair is mergeable), but see ValuesQuerySet for
    an example of where it's useful.
    """
    pass
  872. def _merge_known_related_objects(self, other):
  873. """
  874. Keep track of all known related objects from either QuerySet instance.
  875. """
  876. for field, objects in other._known_related_objects.items():
  877. self._known_related_objects.setdefault(field, {}).update(objects)
  878. def _setup_aggregate_query(self, aggregates):
  879. """
  880. Prepare the query for computing a result that contains aggregate annotations.
  881. """
  882. opts = self.model._meta
  883. if self.query.group_by is None:
  884. field_names = [f.attname for f in opts.concrete_fields]
  885. self.query.add_fields(field_names, False)
  886. self.query.set_group_by()
  887. def _prepare(self):
  888. return self
  889. def _as_sql(self, connection):
  890. """
  891. Returns the internal query's SQL and parameters (as a tuple).
  892. """
  893. obj = self.values("pk")
  894. if obj._db is None or connection == connections[obj._db]:
  895. return obj.query.get_compiler(connection=connection).as_nested_sql()
  896. raise ValueError("Can't do subqueries with queries on different DBs.")
# When used as part of a nested query, a queryset will never be an "always
# empty" result.
# NOTE(review): this flag is consumed by the SQL-building layer when the
# queryset appears inside another query — confirm exact semantics there.
value_annotation = True
  900. def _add_hints(self, **hints):
  901. """
  902. Update hinting information for later use by Routers
  903. """
  904. # If there is any hinting information, add it to what we already know.
  905. # If we have a new hint for an existing key, overwrite with the new value.
  906. self._hints.update(hints)
  907. def _has_filters(self):
  908. """
  909. Checks if this QuerySet has any filtering going on. Note that this
  910. isn't equivalent for checking if all objects are present in results,
  911. for example qs[1:]._has_filters() -> False.
  912. """
  913. return self.query.has_filters()
  914. class InstanceCheckMeta(type):
  915. def __instancecheck__(self, instance):
  916. return instance.query.is_empty()
class EmptyQuerySet(six.with_metaclass(InstanceCheckMeta)):
    """
    Marker class usable for checking if a queryset is empty by .none():
    isinstance(qs.none(), EmptyQuerySet) -> True

    Never instantiated: the InstanceCheckMeta metaclass routes the
    isinstance() check to qs.query.is_empty().
    """
    def __init__(self, *args, **kwargs):
        # The class exists only for the isinstance() check above.
        raise TypeError("EmptyQuerySet can't be instantiated")
class ValuesQuerySet(QuerySet):
    """QuerySet whose iterator yields dictionaries rather than model instances."""

    def __init__(self, *args, **kwargs):
        super(ValuesQuerySet, self).__init__(*args, **kwargs)
        # select_related isn't supported in values(). (FIXME -#3358)
        self.query.select_related = False

        # QuerySet.clone() will also set up the _fields attribute with the
        # names of the model fields to select.

    def only(self, *fields):
        raise NotImplementedError("ValuesQuerySet does not implement only()")

    def defer(self, *fields):
        raise NotImplementedError("ValuesQuerySet does not implement defer()")

    def iterator(self):
        """Yield one dict per row, keyed by extra, field and aggregate names."""
        # Purge any extra columns that haven't been explicitly asked for
        extra_names = list(self.query.extra_select)
        field_names = self.field_names
        aggregate_names = list(self.query.aggregate_select)

        names = extra_names + field_names + aggregate_names

        for row in self.query.get_compiler(self.db).results_iter():
            yield dict(zip(names, row))

    def delete(self):
        # values().delete() doesn't work currently - make sure it raises an
        # user friendly error.
        raise TypeError("Queries with .values() or .values_list() applied "
                        "can't be deleted")

    def _setup_query(self):
        """
        Constructs the field_names list that the values query will be
        retrieving.

        Called by the _clone() method after initializing the rest of the
        instance.
        """
        self.query.clear_deferred_loading()
        self.query.clear_select_fields()

        if self._fields:
            self.extra_names = []
            self.aggregate_names = []
            if not self.query._extra and not self.query._aggregates:
                # Short cut - if there are no extra or aggregates, then
                # the values() clause must be just field names.
                self.field_names = list(self._fields)
            else:
                self.query.default_cols = False
                self.field_names = []
                for f in self._fields:
                    # we inspect the full extra_select list since we might
                    # be adding back an extra select item that we hadn't
                    # had selected previously.
                    if self.query._extra and f in self.query._extra:
                        self.extra_names.append(f)
                    elif f in self.query.aggregate_select:
                        self.aggregate_names.append(f)
                    else:
                        self.field_names.append(f)
        else:
            # Default to all fields.  None (not []) for the masks below
            # means "no mask": select everything.
            self.extra_names = None
            self.field_names = [f.attname for f in self.model._meta.concrete_fields]
            self.aggregate_names = None

        self.query.select = []
        if self.extra_names is not None:
            self.query.set_extra_mask(self.extra_names)
        self.query.add_fields(self.field_names, True)
        if self.aggregate_names is not None:
            self.query.set_aggregate_mask(self.aggregate_names)

    def _clone(self, klass=None, setup=False, **kwargs):
        """
        Cloning a ValuesQuerySet preserves the current fields.
        """
        c = super(ValuesQuerySet, self)._clone(klass, **kwargs)
        if not hasattr(c, '_fields'):
            # Only clone self._fields if _fields wasn't passed into the cloning
            # call directly.
            c._fields = self._fields[:]
        c.field_names = self.field_names
        c.extra_names = self.extra_names
        c.aggregate_names = self.aggregate_names
        if setup and hasattr(c, '_setup_query'):
            c._setup_query()
        return c

    def _merge_sanity_check(self, other):
        """Refuse to merge with a values queryset selecting different columns."""
        super(ValuesQuerySet, self)._merge_sanity_check(other)
        if (set(self.extra_names) != set(other.extra_names) or
                set(self.field_names) != set(other.field_names) or
                self.aggregate_names != other.aggregate_names):
            raise TypeError("Merging '%s' classes must involve the same values in each case."
                            % self.__class__.__name__)

    def _setup_aggregate_query(self, aggregates):
        """
        Prepare the query for computing a result that contains aggregate annotations.
        """
        self.query.set_group_by()

        if self.aggregate_names is not None:
            self.aggregate_names.extend(aggregates)
            self.query.set_aggregate_mask(self.aggregate_names)

        super(ValuesQuerySet, self)._setup_aggregate_query(aggregates)

    def _as_sql(self, connection):
        """
        For ValuesQuerySet (and subclasses like ValuesListQuerySet), they can
        only be used as nested queries if they're already set up to select only
        a single field (in which case, that is the field column that is
        returned). This differs from QuerySet.as_sql(), where the column to
        select is set up by Django.
        """
        if ((self._fields and len(self._fields) > 1) or
                (not self._fields and len(self.model._meta.fields) > 1)):
            raise TypeError('Cannot use a multi-field %s as a filter value.'
                            % self.__class__.__name__)

        obj = self._clone()
        if obj._db is None or connection == connections[obj._db]:
            return obj.query.get_compiler(connection=connection).as_nested_sql()
        raise ValueError("Can't do subqueries with queries on different DBs.")

    def _prepare(self):
        """
        Validates that we aren't trying to do a query like
        value__in=qs.values('value1', 'value2'), which isn't valid.
        """
        if ((self._fields and len(self._fields) > 1) or
                (not self._fields and len(self.model._meta.fields) > 1)):
            raise TypeError('Cannot use a multi-field %s as a filter value.'
                            % self.__class__.__name__)
        return self
class ValuesListQuerySet(ValuesQuerySet):
    """values_list() queryset: yields tuples, or bare values when flat=True."""

    def iterator(self):
        if self.flat and len(self._fields) == 1:
            # flat=True with a single field: yield the raw values.
            for row in self.query.get_compiler(self.db).results_iter():
                yield row[0]
        elif not self.query.extra_select and not self.query.aggregate_select:
            # No extras/aggregates: rows already match self._fields order.
            for row in self.query.get_compiler(self.db).results_iter():
                yield tuple(row)
        else:
            # When extra(select=...) or an annotation is involved, the extra
            # cols are always at the start of the row, and we need to reorder
            # the fields to match the order in self._fields.
            extra_names = list(self.query.extra_select)
            field_names = self.field_names
            aggregate_names = list(self.query.aggregate_select)

            names = extra_names + field_names + aggregate_names

            # If a field list has been specified, use it. Otherwise, use the
            # full list of fields, including extras and aggregates.
            if self._fields:
                fields = list(self._fields) + [f for f in aggregate_names if f not in self._fields]
            else:
                fields = names

            for row in self.query.get_compiler(self.db).results_iter():
                data = dict(zip(names, row))
                yield tuple(data[f] for f in fields)

    def _clone(self, *args, **kwargs):
        clone = super(ValuesListQuerySet, self)._clone(*args, **kwargs)
        if not hasattr(clone, "flat"):
            # Only assign flat if the clone didn't already get it from kwargs
            clone.flat = self.flat
        return clone
  1076. class DateQuerySet(QuerySet):
  1077. def iterator(self):
  1078. return self.query.get_compiler(self.db).results_iter()
  1079. def _setup_query(self):
  1080. """
  1081. Sets up any special features of the query attribute.
  1082. Called by the _clone() method after initializing the rest of the
  1083. instance.
  1084. """
  1085. self.query.clear_deferred_loading()
  1086. self.query = self.query.clone(klass=sql.DateQuery, setup=True)
  1087. self.query.select = []
  1088. self.query.add_select(self._field_name, self._kind, self._order)
  1089. def _clone(self, klass=None, setup=False, **kwargs):
  1090. c = super(DateQuerySet, self)._clone(klass, False, **kwargs)
  1091. c._field_name = self._field_name
  1092. c._kind = self._kind
  1093. if setup and hasattr(c, '_setup_query'):
  1094. c._setup_query()
  1095. return c
  1096. class DateTimeQuerySet(QuerySet):
  1097. def iterator(self):
  1098. return self.query.get_compiler(self.db).results_iter()
  1099. def _setup_query(self):
  1100. """
  1101. Sets up any special features of the query attribute.
  1102. Called by the _clone() method after initializing the rest of the
  1103. instance.
  1104. """
  1105. self.query.clear_deferred_loading()
  1106. self.query = self.query.clone(klass=sql.DateTimeQuery, setup=True, tzinfo=self._tzinfo)
  1107. self.query.select = []
  1108. self.query.add_select(self._field_name, self._kind, self._order)
  1109. def _clone(self, klass=None, setup=False, **kwargs):
  1110. c = super(DateTimeQuerySet, self)._clone(klass, False, **kwargs)
  1111. c._field_name = self._field_name
  1112. c._kind = self._kind
  1113. c._tzinfo = self._tzinfo
  1114. if setup and hasattr(c, '_setup_query'):
  1115. c._setup_query()
  1116. return c
  1117. def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None,
  1118. only_load=None, from_parent=None):
  1119. """
  1120. Helper function that recursively returns an information for a klass, to be
  1121. used in get_cached_row. It exists just to compute this information only
  1122. once for entire queryset. Otherwise it would be computed for each row, which
  1123. leads to poor performance on large querysets.
  1124. Arguments:
  1125. * klass - the class to retrieve (and instantiate)
  1126. * max_depth - the maximum depth to which a select_related()
  1127. relationship should be explored.
  1128. * cur_depth - the current depth in the select_related() tree.
  1129. Used in recursive calls to determine if we should dig deeper.
  1130. * requested - A dictionary describing the select_related() tree
  1131. that is to be retrieved. keys are field names; values are
  1132. dictionaries describing the keys on that related object that
  1133. are themselves to be select_related().
  1134. * only_load - if the query has had only() or defer() applied,
  1135. this is the list of field names that will be returned. If None,
  1136. the full field list for `klass` can be assumed.
  1137. * from_parent - the parent model used to get to this model
  1138. Note that when travelling from parent to child, we will only load child
  1139. fields which aren't in the parent.
  1140. """
  1141. if max_depth and requested is None and cur_depth > max_depth:
  1142. # We've recursed deeply enough; stop now.
  1143. return None
  1144. if only_load:
  1145. load_fields = only_load.get(klass) or set()
  1146. # When we create the object, we will also be creating populating
  1147. # all the parent classes, so traverse the parent classes looking
  1148. # for fields that must be included on load.
  1149. for parent in klass._meta.get_parent_list():
  1150. fields = only_load.get(parent)
  1151. if fields:
  1152. load_fields.update(fields)
  1153. else:
  1154. load_fields = None
  1155. if load_fields:
  1156. # Handle deferred fields.
  1157. skip = set()
  1158. init_list = []
  1159. # Build the list of fields that *haven't* been requested
  1160. for field, model in klass._meta.get_concrete_fields_with_model():
  1161. if from_parent and model and issubclass(from_parent, model):
  1162. # Avoid loading fields already loaded for parent model for
  1163. # child models.
  1164. continue
  1165. elif field.name not in load_fields:
  1166. skip.add(field.attname)
  1167. else:
  1168. init_list.append(field.attname)
  1169. # Retrieve all the requested fields
  1170. field_count = len(init_list)
  1171. if skip:
  1172. klass = deferred_class_factory(klass, skip)
  1173. field_names = init_list
  1174. else:
  1175. field_names = ()
  1176. else:
  1177. # Load all fields on klass
  1178. field_count = len(klass._meta.concrete_fields)
  1179. # Check if we need to skip some parent fields.
  1180. if from_parent and len(klass._meta.local_concrete_fields) != len(klass._meta.concrete_fields):
  1181. # Only load those fields which haven't been already loaded into
  1182. # 'from_parent'.
  1183. non_seen_models = [p for p in klass._meta.get_parent_list()
  1184. if not issubclass(from_parent, p)]
  1185. # Load local fields, too...
  1186. non_seen_models.append(klass)
  1187. field_names = [f.attname for f in klass._meta.concrete_fields
  1188. if f.model in non_seen_models]
  1189. field_count = len(field_names)
  1190. # Try to avoid populating field_names variable for performance reasons.
  1191. # If field_names variable is set, we use **kwargs based model init
  1192. # which is slower than normal init.
  1193. if field_count == len(klass._meta.concrete_fields):
  1194. field_names = ()
  1195. restricted = requested is not None
  1196. related_fields = []
  1197. for f in klass._meta.fields:
  1198. if select_related_descend(f, restricted, requested, load_fields):
  1199. if restricted:
  1200. next = requested[f.name]
  1201. else:
  1202. next = None
  1203. klass_info = get_klass_info(f.rel.to, max_depth=max_depth, cur_depth=cur_depth + 1,
  1204. requested=next, only_load=only_load)
  1205. related_fields.append((f, klass_info))
  1206. reverse_related_fields = []
  1207. if restricted:
  1208. for o in klass._meta.get_all_related_objects():
  1209. if o.field.unique and select_related_descend(o.field, restricted, requested,
  1210. only_load.get(o.model), reverse=True):
  1211. next = requested[o.field.related_query_name()]
  1212. parent = klass if issubclass(o.model, klass) else None
  1213. klass_info = get_klass_info(o.model, max_depth=max_depth, cur_depth=cur_depth + 1,
  1214. requested=next, only_load=only_load, from_parent=parent)
  1215. reverse_related_fields.append((o.field, klass_info))
  1216. if field_names:
  1217. pk_idx = field_names.index(klass._meta.pk.attname)
  1218. else:
  1219. pk_idx = klass._meta.pk_index()
  1220. return klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx
def get_cached_row(row, index_start, using, klass_info, offset=0,
                   parent_data=()):
    """
    Helper function that recursively returns an object with the specified
    related attributes already populated.

    This method may be called recursively to populate deep select_related()
    clauses.

    Arguments:
         * row - the row of data returned by the database cursor
         * index_start - the index of the row at which data for this
           object is known to start
         * offset - the number of additional fields that are known to
           exist in row for `klass`. This usually means the number of
           annotated results on `klass`.
         * using - the database alias on which the query is being executed.
         * klass_info - result of the get_klass_info function
         * parent_data - parent model data in format (field, value). Used
           to populate the non-local fields of child models.

    Returns an (instance-or-None, index_end) tuple, or None when
    klass_info is None.
    """
    if klass_info is None:
        return None
    klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx = klass_info

    fields = row[index_start:index_start + field_count]
    # If the pk column is None (or the equivalent '' in the case the
    # connection interprets empty strings as nulls), then the related
    # object must be non-existent - set the relation to None.
    if (fields[pk_idx] is None or
        (connections[using].features.interprets_empty_strings_as_nulls and
         fields[pk_idx] == '')):
        obj = None
    elif field_names:
        # Only a subset of fields was loaded, so instantiate via **kwargs,
        # folding in any parent-model values handed down by the caller.
        fields = list(fields)
        for rel_field, value in parent_data:
            field_names.append(rel_field.attname)
            fields.append(value)
        obj = klass(**dict(zip(field_names, fields)))
    else:
        obj = klass(*fields)
    # If an object was retrieved, set the database state.
    if obj:
        obj._state.db = using
        obj._state.adding = False

    # Instantiate related fields
    index_end = index_start + field_count + offset
    # Iterate over each related object, populating any
    # select_related() fields
    for f, klass_info in related_fields:
        # Recursively retrieve the data for the related object
        cached_row = get_cached_row(row, index_end, using, klass_info)
        # If the recursive descent found an object, populate the
        # descriptor caches relevant to the object
        if cached_row:
            rel_obj, index_end = cached_row
            if obj is not None:
                # If the base object exists, populate the
                # descriptor cache
                setattr(obj, f.get_cache_name(), rel_obj)
            if f.unique and rel_obj is not None:
                # If the field is unique, populate the
                # reverse descriptor cache on the related object
                setattr(rel_obj, f.related.get_cache_name(), obj)

    # Now do the same, but for reverse related objects.
    # Only handle the restricted case - i.e., don't do a depth
    # descent into reverse relations unless explicitly requested
    for f, klass_info in reverse_related_fields:
        # Transfer data from this object to childs.
        parent_data = []
        for rel_field, rel_model in klass_info[0]._meta.get_fields_with_model():
            if rel_model is not None and isinstance(obj, rel_model):
                parent_data.append((rel_field, getattr(obj, rel_field.attname)))
        # Recursively retrieve the data for the related object
        cached_row = get_cached_row(row, index_end, using, klass_info,
                                    parent_data=parent_data)
        # If the recursive descent found an object, populate the
        # descriptor caches relevant to the object
        if cached_row:
            rel_obj, index_end = cached_row
            if obj is not None:
                # populate the reverse descriptor cache
                setattr(obj, f.related.get_cache_name(), rel_obj)
            if rel_obj is not None:
                # If the related object exists, populate
                # the descriptor cache.
                setattr(rel_obj, f.get_cache_name(), obj)
                # Populate related object caches using parent data.
                for rel_field, _ in parent_data:
                    if rel_field.rel:
                        setattr(rel_obj, rel_field.attname, getattr(obj, rel_field.attname))
                        try:
                            cached_obj = getattr(obj, rel_field.get_cache_name())
                            setattr(rel_obj, rel_field.get_cache_name(), cached_obj)
                        except AttributeError:
                            # Related object hasn't been cached yet
                            pass
    return obj, index_end
  1316. class RawQuerySet(object):
  1317. """
  1318. Provides an iterator which converts the results of raw SQL queries into
  1319. annotated model instances.
  1320. """
def __init__(self, raw_query, model=None, query=None, params=None,
             translations=None, using=None, hints=None):
    """
    Store the raw SQL and supporting state; wraps the SQL in a
    sql.RawQuery unless a prebuilt query object is supplied.
    """
    # The SQL string exactly as given by the caller.
    self.raw_query = raw_query
    self.model = model
    self._db = using
    self._hints = hints or {}
    self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
    self.params = params or ()
    # translations: presumably maps query column names to model attribute
    # names — TODO confirm against the rest of the class (out of view here).
    self.translations = translations or {}
  1330. def __iter__(self):
  1331. # Mapping of attrnames to row column positions. Used for constructing
  1332. # the model using kwargs, needed when not all model's fields are present
  1333. # in the query.
  1334. model_init_field_names = {}
  1335. # A list of tuples of (column name, column position). Used for
  1336. # annotation fields.
  1337. annotation_fields = []
  1338. # Cache some things for performance reasons outside the loop.
  1339. db = self.db
  1340. compiler = connections[db].ops.compiler('SQLCompiler')(
  1341. self.query, connections[db], db
  1342. )
  1343. need_resolv_columns = hasattr(compiler, 'resolve_columns')
  1344. query = iter(self.query)
  1345. try:
  1346. # Find out which columns are model's fields, and which ones should be
  1347. # annotated to the model.
  1348. for pos, column in enumerate(self.columns):
  1349. if column in self.model_fields:
  1350. model_init_field_names[self.model_fields[column].attname] = pos
  1351. else:
  1352. annotation_fields.append((column, pos))
  1353. # Find out which model's fields are not present in the query.
  1354. skip = set()
  1355. for field in self.model._meta.fields:
  1356. if field.attname not in model_init_field_names:
  1357. skip.add(field.attname)
  1358. if skip:
  1359. if self.model._meta.pk.attname in skip:
  1360. raise InvalidQuery('Raw query must include the primary key')
  1361. model_cls = deferred_class_factory(self.model, skip)
  1362. else:
  1363. model_cls = self.model
  1364. # All model's fields are present in the query. So, it is possible
  1365. # to use *args based model instantiation. For each field of the model,
  1366. # record the query column position matching that field.
  1367. model_init_field_pos = []
  1368. for field in self.model._meta.fields:
  1369. model_init_field_pos.append(model_init_field_names[field.attname])
  1370. if need_resolv_columns:
  1371. fields = [self.model_fields.get(c, None) for c in self.columns]
  1372. # Begin looping through the query values.
  1373. for values in query:
  1374. if need_resolv_columns:
  1375. values = compiler.resolve_columns(values, fields)
  1376. # Associate fields to values
  1377. if skip:
  1378. model_init_kwargs = {}
  1379. for attname, pos in six.iteritems(model_init_field_names):
  1380. model_init_kwargs[attname] = values[pos]
  1381. instance = model_cls(**model_init_kwargs)
  1382. else:
  1383. model_init_args = [values[pos] for pos in model_init_field_pos]
  1384. instance = model_cls(*model_init_args)
  1385. if annotation_fields:
  1386. for column, pos in annotation_fields:
  1387. setattr(instance, column, values[pos])
  1388. instance._state.db = db
  1389. instance._state.adding = False
  1390. yield instance
  1391. finally:
  1392. # Done iterating the Query. If it has its own cursor, close it.
  1393. if hasattr(self.query, 'cursor') and self.query.cursor:
  1394. self.query.cursor.close()
  1395. def __repr__(self):
  1396. text = self.raw_query
  1397. if self.params:
  1398. text = text % (self.params if hasattr(self.params, 'keys') else tuple(self.params))
  1399. return "<RawQuerySet: %r>" % text
  1400. def __getitem__(self, k):
  1401. return list(self)[k]
  1402. @property
  1403. def db(self):
  1404. "Return the database that will be used if this query is executed now"
  1405. return self._db or router.db_for_read(self.model, **self._hints)
  1406. def using(self, alias):
  1407. """
  1408. Selects which database this Raw QuerySet should execute its query against.
  1409. """
  1410. return RawQuerySet(self.raw_query, model=self.model,
  1411. query=self.query.clone(using=alias),
  1412. params=self.params, translations=self.translations,
  1413. using=alias)
  1414. @property
  1415. def columns(self):
  1416. """
  1417. A list of model field names in the order they'll appear in the
  1418. query results.
  1419. """
  1420. if not hasattr(self, '_columns'):
  1421. self._columns = self.query.get_columns()
  1422. # Adjust any column names which don't match field names
  1423. for (query_name, model_name) in self.translations.items():
  1424. try:
  1425. index = self._columns.index(query_name)
  1426. self._columns[index] = model_name
  1427. except ValueError:
  1428. # Ignore translations for non-existent column names
  1429. pass
  1430. return self._columns
  1431. @property
  1432. def model_fields(self):
  1433. """
  1434. A dict mapping column names to model field names.
  1435. """
  1436. if not hasattr(self, '_model_fields'):
  1437. converter = connections[self.db].introspection.table_name_converter
  1438. self._model_fields = {}
  1439. for field in self.model._meta.fields:
  1440. name, column = field.get_attname_column()
  1441. self._model_fields[converter(column)] = field
  1442. return self._model_fields
  1443. class Prefetch(object):
  1444. def __init__(self, lookup, queryset=None, to_attr=None):
  1445. # `prefetch_through` is the path we traverse to perform the prefetch.
  1446. self.prefetch_through = lookup
  1447. # `prefetch_to` is the path to the attribute that stores the result.
  1448. self.prefetch_to = lookup
  1449. if to_attr:
  1450. self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr])
  1451. self.queryset = queryset
  1452. self.to_attr = to_attr
  1453. def add_prefix(self, prefix):
  1454. self.prefetch_through = LOOKUP_SEP.join([prefix, self.prefetch_through])
  1455. self.prefetch_to = LOOKUP_SEP.join([prefix, self.prefetch_to])
  1456. def get_current_prefetch_through(self, level):
  1457. return LOOKUP_SEP.join(self.prefetch_through.split(LOOKUP_SEP)[:level + 1])
  1458. def get_current_prefetch_to(self, level):
  1459. return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[:level + 1])
  1460. def get_current_to_attr(self, level):
  1461. parts = self.prefetch_to.split(LOOKUP_SEP)
  1462. to_attr = parts[level]
  1463. as_attr = self.to_attr and level == len(parts) - 1
  1464. return to_attr, as_attr
  1465. def get_current_queryset(self, level):
  1466. if self.get_current_prefetch_to(level) == self.prefetch_to:
  1467. return self.queryset
  1468. return None
  1469. def __eq__(self, other):
  1470. if isinstance(other, Prefetch):
  1471. return self.prefetch_to == other.prefetch_to
  1472. return False
  1473. def __hash__(self):
  1474. return hash(self.__class__) ^ hash(self.prefetch_to)
  1475. def normalize_prefetch_lookups(lookups, prefix=None):
  1476. """
  1477. Helper function that normalize lookups into Prefetch objects.
  1478. """
  1479. ret = []
  1480. for lookup in lookups:
  1481. if not isinstance(lookup, Prefetch):
  1482. lookup = Prefetch(lookup)
  1483. if prefix:
  1484. lookup.add_prefix(prefix)
  1485. ret.append(lookup)
  1486. return ret
def prefetch_related_objects(result_cache, related_lookups):
    """
    Helper function for prefetch_related functionality
    Populates prefetched objects caches for a list of results
    from a QuerySet
    """
    if len(result_cache) == 0:
        return  # nothing to do
    # Accept both string lookups and Prefetch objects.
    related_lookups = normalize_prefetch_lookups(related_lookups)
    # We need to be able to dynamically add to the list of prefetch_related
    # lookups that we look up (see below).  So we need some book keeping to
    # ensure we don't do duplicate work.
    done_queries = {}    # dictionary of things like 'foo__bar': [results]
    auto_lookups = set()  # we add to this as we go through.
    followed_descriptors = set()  # recursion protection
    all_lookups = deque(related_lookups)
    while all_lookups:
        lookup = all_lookups.popleft()
        if lookup.prefetch_to in done_queries:
            if lookup.queryset:
                # This attribute path was already populated by an earlier
                # lookup; applying a custom queryset now would be ambiguous.
                raise ValueError("'%s' lookup was already seen with a different queryset. "
                                 "You may need to adjust the ordering of your lookups." % lookup.prefetch_to)
            continue
        # Top level, the list of objects to decorate is the result cache
        # from the primary QuerySet. It won't be for deeper levels.
        obj_list = result_cache
        through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
        for level, through_attr in enumerate(through_attrs):
            # Prepare main instances
            if len(obj_list) == 0:
                break
            prefetch_to = lookup.get_current_prefetch_to(level)
            if prefetch_to in done_queries:
                # Skip any prefetching, and any object preparation
                obj_list = done_queries[prefetch_to]
                continue
            # Prepare objects:
            good_objects = True
            for obj in obj_list:
                # Since prefetching can re-use instances, it is possible to have
                # the same instance multiple times in obj_list, so obj might
                # already be prepared.
                if not hasattr(obj, '_prefetched_objects_cache'):
                    try:
                        obj._prefetched_objects_cache = {}
                    except AttributeError:
                        # Must be in a QuerySet subclass that is not returning
                        # Model instances, either in Django or 3rd
                        # party. prefetch_related() doesn't make sense, so quit
                        # now.
                        good_objects = False
                        break
            if not good_objects:
                break
            # Descend down tree
            # We assume that objects retrieved are homogeneous (which is the premise
            # of prefetch_related), so what applies to first object applies to all.
            first_obj = obj_list[0]
            prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr)
            if not attr_found:
                raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
                                     "parameter to prefetch_related()" %
                                     (through_attr, first_obj.__class__.__name__, lookup.prefetch_through))
            if level == len(through_attrs) - 1 and prefetcher is None:
                # Last one, this *must* resolve to something that supports
                # prefetching, otherwise there is no point adding it and the
                # developer asking for it has made a mistake.
                raise ValueError("'%s' does not resolve to an item that supports "
                                 "prefetching - this is an invalid parameter to "
                                 "prefetch_related()." % lookup.prefetch_through)
            if prefetcher is not None and not is_fetched:
                obj_list, additional_lookups = prefetch_one_level(obj_list, prefetcher, lookup, level)
                # We need to ensure we don't keep adding lookups from the
                # same relationships to stop infinite recursion. So, if we
                # are already on an automatically added lookup, don't add
                # the new lookups from relationships we've seen already.
                if not (lookup in auto_lookups and descriptor in followed_descriptors):
                    done_queries[prefetch_to] = obj_list
                    # Lookups discovered on the related queryset are queued
                    # (at the front, preserving deque semantics) to be
                    # processed by subsequent iterations of the while loop.
                    new_lookups = normalize_prefetch_lookups(additional_lookups, prefetch_to)
                    auto_lookups.update(new_lookups)
                    all_lookups.extendleft(new_lookups)
                followed_descriptors.add(descriptor)
            else:
                # Either a singly related object that has already been fetched
                # (e.g. via select_related), or hopefully some other property
                # that doesn't support prefetching but needs to be traversed.
                # We replace the current list of parent objects with the list
                # of related objects, filtering out empty or missing values so
                # that we can continue with nullable or reverse relations.
                new_obj_list = []
                for obj in obj_list:
                    try:
                        new_obj = getattr(obj, through_attr)
                    except exceptions.ObjectDoesNotExist:
                        continue
                    if new_obj is None:
                        continue
                    # We special-case `list` rather than something more generic
                    # like `Iterable` because we don't want to accidentally match
                    # user models that define __iter__.
                    if isinstance(new_obj, list):
                        new_obj_list.extend(new_obj)
                    else:
                        new_obj_list.append(new_obj)
                obj_list = new_obj_list
  1592. def get_prefetcher(instance, attr):
  1593. """
  1594. For the attribute 'attr' on the given instance, finds
  1595. an object that has a get_prefetch_queryset().
  1596. Returns a 4 tuple containing:
  1597. (the object with get_prefetch_queryset (or None),
  1598. the descriptor object representing this relationship (or None),
  1599. a boolean that is False if the attribute was not found at all,
  1600. a boolean that is True if the attribute has already been fetched)
  1601. """
  1602. prefetcher = None
  1603. is_fetched = False
  1604. # For singly related objects, we have to avoid getting the attribute
  1605. # from the object, as this will trigger the query. So we first try
  1606. # on the class, in order to get the descriptor object.
  1607. rel_obj_descriptor = getattr(instance.__class__, attr, None)
  1608. if rel_obj_descriptor is None:
  1609. attr_found = hasattr(instance, attr)
  1610. else:
  1611. attr_found = True
  1612. if rel_obj_descriptor:
  1613. # singly related object, descriptor object has the
  1614. # get_prefetch_queryset() method.
  1615. if hasattr(rel_obj_descriptor, 'get_prefetch_queryset'):
  1616. prefetcher = rel_obj_descriptor
  1617. if rel_obj_descriptor.is_cached(instance):
  1618. is_fetched = True
  1619. else:
  1620. # descriptor doesn't support prefetching, so we go ahead and get
  1621. # the attribute on the instance rather than the class to
  1622. # support many related managers
  1623. rel_obj = getattr(instance, attr)
  1624. if hasattr(rel_obj, 'get_prefetch_queryset'):
  1625. prefetcher = rel_obj
  1626. return prefetcher, rel_obj_descriptor, attr_found, is_fetched
  1627. def prefetch_one_level(instances, prefetcher, lookup, level):
  1628. """
  1629. Helper function for prefetch_related_objects
  1630. Runs prefetches on all instances using the prefetcher object,
  1631. assigning results to relevant caches in instance.
  1632. The prefetched objects are returned, along with any additional
  1633. prefetches that must be done due to prefetch_related lookups
  1634. found from default managers.
  1635. """
  1636. # prefetcher must have a method get_prefetch_queryset() which takes a list
  1637. # of instances, and returns a tuple:
  1638. # (queryset of instances of self.model that are related to passed in instances,
  1639. # callable that gets value to be matched for returned instances,
  1640. # callable that gets value to be matched for passed in instances,
  1641. # boolean that is True for singly related objects,
  1642. # cache name to assign to).
  1643. # The 'values to be matched' must be hashable as they will be used
  1644. # in a dictionary.
  1645. rel_qs, rel_obj_attr, instance_attr, single, cache_name = (
  1646. prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level)))
  1647. # We have to handle the possibility that the QuerySet we just got back
  1648. # contains some prefetch_related lookups. We don't want to trigger the
  1649. # prefetch_related functionality by evaluating the query. Rather, we need
  1650. # to merge in the prefetch_related lookups.
  1651. additional_lookups = getattr(rel_qs, '_prefetch_related_lookups', [])
  1652. if additional_lookups:
  1653. # Don't need to clone because the manager should have given us a fresh
  1654. # instance, so we access an internal instead of using public interface
  1655. # for performance reasons.
  1656. rel_qs._prefetch_related_lookups = []
  1657. all_related_objects = list(rel_qs)
  1658. rel_obj_cache = {}
  1659. for rel_obj in all_related_objects:
  1660. rel_attr_val = rel_obj_attr(rel_obj)
  1661. rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)
  1662. for obj in instances:
  1663. instance_attr_val = instance_attr(obj)
  1664. vals = rel_obj_cache.get(instance_attr_val, [])
  1665. to_attr, as_attr = lookup.get_current_to_attr(level)
  1666. if single:
  1667. val = vals[0] if vals else None
  1668. to_attr = to_attr if as_attr else cache_name
  1669. setattr(obj, to_attr, val)
  1670. else:
  1671. if as_attr:
  1672. setattr(obj, to_attr, vals)
  1673. else:
  1674. # Cache in the QuerySet.all().
  1675. qs = getattr(obj, to_attr).all()
  1676. qs._result_cache = vals
  1677. # We don't want the individual qs doing prefetch_related now,
  1678. # since we have merged this into the current work.
  1679. qs._prefetch_done = True
  1680. obj._prefetched_objects_cache[cache_name] = qs
  1681. return all_related_objects, additional_lookups