operations.py

from __future__ import absolute_import, unicode_literals

import datetime
import uuid

import django
from django.conf import settings
from django.db import utils
from django.db.backends.base.operations import BaseDatabaseOperations
from django.utils import six, timezone
from django.utils.encoding import force_text, smart_text

from . import fields as mssql_fields

try:
    import pytz
except ImportError:
    pytz = None


class DatabaseOperations(BaseDatabaseOperations):
    compiler_module = "sqlserver_ado.compiler"

    _convert_values_map = {
        # custom fields
        'DateTimeOffsetField': mssql_fields.DateTimeOffsetField(),
        'LegacyDateField': mssql_fields.LegacyDateField(),
        'LegacyDateTimeField': mssql_fields.LegacyDateTimeField(),
        'LegacyTimeField': mssql_fields.LegacyTimeField(),
        'NewDateField': mssql_fields.DateField(),
        'NewDateTimeField': mssql_fields.DateTimeField(),
        'NewTimeField': mssql_fields.TimeField(),
    }

    def __init__(self, *args, **kwargs):
        super(DatabaseOperations, self).__init__(*args, **kwargs)
        self.value_to_db_datetime = self._new_value_to_db_datetime
        self.value_to_db_time = self._new_value_to_db_time
        self._convert_values_map.update({
            'DateField': self._convert_values_map['NewDateField'],
            'DateTimeField': self._convert_values_map['NewDateTimeField'],
            'TimeField': self._convert_values_map['NewTimeField'],
        })

    def cache_key_culling_sql(self):
        return """
        SELECT [cache_key]
        FROM (SELECT [cache_key], ROW_NUMBER() OVER (ORDER BY [cache_key]) AS [rank] FROM %s) AS [RankedCache]
        WHERE [rank] = %%s + 1
        """

    def date_extract_sql(self, lookup_type, field_name):
        field_name = self.quote_name(field_name)
        if lookup_type == 'week_day':
            lookup_type = 'weekday'
        return 'DATEPART(%s, %s)' % (lookup_type, field_name)

    def date_interval_sql(self, timedelta):
        """
        Do nothing here, we'll handle it in the combine.
        """
        return timedelta, []

    def date_trunc_sql(self, lookup_type, field_name):
        return "DATEADD(%s, DATEDIFF(%s, 0, %s), 0)" % (lookup_type, lookup_type, field_name)

    def _switch_tz_offset_sql(self, field_name, tzname):
        """
        Returns the SQL that converts field_name from UTC to local time in
        tzname.
        """
        field_name = self.quote_name(field_name)
        if settings.USE_TZ:
            if pytz is None:
                from django.core.exceptions import ImproperlyConfigured
                raise ImproperlyConfigured("This query requires pytz, "
                                           "but it isn't installed.")
            tz = pytz.timezone(tzname)
            td = tz.utcoffset(datetime.datetime(2000, 1, 1))

            def total_seconds(td):
                if hasattr(td, 'total_seconds'):
                    return td.total_seconds()
                else:
                    return td.days * 24 * 60 * 60 + td.seconds

            total_minutes = total_seconds(td) // 60
            hours, minutes = divmod(total_minutes, 60)
            tzoffset = "%+03d:%02d" % (hours, minutes)
            field_name = (
                "CAST(SWITCHOFFSET(TODATETIMEOFFSET(%s, '+00:00'), '%s') AS DATETIME2)"
                % (field_name, tzoffset)
            )
        return field_name
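
    # Illustrative note (editor's addition): the offset is computed once from a
    # fixed reference date (2000-01-01), so DST shifts within tzname are not
    # reflected. With USE_TZ and tzname='America/New_York' (assumed), the
    # generated wrapper looks roughly like:
    #
    #   CAST(SWITCHOFFSET(TODATETIMEOFFSET([created_at], '+00:00'), '-05:00') AS DATETIME2)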

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
        'second', returns the SQL that extracts a value from the given
        datetime field field_name, and a tuple of parameters.
        """
        if lookup_type == 'week_day':
            lookup_type = 'weekday'
        return 'DATEPART({0}, {1})'.format(
            lookup_type,
            self._switch_tz_offset_sql(field_name, tzname),
        ), []

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
        'second', returns the SQL that truncates the given datetime field
        field_name to a datetime object with only the given specificity, and
        a tuple of parameters.
        """
        field_name = self._switch_tz_offset_sql(field_name, tzname)
        reference_date = '0'  # 1900-01-01
        if lookup_type in ['minute', 'second']:
            # Prevent DATEDIFF overflow by using the first day of the year as
            # the reference point. Only used for minute and second to avoid any
            # potential performance hit for queries against very large datasets.
            reference_date = "CONVERT(datetime2, CONVERT(char(4), {field_name}, 112) + '0101', 112)".format(
                field_name=field_name,
            )
        sql = "DATEADD({lookup}, DATEDIFF({lookup}, {reference_date}, {field_name}), {reference_date})".format(
            lookup=lookup_type,
            field_name=field_name,
            reference_date=reference_date,
        )
        return sql, []

    def get_db_converters(self, expression):
        converters = super(DatabaseOperations, self).get_db_converters(expression)
        internal_type = expression.output_field.get_internal_type()
        if internal_type == 'TextField':
            converters.append(self.convert_textfield_value)
        elif internal_type in ['BooleanField', 'NullBooleanField']:
            converters.append(self.convert_booleanfield_value)
        elif internal_type in ('DateField', 'NewDateField', 'LegacyDateField'):
            converters.append(self.convert_datefield_value)
        elif internal_type in ('DateTimeField', 'NewDateTimeField', 'LegacyDateTimeField'):
            converters.append(self.convert_datetimefield_value)
        elif internal_type in ('TimeField', 'NewTimeField', 'LegacyTimeField'):
            converters.append(self.convert_timefield_value)
        elif internal_type == 'DateTimeOffsetField':
            converters.append(self.convert_datetimeoffsetfield_value)
        elif internal_type == 'UUIDField':
            converters.append(self.convert_uuidfield_value)
        return converters

    def convert_booleanfield_value(self, value, expression, connection, context):
        if value in (0, 1):
            value = bool(value)
        return value

    def convert_datefield_value(self, value, expression, connection, context):
        if isinstance(value, six.text_type):
            internal_type = expression.output_field.get_internal_type()
            if internal_type in ('DateField', 'NewDateField', 'LegacyDateField'):
                value = self._convert_values_map[internal_type].to_python(value)
        elif isinstance(value, datetime.datetime):
            return value.date()
        return value

    def convert_datetimefield_value(self, value, expression, connection, context):
        if isinstance(value, six.text_type):
            internal_type = expression.output_field.get_internal_type()
            # Keep the accepted types in sync with get_db_converters.
            if internal_type in ('DateTimeField', 'NewDateTimeField', 'LegacyDateTimeField'):
                value = self._convert_values_map[internal_type].to_python(value)
        return value

    def convert_datetimeoffsetfield_value(self, value, expression, connection, context):
        if isinstance(value, six.text_type):
            internal_type = expression.output_field.get_internal_type()
            if internal_type == 'DateTimeOffsetField':
                value = self._convert_values_map[internal_type].to_python(value)
        return value

    def convert_timefield_value(self, value, expression, connection, context):
        if isinstance(value, six.text_type):
            internal_type = expression.output_field.get_internal_type()
            if internal_type in ('TimeField', 'NewTimeField', 'LegacyTimeField'):
                value = self._convert_values_map[internal_type].to_python(value)
        elif isinstance(value, datetime.datetime):
            return value.time()
        return value

    def convert_uuidfield_value(self, value, expression, connection, context):
        if expression.output_field.get_internal_type() == 'UUIDField':
            if isinstance(value, six.string_types):
                value = uuid.UUID(value.replace('-', ''))
        return value
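
    # Illustrative example (editor's addition): uniqueidentifier values that
    # arrive as strings are normalised to uuid.UUID instances, e.g.
    #
    #   '2b3c4d5e-6f70-4a1b-8c9d-0e1f2a3b4c5d'
    #   -> UUID('2b3c4d5e-6f70-4a1b-8c9d-0e1f2a3b4c5d')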

    def convert_textfield_value(self, value, expression, connection, context):
        if value is not None:
            value = force_text(value)
        return value

    def last_insert_id(self, cursor, table_name, pk_name):
        """
        Fetch the last inserted ID by executing another query.
        """
        # IDENT_CURRENT returns the last identity value generated for a
        # specific table in any session and any scope.
        # http://msdn.microsoft.com/en-us/library/ms175098.aspx
        cursor.execute("SELECT CAST(IDENT_CURRENT(%s) as bigint)", [self.quote_name(table_name)])
        return cursor.fetchone()[0]

    def return_insert_id(self):
        """
        MSSQL implements the RETURNING SQL standard extension differently from
        the core database backends and this function is essentially a no-op.
        The SQL is altered in the SQLInsertCompiler to add the necessary OUTPUT
        clause. Return None, None to bypass the core's SQL mangling.
        """
        return (None, None)
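
    # Illustrative note (editor's addition): rather than appending a RETURNING
    # fragment here, the SQLInsertCompiler emits an OUTPUT clause, so an insert
    # ends up looking roughly like (table/column names assumed):
    #
    #   INSERT INTO [app_event] ([name]) OUTPUT INSERTED.[id] VALUES (%s)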

    def no_limit_value(self):
        return None

    def prep_for_like_query(self, x):
        """Prepares a value for use in a LIKE query."""
        return (
            smart_text(x).
            replace("\\", "\\\\").
            replace("%", "\%").
            replace("_", "\_").
            replace("[", "\[").
            replace("]", "\]")
        )
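
    # Illustrative example (editor's addition): the wildcard characters
    # %, _, [ and ] are escaped with a backslash, e.g.
    #
    #   prep_for_like_query('50%_off [sale]')  ->  50\%\_off \[sale\]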

    def quote_name(self, name):
        if name.startswith('[') and name.endswith(']'):
            return name  # already quoted
        return '[%s]' % name
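
    # Illustrative example (editor's addition):
    #
    #   quote_name('django_content_type')  ->  '[django_content_type]'
    #   quote_name('[already_quoted]')     ->  '[already_quoted]'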

    def random_function_sql(self):
        return 'NEWID()'

    def regex_lookup(self, lookup_type):
        # Case sensitivity
        match_option = {'iregex': 0, 'regex': 1}[lookup_type]
        return "dbo.REGEXP_LIKE(%%s, %%s, %s)=1" % (match_option,)

    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        """
        Returns a list of SQL statements required to remove all data from
        the given database tables (without actually removing the tables
        themselves).

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.

        Originally taken from the django-pyodbc project.
        """
        if not tables:
            return list()
        # Cannot use TRUNCATE on tables that are referenced by a FOREIGN KEY;
        # use DELETE instead (which is slower).
        with self.connection.cursor() as cursor:
            # Try to minimize the risks of the brain-dead inconsistency in
            # DBCC CHECKIDENT(table, RESEED, n) behavior.
            seqs = []
            for seq in sequences:
                cursor.execute("SELECT COUNT(*) FROM %s" % self.quote_name(seq["table"]))
                rowcnt = cursor.fetchone()[0]
                elem = dict()
                if rowcnt:
                    elem['start_id'] = 0
                else:
                    elem['start_id'] = 1
                elem.update(seq)
                seqs.append(elem)
        sql_list = list()
        # Turn off constraints.
        sql_list.append('EXEC sp_MSforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT all"')
        # Delete data from tables.
        sql_list.extend(
            ['%s %s %s;' % (
                style.SQL_KEYWORD('DELETE'),
                style.SQL_KEYWORD('FROM'),
                style.SQL_FIELD(self.quote_name(t))
            ) for t in tables]
        )
        # Reset the counters on each table.
        sql_list.extend(
            ['%s %s (%s, %s, %s) %s %s;' % (
                style.SQL_KEYWORD('DBCC'),
                style.SQL_KEYWORD('CHECKIDENT'),
                style.SQL_FIELD(self.quote_name(seq["table"])),
                style.SQL_KEYWORD('RESEED'),
                style.SQL_FIELD('%d' % seq['start_id']),
                style.SQL_KEYWORD('WITH'),
                style.SQL_KEYWORD('NO_INFOMSGS'),
            ) for seq in seqs]
        )
        # Turn constraints back on.
        sql_list.append('EXEC sp_MSforeachtable "ALTER TABLE ? WITH NOCHECK CHECK CONSTRAINT all"')
        return sql_list
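
    # Illustrative example (editor's addition, table name assumed): for
    # tables=['app_tag'] with one identity sequence on that table, the method
    # returns roughly the following statements (RESEED target depends on
    # whether the table currently has rows):
    #
    #   EXEC sp_MSforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT all"
    #   DELETE FROM [app_tag];
    #   DBCC CHECKIDENT ([app_tag], RESEED, 0) WITH NO_INFOMSGS;
    #   EXEC sp_MSforeachtable "ALTER TABLE ? WITH NOCHECK CHECK CONSTRAINT all"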

    def tablespace_sql(self, tablespace, inline=False):
        return "ON %s" % self.quote_name(tablespace)

    def __to_truncated_datetime_string(self, value):
        """
        Format a datetime as a language-neutral string parsable by either a
        'datetime' or 'datetime2' column.
        """
        if isinstance(value, datetime.datetime):
            # Strip '-' so SQL Server parses it as YYYYMMDD for all languages/formats
            val = value.isoformat(str(' ')).replace('-', '')
            if value.microsecond:
                # truncate to millisecond so SQL's 'datetime' can parse it
                idx = val.rindex('.')
                val = val[:idx + 4] + val[idx + 7:]
            return val
        raise TypeError("'value' must be a date or datetime")
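
    # Illustrative example (editor's addition):
    #
    #   datetime.datetime(2016, 3, 14, 15, 9, 26, 535897)
    #   -> '20160314 15:09:26.535'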

    def _legacy_value_to_db_datetime(self, value):
        if value is None or isinstance(value, six.string_types):
            return value
        if timezone.is_aware(value):  # and not self.connection.features.supports_timezones:
            if getattr(settings, 'USE_TZ', False):
                value = value.astimezone(timezone.utc).replace(tzinfo=None)
            else:
                raise ValueError("SQL Server backend does not support timezone-aware datetimes.")
        val = self.__to_truncated_datetime_string(value)
        return val

    def _new_value_to_db_datetime(self, value):
        if value is None or isinstance(value, six.string_types):
            return value
        if timezone.is_aware(value):  # and not self.connection.features.supports_timezones:
            if getattr(settings, 'USE_TZ', False):
                value = value.astimezone(timezone.utc).replace(tzinfo=None)
            else:
                raise ValueError("SQL Server backend does not support timezone-aware datetimes.")
        return value.isoformat()

    def _legacy_value_to_db_time(self, value):
        if value is None or isinstance(value, six.string_types):
            return value
        if timezone.is_aware(value):
            if not getattr(settings, 'USE_TZ', False) and hasattr(value, 'astimezone'):
                value = timezone.make_naive(value, timezone.utc)
            else:
                raise ValueError("SQL Server backend does not support timezone-aware times.")
        val = value.isoformat()
        if value.microsecond:
            # truncate to millisecond so SQL's 'datetime' can parse it
            idx = val.rindex('.')
            val = val[:idx + 4] + val[idx + 7:]
        return val

    def _new_value_to_db_time(self, value):
        if value is None or isinstance(value, six.string_types):
            return value
        if timezone.is_aware(value):
            if not getattr(settings, 'USE_TZ', False) and hasattr(value, 'astimezone'):
                value = timezone.make_naive(value, timezone.utc)
            else:
                raise ValueError("SQL Server backend does not support timezone-aware times.")
        return value.isoformat()

    def value_to_db_decimal(self, value, max_digits, decimal_places):
        if value is None or value == '':
            return None
        return value  # Should be a decimal type (or string)

    def year_lookup_bounds_for_date_field(self, value):
        """
        Returns a two-element list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateField value using a year
        lookup.

        `value` is an int, containing the looked-up year.
        """
        first = self.value_to_db_date(datetime.date(value, 1, 1))
        second = self.value_to_db_date(datetime.date(value, 12, 31))
        return [first, second]

    def year_lookup_bounds_for_datetime_field(self, value):
        """
        Returns a two-element list with the lower and upper bound to be used
        with a BETWEEN operator to query a field value using a year lookup.

        `value` is an int, containing the looked-up year.
        """
        first = datetime.datetime(value, 1, 1)
        second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
        if settings.USE_TZ:
            tz = timezone.get_current_timezone()
            first = timezone.make_aware(first, tz)
            second = timezone.make_aware(second, tz)
        return [self.value_to_db_datetime(first), self.value_to_db_datetime(second)]

    def convert_values(self, value, field):
        """
        MSSQL needs help with date fields that might come out as strings.
        """
        if field:
            internal_type = field.get_internal_type()
            if internal_type in self._convert_values_map:
                value = self._convert_values_map[internal_type].to_python(value)
            else:
                value = super(DatabaseOperations, self).convert_values(value, field)
        return value

    if django.VERSION >= (1, 9, 0):
        def bulk_insert_sql(self, fields, placeholder_rows):
            placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
            values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
            return "VALUES " + values_sql
    else:
        def bulk_insert_sql(self, fields, num_values):
            """
            Format the SQL for bulk insert.
            """
            items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
            return "VALUES " + ", ".join([items_sql] * num_values)

    def max_name_length(self):
        """
        MSSQL supports identifier names up to 128 characters.
        """
        return 128

    def _supports_stddev(self):
        """
        Workaround for Django ticket #18334.

        This backend supports StdDev and the SQLCompilers will remap to
        the correct function names.
        """
        return True

    def enable_identity_insert(self, table):
        """
        Backends can implement as needed to enable inserts into the
        identity column.

        Should return True if identity inserts have been enabled.
        """
        if table:
            cursor = self.connection.cursor()
            cursor.execute('SET IDENTITY_INSERT {0} ON'.format(
                self.connection.ops.quote_name(table)
            ))
            return True
        return False

    def disable_identity_insert(self, table):
        """
        Backends can implement as needed to disable inserts into the
        identity column.

        Should return True if identity inserts have been disabled.
        """
        if table:
            cursor = self.connection.cursor()
            cursor.execute('SET IDENTITY_INSERT {0} OFF'.format(
                self.connection.ops.quote_name(table)
            ))
            return True
        return False

    def savepoint_create_sql(self, sid):
        return "SAVE TRANSACTION {0}".format(self.quote_name(sid))

    def savepoint_rollback_sql(self, sid):
        return "ROLLBACK TRANSACTION {0}".format(self.quote_name(sid))

    def combine_expression(self, connector, sub_expressions):
        """
        MSSQL requires a special case for the ^ operator in query expressions.
        """
        if connector == '^':
            return 'POWER(%s)' % ','.join(sub_expressions)
        return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)

    def combine_duration_expression(self, connector, sub_expressions):
        if connector not in ['+', '-']:
            raise utils.DatabaseError('Invalid connector for timedelta: %s.' % connector)
        # print 'combine_duration_expression', connector, [repr(x) for x in sub_expressions]
        seconds = False
        microseconds = False
        sign = 1 if connector == '+' else -1
        sql, timedelta = sub_expressions
        if isinstance(sql, datetime.timedelta):
            # normalize to sql + duration
            sql, timedelta = timedelta, sql
        if isinstance(timedelta, datetime.timedelta):
            seconds = ((timedelta.days * 86400) + timedelta.seconds) * sign
            microseconds = timedelta.microseconds * sign
        if isinstance(timedelta, six.text_type):
            seconds = "({} / 1000000)".format(timedelta)
            # Need to fix %% escaping down through dbapi
            microseconds = "({} %% 1000000)".format(timedelta)
            if sign == -1:
                # negate both components when subtracting a duration expression
                seconds = "(-1 * {})".format(seconds)
                microseconds = "(-1 * {})".format(microseconds)
        out = sql
        if seconds:
            out = 'DATEADD(SECOND, {}, CAST({} as datetime2))'.format(seconds, sql)
        if microseconds:
            # DATEADD with datetime doesn't support microseconds, must cast up
            out = 'DATEADD(MICROSECOND, {ms}, CAST({sql} as datetime2))'.format(
                ms=microseconds,
                sql=out,
            )
        return out
        # return self.combine_expression(connector, sub_expressions)
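
    # Illustrative example (editor's addition, column SQL assumed): adding a
    # timedelta of 1 day and 500000 microseconds to a datetime column yields
    # nested DATEADD calls, roughly:
    #
    #   DATEADD(MICROSECOND, 500000,
    #       CAST(DATEADD(SECOND, 86400, CAST([start] as datetime2)) as datetime2))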

    def format_for_duration_arithmetic(self, sql):
        """Do nothing here, we will handle it in the custom function."""
        return sql

    def bulk_batch_size(self, fields, objs):
        """
        Returns the maximum allowed batch size for the backend. `fields` are
        the fields to be inserted in the batch; `objs` contains all of the
        objects to be inserted.
        """
        return min(len(objs), 2100 // len(fields), 1000)
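
    # Illustrative example (editor's addition): the cap reflects SQL Server's
    # 2100-parameter and 1000-row-per-VALUES limits. For a model with 7
    # concrete fields and 5000 objects:
    #
    #   min(5000, 2100 // 7, 1000) -> min(5000, 300, 1000) -> 300 rows per batch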