1from __future__ import annotations
2
3import datetime
4import decimal
5import json
6from abc import ABC, abstractmethod
7from collections.abc import Iterable
8from importlib import import_module
9from typing import TYPE_CHECKING, Any
10
11import sqlparse
12
13from plain.models.backends import utils
14from plain.models.db import NotSupportedError
15from plain.utils import timezone
16from plain.utils.encoding import force_str
17
18if TYPE_CHECKING:
19 from types import ModuleType
20
21 from plain.models.backends.base.base import BaseDatabaseWrapper
22 from plain.models.fields import Field
23
24
class BaseDatabaseOperations(ABC):
    """
    Encapsulate backend-specific differences, such as the way a backend
    performs ordering or calculates the ID of a recently-inserted row.
    """

    # Dotted path of the module that holds the SQLCompiler classes used by
    # compiler(); backends may point this at their own module.
    compiler_module: str = "plain.models.sql.compiler"

    # Integer field safe ranges by `internal_type` as documented
    # in docs/ref/models/fields.txt.
    integer_field_ranges: dict[str, tuple[int, int]] = {
        "SmallIntegerField": (-32768, 32767),
        "IntegerField": (-2147483648, 2147483647),
        "BigIntegerField": (-9223372036854775808, 9223372036854775807),
        "PositiveBigIntegerField": (0, 9223372036854775807),
        "PositiveSmallIntegerField": (0, 32767),
        "PositiveIntegerField": (0, 2147483647),
        "PrimaryKeyField": (-9223372036854775808, 9223372036854775807),
    }
    # SQL keywords for combining querysets with set operations.
    set_operators: dict[str, str] = {
        "union": "UNION",
        "intersection": "INTERSECT",
        "difference": "EXCEPT",
    }
    # Mapping of Field.get_internal_type() (typically the model field's class
    # name) to the data type to use for the Cast() function, if different from
    # DatabaseWrapper.data_types.
    cast_data_types: dict[str, str] = {}
    # CharField data type if the max_length argument isn't provided.
    cast_char_field_without_max_length: str | None = None

    # Start and end points for window expressions.
    PRECEDING: str = "PRECEDING"
    FOLLOWING: str = "FOLLOWING"
    UNBOUNDED_PRECEDING: str = "UNBOUNDED " + PRECEDING
    UNBOUNDED_FOLLOWING: str = "UNBOUNDED " + FOLLOWING
    CURRENT_ROW: str = "CURRENT ROW"

    # Prefix for EXPLAIN queries, or None if EXPLAIN isn't supported.
    explain_prefix: str | None = None

    def __init__(self, connection: BaseDatabaseWrapper):
        """Bind this operations object to a database connection wrapper."""
        self.connection = connection
        # Lazily-populated module cache used by compiler().
        self._cache: ModuleType | None = None
69
70 def autoinc_sql(self, table: str, column: str) -> str | None:
71 """
72 Return any SQL needed to support auto-incrementing primary keys, or
73 None if no SQL is necessary.
74
75 This SQL is executed when a table is created.
76 """
77 return None
78
79 def bulk_batch_size(self, fields: list[Field], objs: list[Any]) -> int:
80 """
81 Return the maximum allowed batch size for the backend. The fields
82 are the fields going to be inserted in the batch, the objs contains
83 all the objects to be inserted.
84 """
85 return len(objs)
86
87 def format_for_duration_arithmetic(self, sql: str) -> str:
88 raise NotImplementedError(
89 "subclasses of BaseDatabaseOperations may require a "
90 "format_for_duration_arithmetic() method."
91 )
92
93 def unification_cast_sql(self, output_field: Field) -> str:
94 """
95 Given a field instance, return the SQL that casts the result of a union
96 to that type. The resulting string should contain a '%s' placeholder
97 for the expression being cast.
98 """
99 return "%s"
100
    @abstractmethod
    def date_extract_sql(
        self, lookup_type: str, sql: str, params: list[Any] | tuple[Any, ...]
    ) -> tuple[str, list[Any] | tuple[Any, ...]]:
        """
        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        extracts a value from the given date field field_name.

        Returns a (sql, params) pair.
        """
        ...

    @abstractmethod
    def date_trunc_sql(
        self,
        lookup_type: str,
        sql: str,
        params: list[Any] | tuple[Any, ...],
        tzname: str | None = None,
    ) -> tuple[str, list[Any] | tuple[Any, ...]]:
        """
        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        truncates the given date or datetime field field_name to a date object
        with only the given specificity.

        If `tzname` is provided, the given value is truncated in a specific
        timezone.

        Returns a (sql, params) pair.
        """
        ...

    @abstractmethod
    def datetime_cast_date_sql(
        self, sql: str, params: list[Any] | tuple[Any, ...], tzname: str | None
    ) -> tuple[str, list[Any] | tuple[Any, ...]]:
        """
        Return the SQL to cast a datetime value to date value, as a
        (sql, params) pair.
        """
        ...

    @abstractmethod
    def datetime_cast_time_sql(
        self, sql: str, params: list[Any] | tuple[Any, ...], tzname: str | None
    ) -> tuple[str, list[Any] | tuple[Any, ...]]:
        """
        Return the SQL to cast a datetime value to time value, as a
        (sql, params) pair.
        """
        ...

    @abstractmethod
    def datetime_extract_sql(
        self,
        lookup_type: str,
        sql: str,
        params: list[Any] | tuple[Any, ...],
        tzname: str | None,
    ) -> tuple[str, list[Any] | tuple[Any, ...]]:
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
        'second', return the SQL that extracts a value from the given
        datetime field field_name.

        Returns a (sql, params) pair.
        """
        ...

    @abstractmethod
    def datetime_trunc_sql(
        self,
        lookup_type: str,
        sql: str,
        params: list[Any] | tuple[Any, ...],
        tzname: str | None,
    ) -> tuple[str, list[Any] | tuple[Any, ...]]:
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
        'second', return the SQL that truncates the given datetime field
        field_name to a datetime object with only the given specificity.

        Returns a (sql, params) pair.
        """
        ...

    @abstractmethod
    def time_trunc_sql(
        self,
        lookup_type: str,
        sql: str,
        params: list[Any] | tuple[Any, ...],
        tzname: str | None = None,
    ) -> tuple[str, list[Any] | tuple[Any, ...]]:
        """
        Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
        that truncates the given time or datetime field field_name to a time
        object with only the given specificity.

        If `tzname` is provided, the given value is truncated in a specific
        timezone.

        Returns a (sql, params) pair.
        """
        ...
194
195 def time_extract_sql(
196 self, lookup_type: str, sql: str, params: list[Any] | tuple[Any, ...]
197 ) -> tuple[str, list[Any] | tuple[Any, ...]]:
198 """
199 Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
200 that extracts a value from the given time field field_name.
201 """
202 return self.date_extract_sql(lookup_type, sql, params)
203
204 def deferrable_sql(self) -> str:
205 """
206 Return the SQL to make a constraint "initially deferred" during a
207 CREATE TABLE statement.
208 """
209 return ""
210
211 def distinct_sql(
212 self, fields: list[str], params: list[Any] | tuple[Any, ...]
213 ) -> tuple[list[str], list[Any]]:
214 """
215 Return an SQL DISTINCT clause which removes duplicate rows from the
216 result set. If any fields are given, only check the given fields for
217 duplicates.
218 """
219 if fields:
220 raise NotSupportedError(
221 "DISTINCT ON fields is not supported by this database backend"
222 )
223 else:
224 return ["DISTINCT"], []
225
226 def fetch_returned_insert_columns(self, cursor: Any, returning_params: Any) -> Any:
227 """
228 Given a cursor object that has just performed an INSERT...RETURNING
229 statement into a table, return the newly created data.
230 """
231 return cursor.fetchone()
232
233 def field_cast_sql(self, db_type: str | None, internal_type: str) -> str:
234 """
235 Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
236 (e.g. 'GenericIPAddressField'), return the SQL to cast it before using
237 it in a WHERE statement. The resulting string should contain a '%s'
238 placeholder for the column being searched against.
239 """
240 return "%s"
241
242 def force_no_ordering(self) -> list[str]:
243 """
244 Return a list used in the "ORDER BY" clause to force no ordering at
245 all. Return an empty list to include nothing in the ordering.
246 """
247 return []
248
249 def for_update_sql(
250 self,
251 nowait: bool = False,
252 skip_locked: bool = False,
253 of: tuple[str, ...] = (),
254 no_key: bool = False,
255 ) -> str:
256 """
257 Return the FOR UPDATE SQL clause to lock rows for an update operation.
258 """
259 return "FOR{} UPDATE{}{}{}".format(
260 " NO KEY" if no_key else "",
261 " OF {}".format(", ".join(of)) if of else "",
262 " NOWAIT" if nowait else "",
263 " SKIP LOCKED" if skip_locked else "",
264 )
265
266 def _get_limit_offset_params(
267 self, low_mark: int | None, high_mark: int | None
268 ) -> tuple[int | None, int]:
269 offset = low_mark or 0
270 if high_mark is not None:
271 return (high_mark - offset), offset
272 elif offset:
273 return self.connection.ops.no_limit_value(), offset
274 return None, offset
275
276 def limit_offset_sql(self, low_mark: int | None, high_mark: int | None) -> str:
277 """Return LIMIT/OFFSET SQL clause."""
278 limit, offset = self._get_limit_offset_params(low_mark, high_mark)
279 return " ".join(
280 sql
281 for sql in (
282 ("LIMIT %d" % limit) if limit else None, # noqa: UP031
283 ("OFFSET %d" % offset) if offset else None, # noqa: UP031
284 )
285 if sql
286 )
287
288 def last_executed_query(
289 self,
290 cursor: Any,
291 sql: str,
292 params: list[Any] | tuple[Any, ...] | dict[str, Any] | None,
293 ) -> str:
294 """
295 Return a string of the query last executed by the given cursor, with
296 placeholders replaced with actual values.
297
298 `sql` is the raw query containing placeholders and `params` is the
299 sequence of parameters. These are used by default, but this method
300 exists for database backends to provide a better implementation
301 according to their own quoting schemes.
302 """
303
304 # Convert params to contain string values.
305 def to_string(s: Any) -> str:
306 return force_str(s, strings_only=True, errors="replace")
307
308 u_params: tuple[str, ...] | dict[str, str]
309 if isinstance(params, (list, tuple)): # noqa: UP038
310 u_params = tuple(to_string(val) for val in params)
311 elif params is None:
312 u_params = ()
313 else:
314 u_params = {to_string(k): to_string(v) for k, v in params.items()} # type: ignore[union-attr]
315
316 return f"QUERY = {sql!r} - PARAMS = {u_params!r}"
317
318 def last_insert_id(self, cursor: Any, table_name: str, pk_name: str) -> int:
319 """
320 Given a cursor object that has just performed an INSERT statement into
321 a table that has an auto-incrementing ID, return the newly created ID.
322
323 `pk_name` is the name of the primary-key column.
324 """
325 return cursor.lastrowid
326
327 def lookup_cast(self, lookup_type: str, internal_type: str | None = None) -> str:
328 """
329 Return the string to use in a query when performing lookups
330 ("contains", "like", etc.). It should contain a '%s' placeholder for
331 the column being searched against.
332 """
333 return "%s"
334
335 def max_in_list_size(self) -> int | None:
336 """
337 Return the maximum number of items that can be passed in a single 'IN'
338 list condition, or None if the backend does not impose a limit.
339 """
340 return None
341
342 def max_name_length(self) -> int | None:
343 """
344 Return the maximum length of table and column names, or None if there
345 is no limit.
346 """
347 return None
348
    @abstractmethod
    def no_limit_value(self) -> int | None:
        """
        Return the value to use for the LIMIT when we are wanting "LIMIT
        infinity". Return None if the limit clause can be omitted in this case.
        """
        ...
356
357 def pk_default_value(self) -> str:
358 """
359 Return the value to use during an INSERT statement to specify that
360 the field should use its default value.
361 """
362 return "DEFAULT"
363
364 def prepare_sql_script(self, sql: str) -> list[str]:
365 """
366 Take an SQL script that may contain multiple lines and return a list
367 of statements to feed to successive cursor.execute() calls.
368
369 Since few databases are able to process raw SQL scripts in a single
370 cursor.execute() call and PEP 249 doesn't talk about this use case,
371 the default implementation is conservative.
372 """
373 return [
374 sqlparse.format(statement, strip_comments=True)
375 for statement in sqlparse.split(sql)
376 if statement
377 ]
378
379 def return_insert_columns(
380 self, fields: list[Field]
381 ) -> tuple[str, list[Any]] | None:
382 """
383 For backends that support returning columns as part of an insert query,
384 return the SQL and params to append to the INSERT query. The returned
385 fragment should contain a format string to hold the appropriate column.
386 """
387 return None
388
    @abstractmethod
    def bulk_insert_sql(
        self, fields: list[Field], placeholder_rows: list[list[str]]
    ) -> str:
        """
        Return the SQL for bulk inserting rows.

        `placeholder_rows` holds one list of placeholder strings per row.
        """
        ...

    @abstractmethod
    def fetch_returned_insert_rows(self, cursor: Any) -> list[Any]:
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table, return the list of returned data.
        """
        ...
405
406 def compiler(self, compiler_name: str) -> type[Any]:
407 """
408 Return the SQLCompiler class corresponding to the given name,
409 in the namespace corresponding to the `compiler_module` attribute
410 on this backend.
411 """
412 if self._cache is None:
413 self._cache = import_module(self.compiler_module)
414 return getattr(self._cache, compiler_name)
415
    @abstractmethod
    def quote_name(self, name: str) -> str:
        """
        Return a quoted version of the given table, index, or column name. Do
        not quote the given name if it's already been quoted.
        """
        ...
423
424 def regex_lookup(self, lookup_type: str) -> str:
425 """
426 Return the string to use in a query when performing regular expression
427 lookups (using "regex" or "iregex"). It should contain a '%s'
428 placeholder for the column being searched against.
429
430 If the feature is not supported (or part of it is not supported), raise
431 NotImplementedError.
432 """
433 raise NotImplementedError(
434 "subclasses of BaseDatabaseOperations may require a regex_lookup() method"
435 )
436
437 def savepoint_create_sql(self, sid: str) -> str:
438 """
439 Return the SQL for starting a new savepoint. Only required if the
440 "uses_savepoints" feature is True. The "sid" parameter is a string
441 for the savepoint id.
442 """
443 return f"SAVEPOINT {self.quote_name(sid)}"
444
445 def savepoint_commit_sql(self, sid: str) -> str:
446 """
447 Return the SQL for committing the given savepoint.
448 """
449 return f"RELEASE SAVEPOINT {self.quote_name(sid)}"
450
451 def savepoint_rollback_sql(self, sid: str) -> str:
452 """
453 Return the SQL for rolling back the given savepoint.
454 """
455 return f"ROLLBACK TO SAVEPOINT {self.quote_name(sid)}"
456
457 def set_time_zone_sql(self) -> str:
458 """
459 Return the SQL that will set the connection's time zone.
460
461 Return '' if the backend doesn't support time zones.
462 """
463 return ""
464
465 def prep_for_like_query(self, x: str) -> str:
466 """Prepare a value for use in a LIKE query."""
467 return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")
468
    # Same as prep_for_like_query(), but called for "iexact" matches, which
    # need not necessarily be implemented using "LIKE" in the backend —
    # kept as an alias so backends can override the two independently.
    prep_for_iexact_query = prep_for_like_query
472
473 def validate_autopk_value(self, value: int) -> int:
474 """
475 Certain backends do not accept some values for "serial" fields
476 (for example zero in MySQL). Raise a ValueError if the value is
477 invalid, otherwise return the validated value.
478 """
479 return value
480
481 def adapt_unknown_value(self, value: Any) -> Any:
482 """
483 Transform a value to something compatible with the backend driver.
484
485 This method only depends on the type of the value. It's designed for
486 cases where the target type isn't known, such as .raw() SQL queries.
487 As a consequence it may not work perfectly in all circumstances.
488 """
489 if isinstance(value, datetime.datetime): # must be before date
490 return self.adapt_datetimefield_value(value)
491 elif isinstance(value, datetime.date):
492 return self.adapt_datefield_value(value)
493 elif isinstance(value, datetime.time):
494 return self.adapt_timefield_value(value)
495 elif isinstance(value, decimal.Decimal):
496 return self.adapt_decimalfield_value(value)
497 else:
498 return value
499
500 def adapt_integerfield_value(
501 self, value: int | None, internal_type: str
502 ) -> int | None:
503 return value
504
505 def adapt_datefield_value(self, value: datetime.date | None) -> str | None:
506 """
507 Transform a date value to an object compatible with what is expected
508 by the backend driver for date columns.
509 """
510 if value is None:
511 return None
512 return str(value)
513
514 def adapt_datetimefield_value(
515 self, value: datetime.datetime | Any | None
516 ) -> str | Any | None:
517 """
518 Transform a datetime value to an object compatible with what is expected
519 by the backend driver for datetime columns.
520 """
521 if value is None:
522 return None
523 # Expression values are adapted by the database.
524 if hasattr(value, "resolve_expression"):
525 return value
526
527 return str(value)
528
529 def adapt_timefield_value(
530 self, value: datetime.time | Any | None
531 ) -> str | Any | None:
532 """
533 Transform a time value to an object compatible with what is expected
534 by the backend driver for time columns.
535 """
536 if value is None:
537 return None
538 # Expression values are adapted by the database.
539 if hasattr(value, "resolve_expression"):
540 return value
541
542 if timezone.is_aware(value): # type: ignore[arg-type]
543 raise ValueError("Plain does not support timezone-aware times.")
544 return str(value)
545
546 def adapt_decimalfield_value(
547 self,
548 value: decimal.Decimal | None,
549 max_digits: int | None = None,
550 decimal_places: int | None = None,
551 ) -> str | None:
552 """
553 Transform a decimal.Decimal value to an object compatible with what is
554 expected by the backend driver for decimal (numeric) columns.
555 """
556 return utils.format_number(value, max_digits, decimal_places)
557
558 def adapt_ipaddressfield_value(self, value: str | None) -> str | None:
559 """
560 Transform a string representation of an IP address into the expected
561 type for the backend driver.
562 """
563 return value or None
564
565 def adapt_json_value(
566 self, value: Any, encoder: type[json.JSONEncoder] | None
567 ) -> str:
568 return json.dumps(value, cls=encoder)
569
570 def year_lookup_bounds_for_date_field(
571 self, value: int, iso_year: bool = False
572 ) -> list[str | None]:
573 """
574 Return a two-elements list with the lower and upper bound to be used
575 with a BETWEEN operator to query a DateField value using a year
576 lookup.
577
578 `value` is an int, containing the looked-up year.
579 If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
580 """
581 if iso_year:
582 first = datetime.date.fromisocalendar(value, 1, 1)
583 second = datetime.date.fromisocalendar(
584 value + 1, 1, 1
585 ) - datetime.timedelta(days=1)
586 else:
587 first = datetime.date(value, 1, 1)
588 second = datetime.date(value, 12, 31)
589 first_adapted = self.adapt_datefield_value(first)
590 second_adapted = self.adapt_datefield_value(second)
591 return [first_adapted, second_adapted]
592
593 def year_lookup_bounds_for_datetime_field(
594 self, value: int, iso_year: bool = False
595 ) -> list[str | Any | None]:
596 """
597 Return a two-elements list with the lower and upper bound to be used
598 with a BETWEEN operator to query a DateTimeField value using a year
599 lookup.
600
601 `value` is an int, containing the looked-up year.
602 If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
603 """
604 if iso_year:
605 first = datetime.datetime.fromisocalendar(value, 1, 1)
606 second = datetime.datetime.fromisocalendar(
607 value + 1, 1, 1
608 ) - datetime.timedelta(microseconds=1)
609 else:
610 first = datetime.datetime(value, 1, 1)
611 second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
612
613 # Make sure that datetimes are aware in the current timezone
614 tz = timezone.get_current_timezone()
615 first = timezone.make_aware(first, tz)
616 second = timezone.make_aware(second, tz)
617
618 first_adapted = self.adapt_datetimefield_value(first)
619 second_adapted = self.adapt_datetimefield_value(second)
620 return [first_adapted, second_adapted]
621
622 def get_db_converters(self, expression: Any) -> list[Any]:
623 """
624 Return a list of functions needed to convert field data.
625
626 Some field types on some backends do not provide data in the correct
627 format, this is the hook for converter functions.
628 """
629 return []
630
631 def convert_durationfield_value(
632 self, value: int | None, expression: Any, connection: BaseDatabaseWrapper
633 ) -> datetime.timedelta | None:
634 if value is not None:
635 return datetime.timedelta(0, 0, value)
636 return None
637
638 def check_expression_support(self, expression: Any) -> None:
639 """
640 Check that the backend supports the provided expression.
641
642 This is used on specific backends to rule out known expressions
643 that have problematic or nonexistent implementations. If the
644 expression has a known problem, the backend should raise
645 NotSupportedError.
646 """
647 return None
648
649 def conditional_expression_supported_in_where_clause(self, expression: Any) -> bool:
650 """
651 Return True, if the conditional expression is supported in the WHERE
652 clause.
653 """
654 return True
655
656 def combine_expression(self, connector: str, sub_expressions: list[str]) -> str:
657 """
658 Combine a list of subexpressions into a single expression, using
659 the provided connecting operator. This is required because operators
660 can vary between backends (e.g., Oracle with %% and &) and between
661 subexpression types (e.g., date expressions).
662 """
663 conn = f" {connector} "
664 return conn.join(sub_expressions)
665
666 def combine_duration_expression(
667 self, connector: str, sub_expressions: list[str]
668 ) -> str:
669 return self.combine_expression(connector, sub_expressions)
670
671 def binary_placeholder_sql(self, value: Any) -> str:
672 """
673 Some backends require special syntax to insert binary content (MySQL
674 for example uses '_binary %s').
675 """
676 return "%s"
677
678 def modify_insert_params(
679 self, placeholder: str, params: list[Any] | tuple[Any, ...]
680 ) -> list[Any] | tuple[Any, ...]:
681 """
682 Allow modification of insert parameters. Needed for Oracle Spatial
683 backend due to #10888.
684 """
685 return params
686
687 def integer_field_range(self, internal_type: str) -> tuple[int, int]:
688 """
689 Given an integer field internal type (e.g. 'PositiveIntegerField'),
690 return a tuple of the (min_value, max_value) form representing the
691 range of the column type bound to the field.
692 """
693 return self.integer_field_ranges[internal_type]
694
695 def subtract_temporals(
696 self,
697 internal_type: str,
698 lhs: tuple[str, list[Any] | tuple[Any, ...]],
699 rhs: tuple[str, list[Any] | tuple[Any, ...]],
700 ) -> tuple[str, tuple[Any, ...]]:
701 if self.connection.features.supports_temporal_subtraction:
702 lhs_sql, lhs_params = lhs
703 rhs_sql, rhs_params = rhs
704 return f"({lhs_sql} - {rhs_sql})", (*lhs_params, *rhs_params)
705 raise NotSupportedError(
706 f"This backend does not support {internal_type} subtraction."
707 )
708
709 def window_frame_start(self, start: int | None) -> str:
710 if isinstance(start, int):
711 if start < 0:
712 return "%d %s" % (abs(start), self.PRECEDING) # noqa: UP031
713 elif start == 0:
714 return self.CURRENT_ROW
715 elif start is None:
716 return self.UNBOUNDED_PRECEDING
717 raise ValueError(
718 f"start argument must be a negative integer, zero, or None, but got '{start}'."
719 )
720
721 def window_frame_end(self, end: int | None) -> str:
722 if isinstance(end, int):
723 if end == 0:
724 return self.CURRENT_ROW
725 elif end > 0:
726 return "%d %s" % (end, self.FOLLOWING) # noqa: UP031
727 elif end is None:
728 return self.UNBOUNDED_FOLLOWING
729 raise ValueError(
730 f"end argument must be a positive integer, zero, or None, but got '{end}'."
731 )
732
733 def window_frame_rows_start_end(
734 self, start: int | None = None, end: int | None = None
735 ) -> tuple[str, str]:
736 """
737 Return SQL for start and end points in an OVER clause window frame.
738 """
739 if not self.connection.features.supports_over_clause:
740 raise NotSupportedError("This backend does not support window expressions.")
741 return self.window_frame_start(start), self.window_frame_end(end)
742
743 def window_frame_range_start_end(
744 self, start: int | None = None, end: int | None = None
745 ) -> tuple[str, str]:
746 start_, end_ = self.window_frame_rows_start_end(start, end)
747 features = self.connection.features
748 if features.only_supports_unbounded_with_preceding_and_following and (
749 (start and start < 0) or (end and end > 0)
750 ):
751 raise NotSupportedError(
752 f"{self.connection.display_name} only supports UNBOUNDED together with PRECEDING and "
753 "FOLLOWING."
754 )
755 return start_, end_
756
757 def explain_query_prefix(self, format: str | None = None, **options: Any) -> str:
758 if not self.connection.features.supports_explaining_query_execution:
759 raise NotSupportedError(
760 "This backend does not support explaining query execution."
761 )
762 if format:
763 supported_formats = self.connection.features.supported_explain_formats
764 normalized_format = format.upper()
765 if normalized_format not in supported_formats:
766 msg = f"{normalized_format} is not a recognized format."
767 if supported_formats:
768 msg += " Allowed formats: {}".format(
769 ", ".join(sorted(supported_formats))
770 )
771 else:
772 msg += (
773 f" {self.connection.display_name} does not support any formats."
774 )
775 raise ValueError(msg)
776 if options:
777 raise ValueError(
778 "Unknown options: {}".format(", ".join(sorted(options.keys())))
779 )
780 return self.explain_prefix # type: ignore[return-value]
781
782 def insert_statement(self, on_conflict: Any = None) -> str:
783 return "INSERT INTO"
784
785 def on_conflict_suffix_sql(
786 self,
787 fields: list[Field],
788 on_conflict: Any,
789 update_fields: Iterable[str],
790 unique_fields: Iterable[str],
791 ) -> str:
792 return ""