from __future__ import annotations

from collections.abc import Sequence
from typing import TYPE_CHECKING, Any, NamedTuple

import sqlparse
from MySQLdb.constants import FIELD_TYPE

from plain.models.backends.base.introspection import (
    BaseDatabaseIntrospection,
)
from plain.models.backends.utils import CursorWrapper
from plain.models.indexes import Index
from plain.utils.datastructures import OrderedSet

if TYPE_CHECKING:
    from .base import MySQLDatabaseWrapper


class FieldInfo(NamedTuple):
    """MySQL-specific FieldInfo extending base with additional metadata."""

    # Fields from BaseFieldInfo
    name: str
    type_code: Any
    display_size: int | None
    internal_size: int | None
    precision: int | None
    scale: int | None
    null_ok: bool | None
    default: Any
    # MySQL-specific extensions
    extra: str
    is_unsigned: bool
    has_json_constraint: bool


class InfoLine(NamedTuple):
    """Information about a column from MySQL's information schema."""

    col_name: str
    data_type: str
    max_len: int | None
    num_prec: int | None
    num_scale: int | None
    extra: str
    column_default: Any
    is_unsigned: bool


class TableInfo(NamedTuple):
    """MySQL-specific TableInfo."""

    # Fields from BaseTableInfo
    name: str
    type: str


class DatabaseIntrospection(BaseDatabaseIntrospection):
    # Type hint: narrow connection type to MySQL-specific wrapper
    connection: MySQLDatabaseWrapper

    data_types_reverse = {
        FIELD_TYPE.BLOB: "TextField",
        FIELD_TYPE.CHAR: "CharField",
        FIELD_TYPE.DECIMAL: "DecimalField",
        FIELD_TYPE.NEWDECIMAL: "DecimalField",
        FIELD_TYPE.DATE: "DateField",
        FIELD_TYPE.DATETIME: "DateTimeField",
        FIELD_TYPE.DOUBLE: "FloatField",
        FIELD_TYPE.FLOAT: "FloatField",
        FIELD_TYPE.INT24: "IntegerField",
        FIELD_TYPE.JSON: "JSONField",
        FIELD_TYPE.LONG: "IntegerField",
        FIELD_TYPE.LONGLONG: "BigIntegerField",
        FIELD_TYPE.SHORT: "SmallIntegerField",
        FIELD_TYPE.STRING: "CharField",
        FIELD_TYPE.TIME: "TimeField",
        FIELD_TYPE.TIMESTAMP: "DateTimeField",
        FIELD_TYPE.TINY: "IntegerField",
        FIELD_TYPE.TINY_BLOB: "TextField",
        FIELD_TYPE.MEDIUM_BLOB: "TextField",
        FIELD_TYPE.LONG_BLOB: "TextField",
        FIELD_TYPE.VAR_STRING: "CharField",
    }

    def get_field_type(self, data_type: Any, description: Any) -> str:
        field_type = super().get_field_type(data_type, description)
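        # An auto_increment BIGINT column is reported as the primary key field type.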
        if "auto_increment" in description.extra:
            if field_type == "BigIntegerField":
                return "PrimaryKeyField"
        if description.is_unsigned:
            if field_type == "BigIntegerField":
                return "PositiveBigIntegerField"
            elif field_type == "IntegerField":
                return "PositiveIntegerField"
            elif field_type == "SmallIntegerField":
                return "PositiveSmallIntegerField"
        # JSON data type is an alias for LONGTEXT in MariaDB, use check
        # constraints clauses to introspect JSONField.
        if description.has_json_constraint:
            return "JSONField"
        return field_type

    def get_table_list(self, cursor: CursorWrapper) -> Sequence[TableInfo]:
        """Return a list of table and view names in the current database."""
        cursor.execute(
            """
            SELECT
                table_name,
                table_type
            FROM information_schema.tables
            WHERE table_schema = DATABASE()
            """
        )
        return [
            TableInfo(row[0], {"BASE TABLE": "t", "VIEW": "v"}.get(row[1], "t"))
            for row in cursor.fetchall()
        ]

    def get_table_description(
        self, cursor: CursorWrapper, table_name: str
    ) -> Sequence[FieldInfo]:
        """
        Return a description of the table with the DB-API cursor.description
        interface.
        """
        json_constraints: set[Any] = set()
        if (
            self.connection.mysql_is_mariadb
            and self.connection.features.can_introspect_json_field
        ):
            # JSON data type is an alias for LONGTEXT in MariaDB, select
            # JSON_VALID() constraints to introspect JSONField.
            cursor.execute(
                """
                SELECT c.constraint_name AS column_name
                FROM information_schema.check_constraints AS c
                WHERE
                    c.table_name = %s AND
                    LOWER(c.check_clause) =
                        'json_valid(`' + LOWER(c.constraint_name) + '`)' AND
                    c.constraint_schema = DATABASE()
                """,
                [table_name],
            )
            json_constraints = {row[0] for row in cursor.fetchall()}
        # information_schema database gives more accurate results for some figures:
        # - varchar length returned by cursor.description is an internal length,
        #   not visible length (#5725)
        # - precision and scale (for decimal fields) (#5014)
        # - auto_increment is not available in cursor.description
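        # %% below is a literal % sign, since the query is passed through the
        # driver's parameter substitution.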
        cursor.execute(
            """
            SELECT
                column_name, data_type, character_maximum_length,
                numeric_precision, numeric_scale, extra, column_default,
                CASE
                    WHEN column_type LIKE '%% unsigned' THEN 1
                    ELSE 0
                END AS is_unsigned
            FROM information_schema.columns
            WHERE table_name = %s AND table_schema = DATABASE()
            """,
            [table_name],
        )
        field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}

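        # Fetch a single row so that cursor.description is populated with the
        # driver-level type code for each column.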
        cursor.execute(
            f"SELECT * FROM {self.connection.ops.quote_name(table_name)} LIMIT 1"
        )

        def to_int(i: Any) -> Any:
            return int(i) if i is not None else i

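        # Merge the driver's description with the information_schema metadata,
        # preferring the latter for lengths, precision, and scale.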
        fields = []
        for line in cursor.description:
            info = field_info[line[0]]
            fields.append(
                FieldInfo(
                    name=line[0],
                    type_code=line[1],
                    display_size=to_int(info.max_len) or line[2],
                    internal_size=to_int(info.max_len) or line[3],
                    precision=to_int(info.num_prec) or line[4],
                    scale=to_int(info.num_scale) or line[5],
                    null_ok=line[6],
                    default=info.column_default,
                    extra=info.extra,
                    is_unsigned=info.is_unsigned,
                    has_json_constraint=line[0] in json_constraints,
                )
            )
        return fields

    def get_sequences(
        self, cursor: CursorWrapper, table_name: str, table_fields: tuple[Any, ...] = ()
    ) -> list[dict[str, Any]]:
        for field_info in self.get_table_description(cursor, table_name):
            if "auto_increment" in field_info.extra:
                # MySQL allows only one auto-increment column per table.
                return [{"table": table_name, "column": field_info.name}]
        return []

    def get_relations(
        self, cursor: CursorWrapper, table_name: str
    ) -> dict[str, tuple[str, str]]:
        """
        Return a dictionary of {field_name: (field_name_other_table, other_table)}
        representing all foreign keys in the given table.
        """
        cursor.execute(
            """
            SELECT column_name, referenced_column_name, referenced_table_name
            FROM information_schema.key_column_usage
            WHERE table_name = %s
                AND table_schema = DATABASE()
                AND referenced_table_name IS NOT NULL
                AND referenced_column_name IS NOT NULL
            """,
            [table_name],
        )
        return {
            field_name: (other_field, other_table)
            for field_name, other_field, other_table in cursor.fetchall()
        }

    def get_storage_engine(self, cursor: CursorWrapper, table_name: str) -> str:
        """
        Retrieve the storage engine for a given table. Return the default
        storage engine if the table doesn't exist.
        """
        cursor.execute(
            """
            SELECT engine
            FROM information_schema.tables
            WHERE
                table_name = %s AND
                table_schema = DATABASE()
            """,
            [table_name],
        )
        result = cursor.fetchone()
        if not result:
            return self.connection.features._mysql_storage_engine
        return result[0]

    def _parse_constraint_columns(
        self, check_clause: str, columns: set[str]
    ) -> OrderedSet:
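        """
        Return the subset of columns referenced in check_clause, collected by
        scanning the parsed clause for already-quoted column identifiers.
        """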
        check_columns: OrderedSet = OrderedSet()
        statement = sqlparse.parse(check_clause)[0]
        tokens = (token for token in statement.flatten() if not token.is_whitespace)
        for token in tokens:
            if (
                token.ttype == sqlparse.tokens.Name
                and self.connection.ops.quote_name(token.value) == token.value
                and token.value[1:-1] in columns
            ):
                check_columns.add(token.value[1:-1])
        return check_columns

    def get_constraints(
        self, cursor: CursorWrapper, table_name: str
    ) -> dict[str, dict[str, Any]]:
        """
        Retrieve any constraints or keys (unique, pk, fk, check, index) across
        one or more columns.
        """
        constraints: dict[str, dict[str, Any]] = {}
        # Get the actual constraint names and columns
        name_query = """
            SELECT kc.`constraint_name`, kc.`column_name`,
                kc.`referenced_table_name`, kc.`referenced_column_name`,
                c.`constraint_type`
            FROM
                information_schema.key_column_usage AS kc,
                information_schema.table_constraints AS c
            WHERE
                kc.table_schema = DATABASE() AND
                c.table_schema = kc.table_schema AND
                c.constraint_name = kc.constraint_name AND
                c.constraint_type != 'CHECK' AND
                kc.table_name = %s
            ORDER BY kc.`ordinal_position`
        """
        cursor.execute(name_query, [table_name])
        for constraint, column, ref_table, ref_column, kind in cursor.fetchall():
            if constraint not in constraints:
                constraints[constraint] = {
                    "columns": OrderedSet(),
                    "primary_key": kind == "PRIMARY KEY",
                    "unique": kind in {"PRIMARY KEY", "UNIQUE"},
                    "index": False,
                    "check": False,
                    "foreign_key": (ref_table, ref_column) if ref_column else None,
                }
                if self.connection.features.supports_index_column_ordering:
                    constraints[constraint]["orders"] = []
            constraints[constraint]["columns"].add(column)
        # Add check constraints.
        if self.connection.features.can_introspect_check_constraints:
            unnamed_constraints_index = 0
            columns = {
                info.name for info in self.get_table_description(cursor, table_name)
            }
            if self.connection.mysql_is_mariadb:
                type_query = """
                    SELECT c.constraint_name, c.check_clause
                    FROM information_schema.check_constraints AS c
                    WHERE
                        c.constraint_schema = DATABASE() AND
                        c.table_name = %s
                """
            else:
                type_query = """
                    SELECT cc.constraint_name, cc.check_clause
                    FROM
                        information_schema.check_constraints AS cc,
                        information_schema.table_constraints AS tc
                    WHERE
                        cc.constraint_schema = DATABASE() AND
                        tc.table_schema = cc.constraint_schema AND
                        cc.constraint_name = tc.constraint_name AND
                        tc.constraint_type = 'CHECK' AND
                        tc.table_name = %s
                """
            cursor.execute(type_query, [table_name])
            for constraint, check_clause in cursor.fetchall():
                constraint_columns = self._parse_constraint_columns(
                    check_clause, columns
                )
                # Ensure uniqueness of unnamed constraints. Unnamed unique
                # and check columns constraints have the same name as
                # a column.
                if set(constraint_columns) == {constraint}:
                    unnamed_constraints_index += 1
                    constraint = f"__unnamed_constraint_{unnamed_constraints_index}__"
                constraints[constraint] = {
                    "columns": constraint_columns,
                    "primary_key": False,
                    "unique": False,
                    "index": False,
                    "check": True,
                    "foreign_key": None,
                }
        # Now add in the indexes
        cursor.execute(f"SHOW INDEX FROM {self.connection.ops.quote_name(table_name)}")
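        # SHOW INDEX result columns: 0=Table, 1=Non_unique, 2=Key_name,
        # 3=Seq_in_index, 4=Column_name, 5=Collation (A/D), 10=Index_type.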
        for table, non_unique, index, colseq, column, order, type_ in [
            x[:6] + (x[10],) for x in cursor.fetchall()
        ]:
            if index not in constraints:
                constraints[index] = {
                    "columns": OrderedSet(),
                    "primary_key": False,
                    "unique": not non_unique,
                    "check": False,
                    "foreign_key": None,
                }
                if self.connection.features.supports_index_column_ordering:
                    constraints[index]["orders"] = []
            constraints[index]["index"] = True
            constraints[index]["type"] = (
                Index.suffix if type_ == "BTREE" else type_.lower()
            )
            constraints[index]["columns"].add(column)
            if self.connection.features.supports_index_column_ordering:
                constraints[index]["orders"].append("DESC" if order == "D" else "ASC")
        # Convert the sorted sets to lists
        for constraint in constraints.values():
            constraint["columns"] = list(constraint["columns"])
        return constraints