from __future__ import annotations

from collections.abc import Sequence
from typing import TYPE_CHECKING, Any, NamedTuple

import sqlparse
from MySQLdb.constants import FIELD_TYPE

from plain.models.backends.base.introspection import (
    BaseDatabaseIntrospection,
)
from plain.models.backends.utils import CursorWrapper
from plain.models.indexes import Index
from plain.utils.datastructures import OrderedSet

if TYPE_CHECKING:
    from .base import MySQLDatabaseWrapper


class FieldInfo(NamedTuple):
    """MySQL-specific FieldInfo extending base with additional metadata."""

    # Fields from BaseFieldInfo
    name: str
    type_code: Any
    display_size: int | None
    internal_size: int | None
    precision: int | None
    scale: int | None
    null_ok: bool | None
    default: Any
    collation: str | None
    # MySQL-specific extensions
    extra: str
    is_unsigned: bool
    has_json_constraint: bool
    comment: str | None


class InfoLine(NamedTuple):
    """Information about a column from MySQL's information schema."""

    col_name: str
    data_type: str
    max_len: int | None
    num_prec: int | None
    num_scale: int | None
    extra: str
    column_default: Any
    collation: str | None
    is_unsigned: bool
    comment: str | None


class TableInfo(NamedTuple):
    """MySQL-specific TableInfo extending base with comment support."""

    # Fields from BaseTableInfo
    name: str
    type: str
    # MySQL-specific extension
    comment: str | None


class DatabaseIntrospection(BaseDatabaseIntrospection):
    # Type hint: narrow connection type to MySQL-specific wrapper
    connection: MySQLDatabaseWrapper

    data_types_reverse = {
        FIELD_TYPE.BLOB: "TextField",
        FIELD_TYPE.CHAR: "CharField",
        FIELD_TYPE.DECIMAL: "DecimalField",
        FIELD_TYPE.NEWDECIMAL: "DecimalField",
        FIELD_TYPE.DATE: "DateField",
        FIELD_TYPE.DATETIME: "DateTimeField",
        FIELD_TYPE.DOUBLE: "FloatField",
        FIELD_TYPE.FLOAT: "FloatField",
        FIELD_TYPE.INT24: "IntegerField",
        FIELD_TYPE.JSON: "JSONField",
        FIELD_TYPE.LONG: "IntegerField",
        FIELD_TYPE.LONGLONG: "BigIntegerField",
        FIELD_TYPE.SHORT: "SmallIntegerField",
        FIELD_TYPE.STRING: "CharField",
        FIELD_TYPE.TIME: "TimeField",
        FIELD_TYPE.TIMESTAMP: "DateTimeField",
        FIELD_TYPE.TINY: "IntegerField",
        FIELD_TYPE.TINY_BLOB: "TextField",
        FIELD_TYPE.MEDIUM_BLOB: "TextField",
        FIELD_TYPE.LONG_BLOB: "TextField",
        FIELD_TYPE.VAR_STRING: "CharField",
    }

    def get_field_type(self, data_type: Any, description: Any) -> str:
        field_type = super().get_field_type(data_type, description)
        if "auto_increment" in description.extra:
            if field_type == "BigIntegerField":
                return "PrimaryKeyField"
        if description.is_unsigned:
            if field_type == "BigIntegerField":
                return "PositiveBigIntegerField"
            elif field_type == "IntegerField":
                return "PositiveIntegerField"
            elif field_type == "SmallIntegerField":
                return "PositiveSmallIntegerField"
        # JSON data type is an alias for LONGTEXT in MariaDB, use check
        # constraint clauses to introspect JSONField.
        if description.has_json_constraint:
            return "JSONField"
        return field_type
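    # Usage sketch (illustrative only): get_field_type() maps MySQL column
    # metadata onto a Plain field name. Assumes `introspection` is an instance
    # of this class and `cursor` is an open cursor; the table name below is
    # hypothetical.
    #
    #     desc = introspection.get_table_description(cursor, "app_user")[0]
    #     introspection.get_field_type(desc.type_code, desc)
    #     # e.g. "PrimaryKeyField" for an AUTO_INCREMENT BIGINT column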

    def get_table_list(self, cursor: CursorWrapper) -> Sequence[TableInfo]:
        """Return a list of table and view names in the current database."""
        cursor.execute(
            """
            SELECT
                table_name,
                table_type,
                table_comment
            FROM information_schema.tables
            WHERE table_schema = DATABASE()
            """
        )
        return [
            TableInfo(row[0], {"BASE TABLE": "t", "VIEW": "v"}.get(row[1], "t"), row[2])
            for row in cursor.fetchall()
        ]
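    # Usage sketch (illustrative only): each TableInfo reports whether the
    # relation is a base table ("t") or a view ("v"), plus its comment.
    #
    #     for table in introspection.get_table_list(cursor):
    #         print(table.name, table.type, table.comment)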

    def get_table_description(
        self, cursor: CursorWrapper, table_name: str
    ) -> Sequence[FieldInfo]:
        """
        Return a description of the table with the DB-API cursor.description
        interface.
        """
        json_constraints: set[Any] = set()
        if (
            self.connection.mysql_is_mariadb
            and self.connection.features.can_introspect_json_field
        ):
            # JSON data type is an alias for LONGTEXT in MariaDB, select
            # JSON_VALID() constraints to introspect JSONField.
            cursor.execute(
                """
                SELECT c.constraint_name AS column_name
                FROM information_schema.check_constraints AS c
                WHERE
                    c.table_name = %s AND
                    LOWER(c.check_clause) =
                        'json_valid(`' + LOWER(c.constraint_name) + '`)' AND
                    c.constraint_schema = DATABASE()
                """,
                [table_name],
            )
            json_constraints = {row[0] for row in cursor.fetchall()}
        # A default collation for the given table.
        cursor.execute(
            """
            SELECT table_collation
            FROM information_schema.tables
            WHERE table_schema = DATABASE()
                AND table_name = %s
            """,
            [table_name],
        )
        row = cursor.fetchone()
        default_column_collation = row[0] if row else ""
        # information_schema database gives more accurate results for some figures:
        # - varchar length returned by cursor.description is an internal length,
        #   not visible length (#5725)
        # - precision and scale (for decimal fields) (#5014)
        # - auto_increment is not available in cursor.description
        cursor.execute(
            """
            SELECT
                column_name, data_type, character_maximum_length,
                numeric_precision, numeric_scale, extra, column_default,
                CASE
                    WHEN collation_name = %s THEN NULL
                    ELSE collation_name
                END AS collation_name,
                CASE
                    WHEN column_type LIKE '%% unsigned' THEN 1
                    ELSE 0
                END AS is_unsigned,
                column_comment
            FROM information_schema.columns
            WHERE table_name = %s AND table_schema = DATABASE()
            """,
            [default_column_collation, table_name],
        )
        field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}

        cursor.execute(
            f"SELECT * FROM {self.connection.ops.quote_name(table_name)} LIMIT 1"
        )

        def to_int(i: Any) -> Any:
            return int(i) if i is not None else i

        fields = []
        for line in cursor.description:
            info = field_info[line[0]]
            fields.append(
                FieldInfo(
                    name=line[0],
                    type_code=line[1],
                    display_size=to_int(info.max_len) or line[2],
                    internal_size=to_int(info.max_len) or line[3],
                    precision=to_int(info.num_prec) or line[4],
                    scale=to_int(info.num_scale) or line[5],
                    null_ok=line[6],
                    default=info.column_default,
                    collation=info.collation,
                    extra=info.extra,
                    is_unsigned=info.is_unsigned,
                    has_json_constraint=line[0] in json_constraints,
                    comment=info.comment,
                )
            )
        return fields
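    # Usage sketch (illustrative only; the table name is hypothetical): the
    # returned FieldInfo rows merge cursor.description with the more accurate
    # metadata from information_schema.columns.
    #
    #     for field in introspection.get_table_description(cursor, "app_user"):
    #         print(field.name, field.collation, field.is_unsigned, field.comment)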

    def get_sequences(
        self, cursor: CursorWrapper, table_name: str, table_fields: tuple[Any, ...] = ()
    ) -> list[dict[str, Any]]:
        for field_info in self.get_table_description(cursor, table_name):
            if "auto_increment" in field_info.extra:
                # MySQL allows only one auto-increment column per table.
                return [{"table": table_name, "column": field_info.name}]
        return []
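    # Usage sketch (illustrative only; names are hypothetical):
    #
    #     introspection.get_sequences(cursor, "app_user")
    #     # -> [{"table": "app_user", "column": "id"}] when "id" is AUTO_INCREMENT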

    def get_relations(
        self, cursor: CursorWrapper, table_name: str
    ) -> dict[str, tuple[str, str]]:
        """
        Return a dictionary of {field_name: (field_name_other_table, other_table)}
        representing all foreign keys in the given table.
        """
        cursor.execute(
            """
            SELECT column_name, referenced_column_name, referenced_table_name
            FROM information_schema.key_column_usage
            WHERE table_name = %s
                AND table_schema = DATABASE()
                AND referenced_table_name IS NOT NULL
                AND referenced_column_name IS NOT NULL
            """,
            [table_name],
        )
        return {
            field_name: (other_field, other_table)
            for field_name, other_field, other_table in cursor.fetchall()
        }
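    # Usage sketch (illustrative only; names are hypothetical):
    #
    #     introspection.get_relations(cursor, "app_order")
    #     # -> {"user_id": ("id", "app_user")} for a foreign key from
    #     #    app_order.user_id to app_user.id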

    def get_storage_engine(self, cursor: CursorWrapper, table_name: str) -> str:
        """
        Retrieve the storage engine for a given table. Return the default
        storage engine if the table doesn't exist.
        """
        cursor.execute(
            """
            SELECT engine
            FROM information_schema.tables
            WHERE
                table_name = %s AND
                table_schema = DATABASE()
            """,
            [table_name],
        )
        result = cursor.fetchone()
        if not result:
            return self.connection.features._mysql_storage_engine
        return result[0]
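    # Usage sketch (illustrative only; the table name is hypothetical):
    #
    #     introspection.get_storage_engine(cursor, "app_user")  # e.g. "InnoDB"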

    def _parse_constraint_columns(
        self, check_clause: str, columns: set[str]
    ) -> OrderedSet:
        check_columns: OrderedSet = OrderedSet()
        statement = sqlparse.parse(check_clause)[0]
        tokens = (token for token in statement.flatten() if not token.is_whitespace)
        for token in tokens:
            if (
                token.ttype == sqlparse.tokens.Name
                and self.connection.ops.quote_name(token.value) == token.value
                and token.value[1:-1] in columns
            ):
                check_columns.add(token.value[1:-1])
        return check_columns
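    # Usage sketch (illustrative only; column names are hypothetical): only
    # backtick-quoted identifiers that match known columns are collected from
    # the check clause.
    #
    #     introspection._parse_constraint_columns(
    #         "`price` >= 0 and `price` <= `msrp`", {"price", "msrp"}
    #     )
    #     # -> OrderedSet containing "price" and "msrp"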

    def get_constraints(
        self, cursor: CursorWrapper, table_name: str
    ) -> dict[str, dict[str, Any]]:
        """
        Retrieve any constraints or keys (unique, pk, fk, check, index) across
        one or more columns.
        """
        constraints: dict[str, dict[str, Any]] = {}
        # Get the actual constraint names and columns
        name_query = """
            SELECT kc.`constraint_name`, kc.`column_name`,
                kc.`referenced_table_name`, kc.`referenced_column_name`,
                c.`constraint_type`
            FROM
                information_schema.key_column_usage AS kc,
                information_schema.table_constraints AS c
            WHERE
                kc.table_schema = DATABASE() AND
                c.table_schema = kc.table_schema AND
                c.constraint_name = kc.constraint_name AND
                c.constraint_type != 'CHECK' AND
                kc.table_name = %s
            ORDER BY kc.`ordinal_position`
        """
        cursor.execute(name_query, [table_name])
        for constraint, column, ref_table, ref_column, kind in cursor.fetchall():
            if constraint not in constraints:
                constraints[constraint] = {
                    "columns": OrderedSet(),
                    "primary_key": kind == "PRIMARY KEY",
                    "unique": kind in {"PRIMARY KEY", "UNIQUE"},
                    "index": False,
                    "check": False,
                    "foreign_key": (ref_table, ref_column) if ref_column else None,
                }
                if self.connection.features.supports_index_column_ordering:
                    constraints[constraint]["orders"] = []
            constraints[constraint]["columns"].add(column)
        # Add check constraints.
        if self.connection.features.can_introspect_check_constraints:
            unnamed_constraints_index = 0
            columns = {
                info.name for info in self.get_table_description(cursor, table_name)
            }
            if self.connection.mysql_is_mariadb:
                type_query = """
                    SELECT c.constraint_name, c.check_clause
                    FROM information_schema.check_constraints AS c
                    WHERE
                        c.constraint_schema = DATABASE() AND
                        c.table_name = %s
                """
            else:
                type_query = """
                    SELECT cc.constraint_name, cc.check_clause
                    FROM
                        information_schema.check_constraints AS cc,
                        information_schema.table_constraints AS tc
                    WHERE
                        cc.constraint_schema = DATABASE() AND
                        tc.table_schema = cc.constraint_schema AND
                        cc.constraint_name = tc.constraint_name AND
                        tc.constraint_type = 'CHECK' AND
                        tc.table_name = %s
                """
            cursor.execute(type_query, [table_name])
            for constraint, check_clause in cursor.fetchall():
                constraint_columns = self._parse_constraint_columns(
                    check_clause, columns
                )
                # Ensure uniqueness of unnamed constraints. Unnamed unique
                # and check column constraints have the same name as
                # a column.
                if set(constraint_columns) == {constraint}:
                    unnamed_constraints_index += 1
                    constraint = f"__unnamed_constraint_{unnamed_constraints_index}__"
                constraints[constraint] = {
                    "columns": constraint_columns,
                    "primary_key": False,
                    "unique": False,
                    "index": False,
                    "check": True,
                    "foreign_key": None,
                }
        # Now add in the indexes
        cursor.execute(f"SHOW INDEX FROM {self.connection.ops.quote_name(table_name)}")
        for table, non_unique, index, colseq, column, order, type_ in [
            x[:6] + (x[10],) for x in cursor.fetchall()
        ]:
            if index not in constraints:
                constraints[index] = {
                    "columns": OrderedSet(),
                    "primary_key": False,
                    "unique": not non_unique,
                    "check": False,
                    "foreign_key": None,
                }
                if self.connection.features.supports_index_column_ordering:
                    constraints[index]["orders"] = []
            constraints[index]["index"] = True
            constraints[index]["type"] = (
                Index.suffix if type_ == "BTREE" else type_.lower()
            )
            constraints[index]["columns"].add(column)
            if self.connection.features.supports_index_column_ordering:
                constraints[index]["orders"].append("DESC" if order == "D" else "ASC")
        # Convert the sorted sets to lists
        for constraint in constraints.values():
            constraint["columns"] = list(constraint["columns"])
        return constraints
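    # Usage sketch (illustrative only; names are hypothetical):
    #
    #     constraints = introspection.get_constraints(cursor, "app_user")
    #     constraints["PRIMARY"]
    #     # -> roughly {"columns": ["id"], "primary_key": True, "unique": True,
    #     #    "index": True, "check": False, "foreign_key": None, ...}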