1import collections.abc
2import copy
3import datetime
4import decimal
5import enum
6import operator
7import uuid
8import warnings
9from base64 import b64decode, b64encode
10from functools import cached_property, partialmethod, total_ordering
11
12from plain import exceptions, preflight, validators
13from plain.models.constants import LOOKUP_SEP
14from plain.models.db import connection, connections, router
15from plain.models.enums import ChoicesMeta
16from plain.models.query_utils import DeferredAttribute, RegisterLookupMixin
17from plain.utils import timezone
18from plain.utils.datastructures import DictWrapper
19from plain.utils.dateparse import (
20 parse_date,
21 parse_datetime,
22 parse_duration,
23 parse_time,
24)
25from plain.utils.duration import duration_microseconds, duration_string
26from plain.utils.functional import Promise
27from plain.utils.ipv6 import clean_ipv6_address
28from plain.utils.itercompat import is_iterable
29
30from ..registry import models_registry
31
32__all__ = [
33 "AutoField",
34 "BLANK_CHOICE_DASH",
35 "BigAutoField",
36 "BigIntegerField",
37 "BinaryField",
38 "BooleanField",
39 "CharField",
40 "CommaSeparatedIntegerField",
41 "DateField",
42 "DateTimeField",
43 "DecimalField",
44 "DurationField",
45 "EmailField",
46 "Empty",
47 "Field",
48 "FloatField",
49 "GenericIPAddressField",
50 "IPAddressField",
51 "IntegerField",
52 "NOT_PROVIDED",
53 "NullBooleanField",
54 "PositiveBigIntegerField",
55 "PositiveIntegerField",
56 "PositiveSmallIntegerField",
57 "SmallAutoField",
58 "SmallIntegerField",
59 "TextField",
60 "TimeField",
61 "URLField",
62 "UUIDField",
63]
64
65
66class Empty:
67 pass
68
69
70class NOT_PROVIDED:
71 pass
72
73
74# The values to use for "blank" in SelectFields. Will be appended to the start
75# of most "choices" lists.
76BLANK_CHOICE_DASH = [("", "---------")]
77
78
79def _load_field(package_label, model_name, field_name):
80 return models_registry.get_model(package_label, model_name)._meta.get_field(
81 field_name
82 )
83
84
85# A guide to Field parameters:
86#
87# * name: The name of the field specified in the model.
88# * attname: The attribute to use on the model object. This is the same as
89# "name", except in the case of ForeignKeys, where "_id" is
90# appended.
91# * db_column: The db_column specified in the model (or None).
92# * column: The database column for this field. This is the same as
93# "attname", except if db_column is specified.
94#
95# Code that introspects values, or does other dynamic things, should use
96# attname. For example, this gets the primary key value of object "obj":
97#
98# getattr(obj, opts.pk.attname)
99
100
101def _empty(of_cls):
102 new = Empty()
103 new.__class__ = of_cls
104 return new
105
106
107def return_None():
108 return None
109
110
111@total_ordering
112class Field(RegisterLookupMixin):
113 """Base class for all field types"""
114
115 # Designates whether empty strings fundamentally are allowed at the
116 # database level.
117 empty_strings_allowed = True
118 empty_values = list(validators.EMPTY_VALUES)
119
120 # These track each time a Field instance is created. Used to retain order.
121 # The auto_creation_counter is used for fields that Plain implicitly
122 # creates, creation_counter is used for all user-specified fields.
123 creation_counter = 0
124 auto_creation_counter = -1
125 default_validators = [] # Default set of validators
126 default_error_messages = {
127 "invalid_choice": "Value %(value)r is not a valid choice.",
128 "allow_null": "This field cannot be null.",
129 "required": "This field is be required.",
130 "unique": "A %(model_name)s with this %(field_label)s already exists.",
131 }
132 system_check_deprecated_details = None
133 system_check_removed_details = None
134
135 # Attributes that don't affect a column definition.
136 # These attributes are ignored when altering the field.
137 non_db_attrs = (
138 "required",
139 "choices",
140 "db_column",
141 "error_messages",
142 "limit_choices_to",
143 # Database-level options are not supported, see #21961.
144 "on_delete",
145 "related_name",
146 "related_query_name",
147 "validators",
148 )
149
150 # Field flags
151 hidden = False
152
153 many_to_many = None
154 many_to_one = None
155 one_to_many = None
156 related_model = None
157
158 descriptor_class = DeferredAttribute
159
160 # Generic field type description, usually overridden by subclasses
161 def _description(self):
162 return f"Field of type: {self.__class__.__name__}"
163
164 description = property(_description)
165
166 def __init__(
167 self,
168 *,
169 primary_key=False,
170 max_length=None,
171 required=True,
172 allow_null=False,
173 rel=None,
174 default=NOT_PROVIDED,
175 choices=None,
176 db_column=None,
177 auto_created=False,
178 validators=(),
179 error_messages=None,
180 db_comment=None,
181 ):
182 self.name = None # Set by set_attributes_from_name
183 self.primary_key = primary_key
184 self.max_length = max_length
185 self.required, self.allow_null = required, allow_null
186 self.remote_field = rel
187 self.is_relation = self.remote_field is not None
188 self.default = default
189 if isinstance(choices, ChoicesMeta):
190 choices = choices.choices
191 elif isinstance(choices, enum.EnumMeta):
192 choices = [(member.value, member.name) for member in choices]
193 if isinstance(choices, collections.abc.Iterator):
194 choices = list(choices)
195 self.choices = choices
196 self.db_column = db_column
197 self.db_comment = db_comment
198 self.auto_created = auto_created
199
200 # Adjust the appropriate creation counter, and save our local copy.
201 if auto_created:
202 self.creation_counter = Field.auto_creation_counter
203 Field.auto_creation_counter -= 1
204 else:
205 self.creation_counter = Field.creation_counter
206 Field.creation_counter += 1
207
208 self._validators = list(validators) # Store for deconstruction later
209
210 self._error_messages = error_messages # Store for deconstruction later
211
212 def __str__(self):
213 """
214 Return "package_label.model_label.field_name" for fields attached to
215 models.
216 """
217 if not hasattr(self, "model"):
218 return super().__str__()
219 model = self.model
220 return f"{model._meta.label}.{self.name}"
221
222 def __repr__(self):
223 """Display the module, class, and name of the field."""
224 path = f"{self.__class__.__module__}.{self.__class__.__qualname__}"
225 name = getattr(self, "name", None)
226 if name is not None:
227 return f"<{path}: {name}>"
228 return f"<{path}>"
229
230 def check(self, **kwargs):
231 return [
232 *self._check_field_name(),
233 *self._check_choices(),
234 *self._check_db_comment(**kwargs),
235 *self._check_null_allowed_for_primary_keys(),
236 *self._check_backend_specific_checks(**kwargs),
237 *self._check_validators(),
238 *self._check_deprecation_details(),
239 ]
240
241 def _check_field_name(self):
242 """
243 Check if field name is valid, i.e. 1) does not end with an
244 underscore, 2) does not contain "__" and 3) is not "pk".
245 """
246 if self.name.endswith("_"):
247 return [
248 preflight.Error(
249 "Field names must not end with an underscore.",
250 obj=self,
251 id="fields.E001",
252 )
253 ]
254 elif LOOKUP_SEP in self.name:
255 return [
256 preflight.Error(
257 f'Field names must not contain "{LOOKUP_SEP}".',
258 obj=self,
259 id="fields.E002",
260 )
261 ]
262 elif self.name == "pk":
263 return [
264 preflight.Error(
265 "'pk' is a reserved word that cannot be used as a field name.",
266 obj=self,
267 id="fields.E003",
268 )
269 ]
270 else:
271 return []
272
273 @classmethod
274 def _choices_is_value(cls, value):
275 return isinstance(value, str | Promise) or not is_iterable(value)
276
277 def _check_choices(self):
278 if not self.choices:
279 return []
280
281 if not is_iterable(self.choices) or isinstance(self.choices, str):
282 return [
283 preflight.Error(
284 "'choices' must be an iterable (e.g., a list or tuple).",
285 obj=self,
286 id="fields.E004",
287 )
288 ]
289
290 choice_max_length = 0
291 # Expect [group_name, [value, display]]
292 for choices_group in self.choices:
293 try:
294 group_name, group_choices = choices_group
295 except (TypeError, ValueError):
296 # Containing non-pairs
297 break
298 try:
299 if not all(
300 self._choices_is_value(value) and self._choices_is_value(human_name)
301 for value, human_name in group_choices
302 ):
303 break
304 if self.max_length is not None and group_choices:
305 choice_max_length = max(
306 [
307 choice_max_length,
308 *(
309 len(value)
310 for value, _ in group_choices
311 if isinstance(value, str)
312 ),
313 ]
314 )
315 except (TypeError, ValueError):
316 # No groups, choices in the form [value, display]
317 value, human_name = group_name, group_choices
318 if not self._choices_is_value(value) or not self._choices_is_value(
319 human_name
320 ):
321 break
322 if self.max_length is not None and isinstance(value, str):
323 choice_max_length = max(choice_max_length, len(value))
324
325 # Special case: choices=['ab']
326 if isinstance(choices_group, str):
327 break
328 else:
329 if self.max_length is not None and choice_max_length > self.max_length:
330 return [
331 preflight.Error(
332 "'max_length' is too small to fit the longest value " # noqa: UP031
333 "in 'choices' (%d characters)." % choice_max_length,
334 obj=self,
335 id="fields.E009",
336 ),
337 ]
338 return []
339
340 return [
341 preflight.Error(
342 "'choices' must be an iterable containing "
343 "(actual value, human readable name) tuples.",
344 obj=self,
345 id="fields.E005",
346 )
347 ]
348
349 def _check_db_comment(self, databases=None, **kwargs):
350 if not self.db_comment or not databases:
351 return []
352 errors = []
353 for db in databases:
354 if not router.allow_migrate_model(db, self.model):
355 continue
356 connection = connections[db]
357 if not (
358 connection.features.supports_comments
359 or "supports_comments" in self.model._meta.required_db_features
360 ):
361 errors.append(
362 preflight.Warning(
363 f"{connection.display_name} does not support comments on "
364 f"columns (db_comment).",
365 obj=self,
366 id="fields.W163",
367 )
368 )
369 return errors
370
371 def _check_null_allowed_for_primary_keys(self):
372 if (
373 self.primary_key
374 and self.allow_null
375 and not connection.features.interprets_empty_strings_as_nulls
376 ):
377 # We cannot reliably check this for backends like Oracle which
378 # consider NULL and '' to be equal (and thus set up
379 # character-based fields a little differently).
380 return [
381 preflight.Error(
382 "Primary keys must not have allow_null=True.",
383 hint=(
384 "Set allow_null=False on the field, or "
385 "remove primary_key=True argument."
386 ),
387 obj=self,
388 id="fields.E007",
389 )
390 ]
391 else:
392 return []
393
394 def _check_backend_specific_checks(self, databases=None, **kwargs):
395 if databases is None:
396 return []
397 errors = []
398 for alias in databases:
399 if router.allow_migrate_model(alias, self.model):
400 errors.extend(connections[alias].validation.check_field(self, **kwargs))
401 return errors
402
403 def _check_validators(self):
404 errors = []
405 for i, validator in enumerate(self.validators):
406 if not callable(validator):
407 errors.append(
408 preflight.Error(
409 "All 'validators' must be callable.",
410 hint=(
411 f"validators[{i}] ({repr(validator)}) isn't a function or "
412 "instance of a validator class."
413 ),
414 obj=self,
415 id="fields.E008",
416 )
417 )
418 return errors
419
420 def _check_deprecation_details(self):
421 if self.system_check_removed_details is not None:
422 return [
423 preflight.Error(
424 self.system_check_removed_details.get(
425 "msg",
426 f"{self.__class__.__name__} has been removed except for support in historical "
427 "migrations.",
428 ),
429 hint=self.system_check_removed_details.get("hint"),
430 obj=self,
431 id=self.system_check_removed_details.get("id", "fields.EXXX"),
432 )
433 ]
434 elif self.system_check_deprecated_details is not None:
435 return [
436 preflight.Warning(
437 self.system_check_deprecated_details.get(
438 "msg", f"{self.__class__.__name__} has been deprecated."
439 ),
440 hint=self.system_check_deprecated_details.get("hint"),
441 obj=self,
442 id=self.system_check_deprecated_details.get("id", "fields.WXXX"),
443 )
444 ]
445 return []
446
447 def get_col(self, alias, output_field=None):
448 if alias == self.model._meta.db_table and (
449 output_field is None or output_field == self
450 ):
451 return self.cached_col
452 from plain.models.expressions import Col
453
454 return Col(alias, self, output_field)
455
456 @cached_property
457 def cached_col(self):
458 from plain.models.expressions import Col
459
460 return Col(self.model._meta.db_table, self)
461
462 def select_format(self, compiler, sql, params):
463 """
464 Custom format for select clauses. For example, GIS columns need to be
465 selected as AsText(table.col) on MySQL as the table.col data can't be
466 used by Plain.
467 """
468 return sql, params
469
470 def deconstruct(self):
471 """
472 Return enough information to recreate the field as a 4-tuple:
473
474 * The name of the field on the model, if contribute_to_class() has
475 been run.
476 * The import path of the field, including the class, e.g.
477 plain.models.IntegerField. This should be the most portable
478 version, so less specific may be better.
479 * A list of positional arguments.
480 * A dict of keyword arguments.
481
482 Note that the positional or keyword arguments must contain values of
483 the following types (including inner values of collection types):
484
485 * None, bool, str, int, float, complex, set, frozenset, list, tuple,
486 dict
487 * UUID
488 * datetime.datetime (naive), datetime.date
489 * top-level classes, top-level functions - will be referenced by their
490 full import path
491 * Storage instances - these have their own deconstruct() method
492
493 This is because the values here must be serialized into a text format
494 (possibly new Python code, possibly JSON) and these are the only types
495 with encoding handlers defined.
496
497 There's no need to return the exact way the field was instantiated this
498 time, just ensure that the resulting field is the same - prefer keyword
499 arguments over positional ones, and omit parameters with their default
500 values.
501 """
502 # Short-form way of fetching all the default parameters
503 keywords = {}
504 possibles = {
505 "primary_key": False,
506 "max_length": None,
507 "required": True,
508 "allow_null": False,
509 "default": NOT_PROVIDED,
510 "choices": None,
511 "db_column": None,
512 "db_comment": None,
513 "auto_created": False,
514 "validators": [],
515 "error_messages": None,
516 }
517 attr_overrides = {
518 "error_messages": "_error_messages",
519 "validators": "_validators",
520 }
521 equals_comparison = {"choices", "validators"}
522 for name, default in possibles.items():
523 value = getattr(self, attr_overrides.get(name, name))
524 # Unroll anything iterable for choices into a concrete list
525 if name == "choices" and isinstance(value, collections.abc.Iterable):
526 value = list(value)
527 # Do correct kind of comparison
528 if name in equals_comparison:
529 if value != default:
530 keywords[name] = value
531 else:
532 if value is not default:
533 keywords[name] = value
534 # Work out path - we shorten it for known Plain core fields
535 path = f"{self.__class__.__module__}.{self.__class__.__qualname__}"
536 if path.startswith("plain.models.fields.related"):
537 path = path.replace("plain.models.fields.related", "plain.models")
538 elif path.startswith("plain.models.fields.json"):
539 path = path.replace("plain.models.fields.json", "plain.models")
540 elif path.startswith("plain.models.fields.proxy"):
541 path = path.replace("plain.models.fields.proxy", "plain.models")
542 elif path.startswith("plain.models.fields"):
543 path = path.replace("plain.models.fields", "plain.models")
544 # Return basic info - other fields should override this.
545 return (self.name, path, [], keywords)
546
547 def clone(self):
548 """
549 Uses deconstruct() to clone a new copy of this Field.
550 Will not preserve any class attachments/attribute names.
551 """
552 name, path, args, kwargs = self.deconstruct()
553 return self.__class__(*args, **kwargs)
554
555 def __eq__(self, other):
556 # Needed for @total_ordering
557 if isinstance(other, Field):
558 return self.creation_counter == other.creation_counter and getattr(
559 self, "model", None
560 ) == getattr(other, "model", None)
561 return NotImplemented
562
563 def __lt__(self, other):
564 # This is needed because bisect does not take a comparison function.
565 # Order by creation_counter first for backward compatibility.
566 if isinstance(other, Field):
567 if (
568 self.creation_counter != other.creation_counter
569 or not hasattr(self, "model")
570 and not hasattr(other, "model")
571 ):
572 return self.creation_counter < other.creation_counter
573 elif hasattr(self, "model") != hasattr(other, "model"):
574 return not hasattr(self, "model") # Order no-model fields first
575 else:
576 # creation_counter's are equal, compare only models.
577 return (self.model._meta.package_label, self.model._meta.model_name) < (
578 other.model._meta.package_label,
579 other.model._meta.model_name,
580 )
581 return NotImplemented
582
583 def __hash__(self):
584 return hash(self.creation_counter)
585
586 def __deepcopy__(self, memodict):
587 # We don't have to deepcopy very much here, since most things are not
588 # intended to be altered after initial creation.
589 obj = copy.copy(self)
590 if self.remote_field:
591 obj.remote_field = copy.copy(self.remote_field)
592 if hasattr(self.remote_field, "field") and self.remote_field.field is self:
593 obj.remote_field.field = obj
594 memodict[id(self)] = obj
595 return obj
596
597 def __copy__(self):
598 # We need to avoid hitting __reduce__, so define this
599 # slightly weird copy construct.
600 obj = Empty()
601 obj.__class__ = self.__class__
602 obj.__dict__ = self.__dict__.copy()
603 return obj
604
605 def __reduce__(self):
606 """
607 Pickling should return the model._meta.fields instance of the field,
608 not a new copy of that field. So, use the app registry to load the
609 model and then the field back.
610 """
611 if not hasattr(self, "model"):
612 # Fields are sometimes used without attaching them to models (for
613 # example in aggregation). In this case give back a plain field
614 # instance. The code below will create a new empty instance of
615 # class self.__class__, then update its dict with self.__dict__
616 # values - so, this is very close to normal pickle.
617 state = self.__dict__.copy()
618 # The _get_default cached_property can't be pickled due to lambda
619 # usage.
620 state.pop("_get_default", None)
621 return _empty, (self.__class__,), state
622 return _load_field, (
623 self.model._meta.package_label,
624 self.model._meta.object_name,
625 self.name,
626 )
627
628 def get_pk_value_on_save(self, instance):
629 """
630 Hook to generate new PK values on save. This method is called when
631 saving instances with no primary key value set. If this method returns
632 something else than None, then the returned value is used when saving
633 the new instance.
634 """
635 if self.default:
636 return self.get_default()
637 return None
638
639 def to_python(self, value):
640 """
641 Convert the input value into the expected Python data type, raising
642 plain.exceptions.ValidationError if the data can't be converted.
643 Return the converted value. Subclasses should override this.
644 """
645 return value
646
647 @cached_property
648 def error_messages(self):
649 messages = {}
650 for c in reversed(self.__class__.__mro__):
651 messages.update(getattr(c, "default_error_messages", {}))
652 messages.update(self._error_messages or {})
653 return messages
654
655 @cached_property
656 def validators(self):
657 """
658 Some validators can't be created at field initialization time.
659 This method provides a way to delay their creation until required.
660 """
661 return [*self.default_validators, *self._validators]
662
663 def run_validators(self, value):
664 if value in self.empty_values:
665 return
666
667 errors = []
668 for v in self.validators:
669 try:
670 v(value)
671 except exceptions.ValidationError as e:
672 if hasattr(e, "code") and e.code in self.error_messages:
673 e.message = self.error_messages[e.code]
674 errors.extend(e.error_list)
675
676 if errors:
677 raise exceptions.ValidationError(errors)
678
679 def validate(self, value, model_instance):
680 """
681 Validate value and raise ValidationError if necessary. Subclasses
682 should override this to provide validation logic.
683 """
684
685 if self.choices is not None and value not in self.empty_values:
686 for option_key, option_value in self.choices:
687 if isinstance(option_value, list | tuple):
688 # This is an optgroup, so look inside the group for
689 # options.
690 for optgroup_key, optgroup_value in option_value:
691 if value == optgroup_key:
692 return
693 elif value == option_key:
694 return
695 raise exceptions.ValidationError(
696 self.error_messages["invalid_choice"],
697 code="invalid_choice",
698 params={"value": value},
699 )
700
701 if value is None and not self.allow_null:
702 raise exceptions.ValidationError(
703 self.error_messages["allow_null"], code="allow_null"
704 )
705
706 if self.required and value in self.empty_values:
707 raise exceptions.ValidationError(
708 self.error_messages["required"], code="required"
709 )
710
711 def clean(self, value, model_instance):
712 """
713 Convert the value's type and run validation. Validation errors
714 from to_python() and validate() are propagated. Return the correct
715 value if no error is raised.
716 """
717 value = self.to_python(value)
718 self.validate(value, model_instance)
719 self.run_validators(value)
720 return value
721
722 def db_type_parameters(self, connection):
723 return DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
724
725 def db_check(self, connection):
726 """
727 Return the database column check constraint for this field, for the
728 provided connection. Works the same way as db_type() for the case that
729 get_internal_type() does not map to a preexisting model field.
730 """
731 data = self.db_type_parameters(connection)
732 try:
733 return (
734 connection.data_type_check_constraints[self.get_internal_type()] % data
735 )
736 except KeyError:
737 return None
738
739 def db_type(self, connection):
740 """
741 Return the database column data type for this field, for the provided
742 connection.
743 """
744 # The default implementation of this method looks at the
745 # backend-specific data_types dictionary, looking up the field by its
746 # "internal type".
747 #
748 # A Field class can implement the get_internal_type() method to specify
749 # which *preexisting* Plain Field class it's most similar to -- i.e.,
750 # a custom field might be represented by a TEXT column type, which is
751 # the same as the TextField Plain field type, which means the custom
752 # field's get_internal_type() returns 'TextField'.
753 #
754 # But the limitation of the get_internal_type() / data_types approach
755 # is that it cannot handle database column types that aren't already
756 # mapped to one of the built-in Plain field types. In this case, you
757 # can implement db_type() instead of get_internal_type() to specify
758 # exactly which wacky database column type you want to use.
759 data = self.db_type_parameters(connection)
760 try:
761 column_type = connection.data_types[self.get_internal_type()]
762 except KeyError:
763 return None
764 else:
765 # column_type is either a single-parameter function or a string.
766 if callable(column_type):
767 return column_type(data)
768 return column_type % data
769
770 def rel_db_type(self, connection):
771 """
772 Return the data type that a related field pointing to this field should
773 use. For example, this method is called by ForeignKey to determine its data type.
774 """
775 return self.db_type(connection)
776
777 def cast_db_type(self, connection):
778 """Return the data type to use in the Cast() function."""
779 db_type = connection.ops.cast_data_types.get(self.get_internal_type())
780 if db_type:
781 return db_type % self.db_type_parameters(connection)
782 return self.db_type(connection)
783
784 def db_parameters(self, connection):
785 """
786 Extension of db_type(), providing a range of different return values
787 (type, checks). This will look at db_type(), allowing custom model
788 fields to override it.
789 """
790 type_string = self.db_type(connection)
791 check_string = self.db_check(connection)
792 return {
793 "type": type_string,
794 "check": check_string,
795 }
796
797 def db_type_suffix(self, connection):
798 return connection.data_types_suffix.get(self.get_internal_type())
799
800 def get_db_converters(self, connection):
801 if hasattr(self, "from_db_value"):
802 return [self.from_db_value]
803 return []
804
805 @property
806 def db_returning(self):
807 """
808 Private API intended only to be used by Plain itself. Currently only
809 the PostgreSQL backend supports returning multiple fields on a model.
810 """
811 return False
812
813 def set_attributes_from_name(self, name):
814 self.name = self.name or name
815 self.attname, self.column = self.get_attname_column()
816 self.concrete = self.column is not None
817
818 def contribute_to_class(self, cls, name, private_only=False):
819 """
820 Register the field with the model class it belongs to.
821
822 If private_only is True, create a separate instance of this field
823 for every subclass of cls, even if cls is not an abstract model.
824 """
825 self.set_attributes_from_name(name)
826 self.model = cls
827 cls._meta.add_field(self, private=private_only)
828 if self.column:
829 setattr(cls, self.attname, self.descriptor_class(self))
830 if self.choices is not None:
831 # Don't override a get_FOO_display() method defined explicitly on
832 # this class, but don't check methods derived from inheritance, to
833 # allow overriding inherited choices. For more complex inheritance
834 # structures users should override contribute_to_class().
835 if f"get_{self.name}_display" not in cls.__dict__:
836 setattr(
837 cls,
838 f"get_{self.name}_display",
839 partialmethod(cls._get_FIELD_display, field=self),
840 )
841
842 def get_attname(self):
843 return self.name
844
845 def get_attname_column(self):
846 attname = self.get_attname()
847 column = self.db_column or attname
848 return attname, column
849
850 def get_internal_type(self):
851 return self.__class__.__name__
852
853 def pre_save(self, model_instance, add):
854 """Return field's value just before saving."""
855 return getattr(model_instance, self.attname)
856
857 def get_prep_value(self, value):
858 """Perform preliminary non-db specific value checks and conversions."""
859 if isinstance(value, Promise):
860 value = value._proxy____cast()
861 return value
862
863 def get_db_prep_value(self, value, connection, prepared=False):
864 """
865 Return field's value prepared for interacting with the database backend.
866
867 Used by the default implementations of get_db_prep_save().
868 """
869 if not prepared:
870 value = self.get_prep_value(value)
871 return value
872
873 def get_db_prep_save(self, value, connection):
874 """Return field's value prepared for saving into a database."""
875 if hasattr(value, "as_sql"):
876 return value
877 return self.get_db_prep_value(value, connection=connection, prepared=False)
878
879 def has_default(self):
880 """Return a boolean of whether this field has a default value."""
881 return self.default is not NOT_PROVIDED
882
883 def get_default(self):
884 """Return the default value for this field."""
885 return self._get_default()
886
887 @cached_property
888 def _get_default(self):
889 if self.has_default():
890 if callable(self.default):
891 return self.default
892 return lambda: self.default
893
894 if (
895 not self.empty_strings_allowed
896 or self.allow_null
897 and not connection.features.interprets_empty_strings_as_nulls
898 ):
899 return return_None
900 return str # return empty string
901
902 def get_choices(
903 self,
904 include_blank=True,
905 blank_choice=BLANK_CHOICE_DASH,
906 limit_choices_to=None,
907 ordering=(),
908 ):
909 """
910 Return choices with a default blank choices included, for use
911 as <select> choices for this field.
912 """
913 if self.choices is not None:
914 choices = list(self.choices)
915 if include_blank:
916 blank_defined = any(
917 choice in ("", None) for choice, _ in self.flatchoices
918 )
919 if not blank_defined:
920 choices = blank_choice + choices
921 return choices
922 rel_model = self.remote_field.model
923 limit_choices_to = limit_choices_to or self.get_limit_choices_to()
924 choice_func = operator.attrgetter(
925 self.remote_field.get_related_field().attname
926 if hasattr(self.remote_field, "get_related_field")
927 else "pk"
928 )
929 qs = rel_model._default_manager.complex_filter(limit_choices_to)
930 if ordering:
931 qs = qs.order_by(*ordering)
932 return (blank_choice if include_blank else []) + [
933 (choice_func(x), str(x)) for x in qs
934 ]
935
936 def value_to_string(self, obj):
937 """
938 Return a string value of this field from the passed obj.
939 This is used by the serialization framework.
940 """
941 return str(self.value_from_object(obj))
942
943 def _get_flatchoices(self):
944 """Flattened version of choices tuple."""
945 if self.choices is None:
946 return []
947 flat = []
948 for choice, value in self.choices:
949 if isinstance(value, list | tuple):
950 flat.extend(value)
951 else:
952 flat.append((choice, value))
953 return flat
954
955 flatchoices = property(_get_flatchoices)
956
957 def save_form_data(self, instance, data):
958 setattr(instance, self.name, data)
959
960 def value_from_object(self, obj):
961 """Return the value of this field in the given model instance."""
962 return getattr(obj, self.attname)
963
964
965class BooleanField(Field):
966 empty_strings_allowed = False
967 default_error_messages = {
968 "invalid": "โ%(value)sโ value must be either True or False.",
969 "invalid_nullable": "โ%(value)sโ value must be either True, False, or None.",
970 }
971 description = "Boolean (Either True or False)"
972
973 def get_internal_type(self):
974 return "BooleanField"
975
976 def to_python(self, value):
977 if self.allow_null and value in self.empty_values:
978 return None
979 if value in (True, False):
980 # 1/0 are equal to True/False. bool() converts former to latter.
981 return bool(value)
982 if value in ("t", "True", "1"):
983 return True
984 if value in ("f", "False", "0"):
985 return False
986 raise exceptions.ValidationError(
987 self.error_messages["invalid_nullable" if self.allow_null else "invalid"],
988 code="invalid",
989 params={"value": value},
990 )
991
992 def get_prep_value(self, value):
993 value = super().get_prep_value(value)
994 if value is None:
995 return None
996 return self.to_python(value)
997
998
999class CharField(Field):
1000 def __init__(self, *, db_collation=None, **kwargs):
1001 super().__init__(**kwargs)
1002 self.db_collation = db_collation
1003 if self.max_length is not None:
1004 self.validators.append(validators.MaxLengthValidator(self.max_length))
1005
1006 @property
1007 def description(self):
1008 if self.max_length is not None:
1009 return "String (up to %(max_length)s)"
1010 else:
1011 return "String (unlimited)"
1012
1013 def check(self, **kwargs):
1014 databases = kwargs.get("databases") or []
1015 return [
1016 *super().check(**kwargs),
1017 *self._check_db_collation(databases),
1018 *self._check_max_length_attribute(**kwargs),
1019 ]
1020
1021 def _check_max_length_attribute(self, **kwargs):
1022 if self.max_length is None:
1023 if (
1024 connection.features.supports_unlimited_charfield
1025 or "supports_unlimited_charfield"
1026 in self.model._meta.required_db_features
1027 ):
1028 return []
1029 return [
1030 preflight.Error(
1031 "CharFields must define a 'max_length' attribute.",
1032 obj=self,
1033 id="fields.E120",
1034 )
1035 ]
1036 elif (
1037 not isinstance(self.max_length, int)
1038 or isinstance(self.max_length, bool)
1039 or self.max_length <= 0
1040 ):
1041 return [
1042 preflight.Error(
1043 "'max_length' must be a positive integer.",
1044 obj=self,
1045 id="fields.E121",
1046 )
1047 ]
1048 else:
1049 return []
1050
1051 def _check_db_collation(self, databases):
1052 errors = []
1053 for db in databases:
1054 if not router.allow_migrate_model(db, self.model):
1055 continue
1056 connection = connections[db]
1057 if not (
1058 self.db_collation is None
1059 or "supports_collation_on_charfield"
1060 in self.model._meta.required_db_features
1061 or connection.features.supports_collation_on_charfield
1062 ):
1063 errors.append(
1064 preflight.Error(
1065 f"{connection.display_name} does not support a database collation on "
1066 "CharFields.",
1067 obj=self,
1068 id="fields.E190",
1069 ),
1070 )
1071 return errors
1072
1073 def cast_db_type(self, connection):
1074 if self.max_length is None:
1075 return connection.ops.cast_char_field_without_max_length
1076 return super().cast_db_type(connection)
1077
1078 def db_parameters(self, connection):
1079 db_params = super().db_parameters(connection)
1080 db_params["collation"] = self.db_collation
1081 return db_params
1082
1083 def get_internal_type(self):
1084 return "CharField"
1085
1086 def to_python(self, value):
1087 if isinstance(value, str) or value is None:
1088 return value
1089 return str(value)
1090
1091 def get_prep_value(self, value):
1092 value = super().get_prep_value(value)
1093 return self.to_python(value)
1094
1095 def deconstruct(self):
1096 name, path, args, kwargs = super().deconstruct()
1097 if self.db_collation:
1098 kwargs["db_collation"] = self.db_collation
1099 return name, path, args, kwargs
1100
1101
1102class CommaSeparatedIntegerField(CharField):
1103 default_validators = [validators.validate_comma_separated_integer_list]
1104 description = "Comma-separated integers"
1105 system_check_removed_details = {
1106 "msg": (
1107 "CommaSeparatedIntegerField is removed except for support in "
1108 "historical migrations."
1109 ),
1110 "hint": (
1111 "Use CharField(validators=[validate_comma_separated_integer_list]) instead."
1112 ),
1113 "id": "fields.E901",
1114 }
1115
1116
1117def _to_naive(value):
1118 if timezone.is_aware(value):
1119 value = timezone.make_naive(value, datetime.UTC)
1120 return value
1121
1122
1123def _get_naive_now():
1124 return _to_naive(timezone.now())
1125
1126
1127class DateTimeCheckMixin:
1128 def check(self, **kwargs):
1129 return [
1130 *super().check(**kwargs),
1131 *self._check_mutually_exclusive_options(),
1132 *self._check_fix_default_value(),
1133 ]
1134
1135 def _check_mutually_exclusive_options(self):
1136 # auto_now, auto_now_add, and default are mutually exclusive
1137 # options. The use of more than one of these options together
1138 # will trigger an Error
1139 mutually_exclusive_options = [
1140 self.auto_now_add,
1141 self.auto_now,
1142 self.has_default(),
1143 ]
1144 enabled_options = [
1145 option not in (None, False) for option in mutually_exclusive_options
1146 ].count(True)
1147 if enabled_options > 1:
1148 return [
1149 preflight.Error(
1150 "The options auto_now, auto_now_add, and default "
1151 "are mutually exclusive. Only one of these options "
1152 "may be present.",
1153 obj=self,
1154 id="fields.E160",
1155 )
1156 ]
1157 else:
1158 return []
1159
1160 def _check_fix_default_value(self):
1161 return []
1162
1163 # Concrete subclasses use this in their implementations of
1164 # _check_fix_default_value().
1165 def _check_if_value_fixed(self, value, now=None):
1166 """
1167 Check if the given value appears to have been provided as a "fixed"
1168 time value, and include a warning in the returned list if it does. The
1169 value argument must be a date object or aware/naive datetime object. If
1170 now is provided, it must be a naive datetime object.
1171 """
1172 if now is None:
1173 now = _get_naive_now()
1174 offset = datetime.timedelta(seconds=10)
1175 lower = now - offset
1176 upper = now + offset
1177 if isinstance(value, datetime.datetime):
1178 value = _to_naive(value)
1179 else:
1180 assert isinstance(value, datetime.date)
1181 lower = lower.date()
1182 upper = upper.date()
1183 if lower <= value <= upper:
1184 return [
1185 preflight.Warning(
1186 "Fixed default value provided.",
1187 hint=(
1188 "It seems you set a fixed date / time / datetime "
1189 "value as default for this field. This may not be "
1190 "what you want. If you want to have the current date "
1191 "as default, use `plain.utils.timezone.now`"
1192 ),
1193 obj=self,
1194 id="fields.W161",
1195 )
1196 ]
1197 return []
1198
1199
1200class DateField(DateTimeCheckMixin, Field):
1201 empty_strings_allowed = False
1202 default_error_messages = {
1203 "invalid": "โ%(value)sโ value has an invalid date format. It must be in YYYY-MM-DD format.",
1204 "invalid_date": "โ%(value)sโ value has the correct format (YYYY-MM-DD) but it is an invalid date.",
1205 }
1206 description = "Date (without time)"
1207
1208 def __init__(self, *, auto_now=False, auto_now_add=False, **kwargs):
1209 self.auto_now, self.auto_now_add = auto_now, auto_now_add
1210 if auto_now or auto_now_add:
1211 kwargs["required"] = False
1212 super().__init__(**kwargs)
1213
1214 def _check_fix_default_value(self):
1215 """
1216 Warn that using an actual date or datetime value is probably wrong;
1217 it's only evaluated on server startup.
1218 """
1219 if not self.has_default():
1220 return []
1221
1222 value = self.default
1223 if isinstance(value, datetime.datetime):
1224 value = _to_naive(value).date()
1225 elif isinstance(value, datetime.date):
1226 pass
1227 else:
1228 # No explicit date / datetime value -- no checks necessary
1229 return []
1230 # At this point, value is a date object.
1231 return self._check_if_value_fixed(value)
1232
1233 def deconstruct(self):
1234 name, path, args, kwargs = super().deconstruct()
1235 if self.auto_now:
1236 kwargs["auto_now"] = True
1237 if self.auto_now_add:
1238 kwargs["auto_now_add"] = True
1239 if self.auto_now or self.auto_now_add:
1240 del kwargs["required"]
1241 return name, path, args, kwargs
1242
1243 def get_internal_type(self):
1244 return "DateField"
1245
1246 def to_python(self, value):
1247 if value is None:
1248 return value
1249 if isinstance(value, datetime.datetime):
1250 if timezone.is_aware(value):
1251 # Convert aware datetimes to the default time zone
1252 # before casting them to dates (#17742).
1253 default_timezone = timezone.get_default_timezone()
1254 value = timezone.make_naive(value, default_timezone)
1255 return value.date()
1256 if isinstance(value, datetime.date):
1257 return value
1258
1259 try:
1260 parsed = parse_date(value)
1261 if parsed is not None:
1262 return parsed
1263 except ValueError:
1264 raise exceptions.ValidationError(
1265 self.error_messages["invalid_date"],
1266 code="invalid_date",
1267 params={"value": value},
1268 )
1269
1270 raise exceptions.ValidationError(
1271 self.error_messages["invalid"],
1272 code="invalid",
1273 params={"value": value},
1274 )
1275
1276 def pre_save(self, model_instance, add):
1277 if self.auto_now or (self.auto_now_add and add):
1278 value = datetime.date.today()
1279 setattr(model_instance, self.attname, value)
1280 return value
1281 else:
1282 return super().pre_save(model_instance, add)
1283
1284 def contribute_to_class(self, cls, name, **kwargs):
1285 super().contribute_to_class(cls, name, **kwargs)
1286 if not self.allow_null:
1287 setattr(
1288 cls,
1289 f"get_next_by_{self.name}",
1290 partialmethod(
1291 cls._get_next_or_previous_by_FIELD, field=self, is_next=True
1292 ),
1293 )
1294 setattr(
1295 cls,
1296 f"get_previous_by_{self.name}",
1297 partialmethod(
1298 cls._get_next_or_previous_by_FIELD, field=self, is_next=False
1299 ),
1300 )
1301
1302 def get_prep_value(self, value):
1303 value = super().get_prep_value(value)
1304 return self.to_python(value)
1305
1306 def get_db_prep_value(self, value, connection, prepared=False):
1307 # Casts dates into the format expected by the backend
1308 if not prepared:
1309 value = self.get_prep_value(value)
1310 return connection.ops.adapt_datefield_value(value)
1311
1312 def value_to_string(self, obj):
1313 val = self.value_from_object(obj)
1314 return "" if val is None else val.isoformat()
1315
1316
1317class DateTimeField(DateField):
1318 empty_strings_allowed = False
1319 default_error_messages = {
1320 "invalid": "โ%(value)sโ value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format.",
1321 "invalid_date": "โ%(value)sโ value has the correct format (YYYY-MM-DD) but it is an invalid date.",
1322 "invalid_datetime": "โ%(value)sโ value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) but it is an invalid date/time.",
1323 }
1324 description = "Date (with time)"
1325
1326 # __init__ is inherited from DateField
1327
1328 def _check_fix_default_value(self):
1329 """
1330 Warn that using an actual date or datetime value is probably wrong;
1331 it's only evaluated on server startup.
1332 """
1333 if not self.has_default():
1334 return []
1335
1336 value = self.default
1337 if isinstance(value, datetime.datetime | datetime.date):
1338 return self._check_if_value_fixed(value)
1339 # No explicit date / datetime value -- no checks necessary.
1340 return []
1341
1342 def get_internal_type(self):
1343 return "DateTimeField"
1344
1345 def to_python(self, value):
1346 if value is None:
1347 return value
1348 if isinstance(value, datetime.datetime):
1349 return value
1350 if isinstance(value, datetime.date):
1351 value = datetime.datetime(value.year, value.month, value.day)
1352
1353 # For backwards compatibility, interpret naive datetimes in
1354 # local time. This won't work during DST change, but we can't
1355 # do much about it, so we let the exceptions percolate up the
1356 # call stack.
1357 warnings.warn(
1358 f"DateTimeField {self.model.__name__}.{self.name} received a naive datetime "
1359 f"({value}) while time zone support is active.",
1360 RuntimeWarning,
1361 )
1362 default_timezone = timezone.get_default_timezone()
1363 value = timezone.make_aware(value, default_timezone)
1364
1365 return value
1366
1367 try:
1368 parsed = parse_datetime(value)
1369 if parsed is not None:
1370 return parsed
1371 except ValueError:
1372 raise exceptions.ValidationError(
1373 self.error_messages["invalid_datetime"],
1374 code="invalid_datetime",
1375 params={"value": value},
1376 )
1377
1378 try:
1379 parsed = parse_date(value)
1380 if parsed is not None:
1381 return datetime.datetime(parsed.year, parsed.month, parsed.day)
1382 except ValueError:
1383 raise exceptions.ValidationError(
1384 self.error_messages["invalid_date"],
1385 code="invalid_date",
1386 params={"value": value},
1387 )
1388
1389 raise exceptions.ValidationError(
1390 self.error_messages["invalid"],
1391 code="invalid",
1392 params={"value": value},
1393 )
1394
1395 def pre_save(self, model_instance, add):
1396 if self.auto_now or (self.auto_now_add and add):
1397 value = timezone.now()
1398 setattr(model_instance, self.attname, value)
1399 return value
1400 else:
1401 return super().pre_save(model_instance, add)
1402
1403 # contribute_to_class is inherited from DateField, it registers
1404 # get_next_by_FOO and get_prev_by_FOO
1405
1406 def get_prep_value(self, value):
1407 value = super().get_prep_value(value)
1408 value = self.to_python(value)
1409 if value is not None and timezone.is_naive(value):
1410 # For backwards compatibility, interpret naive datetimes in local
1411 # time. This won't work during DST change, but we can't do much
1412 # about it, so we let the exceptions percolate up the call stack.
1413 try:
1414 name = f"{self.model.__name__}.{self.name}"
1415 except AttributeError:
1416 name = "(unbound)"
1417 warnings.warn(
1418 f"DateTimeField {name} received a naive datetime ({value})"
1419 " while time zone support is active.",
1420 RuntimeWarning,
1421 )
1422 default_timezone = timezone.get_default_timezone()
1423 value = timezone.make_aware(value, default_timezone)
1424 return value
1425
1426 def get_db_prep_value(self, value, connection, prepared=False):
1427 # Casts datetimes into the format expected by the backend
1428 if not prepared:
1429 value = self.get_prep_value(value)
1430 return connection.ops.adapt_datetimefield_value(value)
1431
1432 def value_to_string(self, obj):
1433 val = self.value_from_object(obj)
1434 return "" if val is None else val.isoformat()
1435
1436
1437class DecimalField(Field):
1438 empty_strings_allowed = False
1439 default_error_messages = {
1440 "invalid": "โ%(value)sโ value must be a decimal number.",
1441 }
1442 description = "Decimal number"
1443
1444 def __init__(
1445 self,
1446 *,
1447 max_digits=None,
1448 decimal_places=None,
1449 **kwargs,
1450 ):
1451 self.max_digits, self.decimal_places = max_digits, decimal_places
1452 super().__init__(**kwargs)
1453
1454 def check(self, **kwargs):
1455 errors = super().check(**kwargs)
1456
1457 digits_errors = [
1458 *self._check_decimal_places(),
1459 *self._check_max_digits(),
1460 ]
1461 if not digits_errors:
1462 errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
1463 else:
1464 errors.extend(digits_errors)
1465 return errors
1466
1467 def _check_decimal_places(self):
1468 try:
1469 decimal_places = int(self.decimal_places)
1470 if decimal_places < 0:
1471 raise ValueError()
1472 except TypeError:
1473 return [
1474 preflight.Error(
1475 "DecimalFields must define a 'decimal_places' attribute.",
1476 obj=self,
1477 id="fields.E130",
1478 )
1479 ]
1480 except ValueError:
1481 return [
1482 preflight.Error(
1483 "'decimal_places' must be a non-negative integer.",
1484 obj=self,
1485 id="fields.E131",
1486 )
1487 ]
1488 else:
1489 return []
1490
1491 def _check_max_digits(self):
1492 try:
1493 max_digits = int(self.max_digits)
1494 if max_digits <= 0:
1495 raise ValueError()
1496 except TypeError:
1497 return [
1498 preflight.Error(
1499 "DecimalFields must define a 'max_digits' attribute.",
1500 obj=self,
1501 id="fields.E132",
1502 )
1503 ]
1504 except ValueError:
1505 return [
1506 preflight.Error(
1507 "'max_digits' must be a positive integer.",
1508 obj=self,
1509 id="fields.E133",
1510 )
1511 ]
1512 else:
1513 return []
1514
1515 def _check_decimal_places_and_max_digits(self, **kwargs):
1516 if int(self.decimal_places) > int(self.max_digits):
1517 return [
1518 preflight.Error(
1519 "'max_digits' must be greater or equal to 'decimal_places'.",
1520 obj=self,
1521 id="fields.E134",
1522 )
1523 ]
1524 return []
1525
1526 @cached_property
1527 def validators(self):
1528 return super().validators + [
1529 validators.DecimalValidator(self.max_digits, self.decimal_places)
1530 ]
1531
1532 @cached_property
1533 def context(self):
1534 return decimal.Context(prec=self.max_digits)
1535
1536 def deconstruct(self):
1537 name, path, args, kwargs = super().deconstruct()
1538 if self.max_digits is not None:
1539 kwargs["max_digits"] = self.max_digits
1540 if self.decimal_places is not None:
1541 kwargs["decimal_places"] = self.decimal_places
1542 return name, path, args, kwargs
1543
1544 def get_internal_type(self):
1545 return "DecimalField"
1546
1547 def to_python(self, value):
1548 if value is None:
1549 return value
1550 try:
1551 if isinstance(value, float):
1552 decimal_value = self.context.create_decimal_from_float(value)
1553 else:
1554 decimal_value = decimal.Decimal(value)
1555 except (decimal.InvalidOperation, TypeError, ValueError):
1556 raise exceptions.ValidationError(
1557 self.error_messages["invalid"],
1558 code="invalid",
1559 params={"value": value},
1560 )
1561 if not decimal_value.is_finite():
1562 raise exceptions.ValidationError(
1563 self.error_messages["invalid"],
1564 code="invalid",
1565 params={"value": value},
1566 )
1567 return decimal_value
1568
1569 def get_db_prep_value(self, value, connection, prepared=False):
1570 if not prepared:
1571 value = self.get_prep_value(value)
1572 if hasattr(value, "as_sql"):
1573 return value
1574 return connection.ops.adapt_decimalfield_value(
1575 value, self.max_digits, self.decimal_places
1576 )
1577
1578 def get_prep_value(self, value):
1579 value = super().get_prep_value(value)
1580 return self.to_python(value)
1581
1582
1583class DurationField(Field):
1584 """
1585 Store timedelta objects.
1586
1587 Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
1588 of microseconds on other databases.
1589 """
1590
1591 empty_strings_allowed = False
1592 default_error_messages = {
1593 "invalid": "โ%(value)sโ value has an invalid format. It must be in [DD] [[HH:]MM:]ss[.uuuuuu] format.",
1594 }
1595 description = "Duration"
1596
1597 def get_internal_type(self):
1598 return "DurationField"
1599
1600 def to_python(self, value):
1601 if value is None:
1602 return value
1603 if isinstance(value, datetime.timedelta):
1604 return value
1605 try:
1606 parsed = parse_duration(value)
1607 except ValueError:
1608 pass
1609 else:
1610 if parsed is not None:
1611 return parsed
1612
1613 raise exceptions.ValidationError(
1614 self.error_messages["invalid"],
1615 code="invalid",
1616 params={"value": value},
1617 )
1618
1619 def get_db_prep_value(self, value, connection, prepared=False):
1620 if connection.features.has_native_duration_field:
1621 return value
1622 if value is None:
1623 return None
1624 return duration_microseconds(value)
1625
1626 def get_db_converters(self, connection):
1627 converters = []
1628 if not connection.features.has_native_duration_field:
1629 converters.append(connection.ops.convert_durationfield_value)
1630 return converters + super().get_db_converters(connection)
1631
1632 def value_to_string(self, obj):
1633 val = self.value_from_object(obj)
1634 return "" if val is None else duration_string(val)
1635
1636
1637class EmailField(CharField):
1638 default_validators = [validators.validate_email]
1639 description = "Email address"
1640
1641 def __init__(self, **kwargs):
1642 # max_length=254 to be compliant with RFCs 3696 and 5321
1643 kwargs.setdefault("max_length", 254)
1644 super().__init__(**kwargs)
1645
1646 def deconstruct(self):
1647 name, path, args, kwargs = super().deconstruct()
1648 # We do not exclude max_length if it matches default as we want to change
1649 # the default in future.
1650 return name, path, args, kwargs
1651
1652
1653class FloatField(Field):
1654 empty_strings_allowed = False
1655 default_error_messages = {
1656 "invalid": "โ%(value)sโ value must be a float.",
1657 }
1658 description = "Floating point number"
1659
1660 def get_prep_value(self, value):
1661 value = super().get_prep_value(value)
1662 if value is None:
1663 return None
1664 try:
1665 return float(value)
1666 except (TypeError, ValueError) as e:
1667 raise e.__class__(
1668 f"Field '{self.name}' expected a number but got {value!r}.",
1669 ) from e
1670
1671 def get_internal_type(self):
1672 return "FloatField"
1673
1674 def to_python(self, value):
1675 if value is None:
1676 return value
1677 try:
1678 return float(value)
1679 except (TypeError, ValueError):
1680 raise exceptions.ValidationError(
1681 self.error_messages["invalid"],
1682 code="invalid",
1683 params={"value": value},
1684 )
1685
1686
1687class IntegerField(Field):
1688 empty_strings_allowed = False
1689 default_error_messages = {
1690 "invalid": "โ%(value)sโ value must be an integer.",
1691 }
1692 description = "Integer"
1693
1694 def check(self, **kwargs):
1695 return [
1696 *super().check(**kwargs),
1697 *self._check_max_length_warning(),
1698 ]
1699
1700 def _check_max_length_warning(self):
1701 if self.max_length is not None:
1702 return [
1703 preflight.Warning(
1704 f"'max_length' is ignored when used with {self.__class__.__name__}.",
1705 hint="Remove 'max_length' from field",
1706 obj=self,
1707 id="fields.W122",
1708 )
1709 ]
1710 return []
1711
1712 @cached_property
1713 def validators(self):
1714 # These validators can't be added at field initialization time since
1715 # they're based on values retrieved from `connection`.
1716 validators_ = super().validators
1717 internal_type = self.get_internal_type()
1718 min_value, max_value = connection.ops.integer_field_range(internal_type)
1719 if min_value is not None and not any(
1720 (
1721 isinstance(validator, validators.MinValueValidator)
1722 and (
1723 validator.limit_value()
1724 if callable(validator.limit_value)
1725 else validator.limit_value
1726 )
1727 >= min_value
1728 )
1729 for validator in validators_
1730 ):
1731 validators_.append(validators.MinValueValidator(min_value))
1732 if max_value is not None and not any(
1733 (
1734 isinstance(validator, validators.MaxValueValidator)
1735 and (
1736 validator.limit_value()
1737 if callable(validator.limit_value)
1738 else validator.limit_value
1739 )
1740 <= max_value
1741 )
1742 for validator in validators_
1743 ):
1744 validators_.append(validators.MaxValueValidator(max_value))
1745 return validators_
1746
1747 def get_prep_value(self, value):
1748 value = super().get_prep_value(value)
1749 if value is None:
1750 return None
1751 try:
1752 return int(value)
1753 except (TypeError, ValueError) as e:
1754 raise e.__class__(
1755 f"Field '{self.name}' expected a number but got {value!r}.",
1756 ) from e
1757
1758 def get_db_prep_value(self, value, connection, prepared=False):
1759 value = super().get_db_prep_value(value, connection, prepared)
1760 return connection.ops.adapt_integerfield_value(value, self.get_internal_type())
1761
1762 def get_internal_type(self):
1763 return "IntegerField"
1764
1765 def to_python(self, value):
1766 if value is None:
1767 return value
1768 try:
1769 return int(value)
1770 except (TypeError, ValueError):
1771 raise exceptions.ValidationError(
1772 self.error_messages["invalid"],
1773 code="invalid",
1774 params={"value": value},
1775 )


class BigIntegerField(IntegerField):
    description = "Big (8 byte) integer"

    def get_internal_type(self):
        return "BigIntegerField"


class SmallIntegerField(IntegerField):
    description = "Small integer"

    def get_internal_type(self):
        return "SmallIntegerField"


class IPAddressField(Field):
    empty_strings_allowed = False
    description = "IPv4 address"
    system_check_removed_details = {
        "msg": (
            "IPAddressField has been removed except for support in "
            "historical migrations."
        ),
        "hint": "Use GenericIPAddressField instead.",
        "id": "fields.E900",
    }

    def __init__(self, **kwargs):
        kwargs["max_length"] = 15
        super().__init__(**kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        del kwargs["max_length"]
        return name, path, args, kwargs

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        if value is None:
            return None
        return str(value)

    def get_internal_type(self):
        return "IPAddressField"


class GenericIPAddressField(Field):
    empty_strings_allowed = False
    description = "IP address"
    default_error_messages = {}

    def __init__(
        self,
        *,
        protocol="both",
        unpack_ipv4=False,
        **kwargs,
    ):
        self.unpack_ipv4 = unpack_ipv4
        self.protocol = protocol
        (
            self.default_validators,
            invalid_error_message,
        ) = validators.ip_address_validators(protocol, unpack_ipv4)
        self.default_error_messages["invalid"] = invalid_error_message
        kwargs["max_length"] = 39
        super().__init__(**kwargs)

    def check(self, **kwargs):
        return [
            *super().check(**kwargs),
            *self._check_required_and_null_values(**kwargs),
        ]

    def _check_required_and_null_values(self, **kwargs):
        if not getattr(self, "allow_null", False) and not getattr(
            self, "required", True
        ):
            return [
                preflight.Error(
                    "GenericIPAddressFields cannot have required=False if allow_null=False, "
                    "as blank values are stored as nulls.",
                    obj=self,
                    id="fields.E150",
                )
            ]
        return []

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.unpack_ipv4 is not False:
            kwargs["unpack_ipv4"] = self.unpack_ipv4
        if self.protocol != "both":
            kwargs["protocol"] = self.protocol
        if kwargs.get("max_length") == 39:
            del kwargs["max_length"]
        return name, path, args, kwargs

    def get_internal_type(self):
        return "GenericIPAddressField"

    def to_python(self, value):
        if value is None:
            return None
        if not isinstance(value, str):
            value = str(value)
        value = value.strip()
        if ":" in value:
            return clean_ipv6_address(
                value, self.unpack_ipv4, self.error_messages["invalid"]
            )
        return value

    def get_db_prep_value(self, value, connection, prepared=False):
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.adapt_ipaddressfield_value(value)

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        if value is None:
            return None
        if value and ":" in value:
            try:
                return clean_ipv6_address(value, self.unpack_ipv4)
            except exceptions.ValidationError:
                pass
        return str(value)
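
    # Illustrative sketch (hypothetical addresses): IPv6 values are normalized
    # to their compressed form before validation and storage, and
    # unpack_ipv4=True additionally unwraps IPv4-mapped addresses.
    #
    #   >>> GenericIPAddressField().to_python("2001:0::0:01")
    #   '2001::1'
    #   >>> GenericIPAddressField(unpack_ipv4=True).to_python("::ffff:192.0.2.1")
    #   '192.0.2.1'
    #   >>> GenericIPAddressField(protocol="IPv4")  # validates IPv4 input only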


class NullBooleanField(BooleanField):
    default_error_messages = {
        "invalid": "“%(value)s” value must be either None, True or False.",
        "invalid_nullable": "“%(value)s” value must be either None, True or False.",
    }
    description = "Boolean (Either True, False or None)"
    system_check_removed_details = {
        "msg": (
            "NullBooleanField is removed except for support in historical migrations."
        ),
        "hint": "Use BooleanField(allow_null=True) instead.",
        "id": "fields.E903",
    }

    def __init__(self, **kwargs):
        kwargs["allow_null"] = True
        kwargs["required"] = False
        super().__init__(**kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        del kwargs["allow_null"]
        del kwargs["required"]
        return name, path, args, kwargs


class PositiveIntegerRelDbTypeMixin:
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        if not hasattr(cls, "integer_field_class"):
            cls.integer_field_class = next(
                (
                    parent
                    for parent in cls.__mro__[1:]
                    if issubclass(parent, IntegerField)
                ),
                None,
            )

    def rel_db_type(self, connection):
        """
        Return the data type that a related field pointing to this field should
        use. In most cases, a foreign key pointing to a positive integer
        primary key will have an integer column data type but some databases
        (e.g. MySQL) have an unsigned integer type. In that case
        (related_fields_match_type=True), the primary key should return its
        db_type.
        """
        if connection.features.related_fields_match_type:
            return self.db_type(connection)
        else:
            return self.integer_field_class().db_type(connection=connection)
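
    # Illustrative sketch: __init_subclass__ resolves integer_field_class from
    # the MRO, so a foreign key pointing at a positive integer primary key
    # falls back to the matching plain integer column type on backends where
    # related_fields_match_type is False.
    #
    #   >>> PositiveBigIntegerField.integer_field_class is BigIntegerField
    #   True
    #   >>> PositiveSmallIntegerField.integer_field_class is SmallIntegerField
    #   True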


class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, BigIntegerField):
    description = "Positive big integer"

    def get_internal_type(self):
        return "PositiveBigIntegerField"


class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
    description = "Positive integer"

    def get_internal_type(self):
        return "PositiveIntegerField"


class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, SmallIntegerField):
    description = "Positive small integer"

    def get_internal_type(self):
        return "PositiveSmallIntegerField"


class TextField(Field):
    description = "Text"

    def __init__(self, *, db_collation=None, **kwargs):
        super().__init__(**kwargs)
        self.db_collation = db_collation

    def check(self, **kwargs):
        databases = kwargs.get("databases") or []
        return [
            *super().check(**kwargs),
            *self._check_db_collation(databases),
        ]

    def _check_db_collation(self, databases):
        errors = []
        for db in databases:
            if not router.allow_migrate_model(db, self.model):
                continue
            connection = connections[db]
            if not (
                self.db_collation is None
                or "supports_collation_on_textfield"
                in self.model._meta.required_db_features
                or connection.features.supports_collation_on_textfield
            ):
                errors.append(
                    preflight.Error(
                        f"{connection.display_name} does not support a database collation on "
                        "TextFields.",
                        obj=self,
                        id="fields.E190",
                    ),
                )
        return errors

    def db_parameters(self, connection):
        db_params = super().db_parameters(connection)
        db_params["collation"] = self.db_collation
        return db_params

    def get_internal_type(self):
        return "TextField"

    def to_python(self, value):
        if isinstance(value, str) or value is None:
            return value
        return str(value)

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.db_collation:
            kwargs["db_collation"] = self.db_collation
        return name, path, args, kwargs
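
    # Illustrative sketch (hypothetical model and collation name): db_collation
    # is passed through to the column definition via db_parameters(), and the
    # preflight check above reports fields.E190 on backends that cannot collate
    # TEXT columns.
    #
    #   class Article(models.Model):
    #       body = TextField(db_collation="case_insensitive")
    #
    # deconstruct() only records db_collation when it is set, so a plain
    # TextField() keeps its existing migration signature.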


class TimeField(DateTimeCheckMixin, Field):
    empty_strings_allowed = False
    default_error_messages = {
        "invalid": "“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] format.",
        "invalid_time": "“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an invalid time.",
    }
    description = "Time"

    def __init__(self, *, auto_now=False, auto_now_add=False, **kwargs):
        self.auto_now, self.auto_now_add = auto_now, auto_now_add
        if auto_now or auto_now_add:
            kwargs["required"] = False
        super().__init__(**kwargs)

    def _check_fix_default_value(self):
        """
        Warn that using an actual date or datetime value is probably wrong;
        it's only evaluated on server startup.
        """
        if not self.has_default():
            return []

        value = self.default
        if isinstance(value, datetime.datetime):
            now = None
        elif isinstance(value, datetime.time):
            now = _get_naive_now()
            # This will not use the right date in the race condition where now
            # is just before the date change and value is just past 0:00.
            value = datetime.datetime.combine(now.date(), value)
        else:
            # No explicit time / datetime value -- no checks necessary
            return []
        # At this point, value is a datetime object.
        return self._check_if_value_fixed(value, now=now)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.auto_now is not False:
            kwargs["auto_now"] = self.auto_now
        if self.auto_now_add is not False:
            kwargs["auto_now_add"] = self.auto_now_add
        if self.auto_now or self.auto_now_add:
            del kwargs["required"]
        return name, path, args, kwargs

    def get_internal_type(self):
        return "TimeField"

    def to_python(self, value):
        if value is None:
            return None
        if isinstance(value, datetime.time):
            return value
        if isinstance(value, datetime.datetime):
            # Not usually a good idea to pass in a datetime here (it loses
            # information), but this can be a side-effect of interacting with a
            # database backend (e.g. Oracle), so we'll be accommodating.
            return value.time()

        try:
            parsed = parse_time(value)
            if parsed is not None:
                return parsed
        except ValueError:
            raise exceptions.ValidationError(
                self.error_messages["invalid_time"],
                code="invalid_time",
                params={"value": value},
            )

        raise exceptions.ValidationError(
            self.error_messages["invalid"],
            code="invalid",
            params={"value": value},
        )

    def pre_save(self, model_instance, add):
        if self.auto_now or (self.auto_now_add and add):
            value = datetime.datetime.now().time()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super().pre_save(model_instance, add)

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts times into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.adapt_timefield_value(value)

    def value_to_string(self, obj):
        val = self.value_from_object(obj)
        return "" if val is None else val.isoformat()


class URLField(CharField):
    default_validators = [validators.URLValidator()]
    description = "URL"

    def __init__(self, **kwargs):
        kwargs.setdefault("max_length", 200)
        super().__init__(**kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if kwargs.get("max_length") == 200:
            del kwargs["max_length"]
        return name, path, args, kwargs


class BinaryField(Field):
    description = "Raw binary data"
    empty_values = [None, b""]

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        if self.max_length is not None:
            self.validators.append(validators.MaxLengthValidator(self.max_length))

    def check(self, **kwargs):
        return [*super().check(**kwargs), *self._check_str_default_value()]

    def _check_str_default_value(self):
        if self.has_default() and isinstance(self.default, str):
            return [
                preflight.Error(
                    "BinaryField's default cannot be a string. Use bytes "
                    "content instead.",
                    obj=self,
                    id="fields.E170",
                )
            ]
        return []

    def get_internal_type(self):
        return "BinaryField"

    def get_placeholder(self, value, compiler, connection):
        return connection.ops.binary_placeholder_sql(value)

    def get_default(self):
        if self.has_default() and not callable(self.default):
            return self.default
        default = super().get_default()
        if default == "":
            return b""
        return default

    def get_db_prep_value(self, value, connection, prepared=False):
        value = super().get_db_prep_value(value, connection, prepared)
        if value is not None:
            return connection.Database.Binary(value)
        return value

    def value_to_string(self, obj):
        """Binary data is serialized as base64"""
        return b64encode(self.value_from_object(obj)).decode("ascii")

    def to_python(self, value):
        # If it's a string, it should be base64-encoded data
        if isinstance(value, str):
            return memoryview(b64decode(value.encode("ascii")))
        return value
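
    # Illustrative sketch: serialization round-trips through base64, and
    # to_python() hands back a memoryview rather than bytes.
    #
    #   >>> b64encode(b"hello").decode("ascii")
    #   'aGVsbG8='
    #   >>> bytes(BinaryField().to_python("aGVsbG8="))
    #   b'hello'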


class UUIDField(Field):
    default_error_messages = {
        "invalid": "“%(value)s” is not a valid UUID.",
    }
    description = "Universally unique identifier"
    empty_strings_allowed = False

    def __init__(self, **kwargs):
        kwargs["max_length"] = 32
        super().__init__(**kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        del kwargs["max_length"]
        return name, path, args, kwargs

    def get_internal_type(self):
        return "UUIDField"

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        if value is None:
            return None
        if not isinstance(value, uuid.UUID):
            value = self.to_python(value)

        if connection.features.has_native_uuid_field:
            return value
        return value.hex

    def to_python(self, value):
        if value is not None and not isinstance(value, uuid.UUID):
            input_form = "int" if isinstance(value, int) else "hex"
            try:
                return uuid.UUID(**{input_form: value})
            except (AttributeError, ValueError):
                raise exceptions.ValidationError(
                    self.error_messages["invalid"],
                    code="invalid",
                    params={"value": value},
                )
        return value
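
    # Illustrative sketch: to_python() accepts UUID instances, hex strings
    # (with or without dashes) and integers; backends without a native UUID
    # column fall back to the 32-character hex form (hence max_length=32).
    #
    #   >>> UUIDField().to_python("12345678123456781234567812345678")
    #   UUID('12345678-1234-5678-1234-567812345678')
    #   >>> UUIDField().to_python(0).hex
    #   '00000000000000000000000000000000'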


class AutoFieldMixin:
    db_returning = True

    def __init__(self, *args, **kwargs):
        kwargs["required"] = False
        super().__init__(*args, **kwargs)

    def check(self, **kwargs):
        return [
            *super().check(**kwargs),
            *self._check_primary_key(),
        ]

    def _check_primary_key(self):
        if not self.primary_key:
            return [
                preflight.Error(
                    "AutoFields must set primary_key=True.",
                    obj=self,
                    id="fields.E100",
                ),
            ]
        else:
            return []

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        del kwargs["required"]
        kwargs["primary_key"] = True
        return name, path, args, kwargs

    def validate(self, value, model_instance):
        pass

    def get_db_prep_value(self, value, connection, prepared=False):
        if not prepared:
            value = self.get_prep_value(value)
            value = connection.ops.validate_autopk_value(value)
        return value

    def contribute_to_class(self, cls, name, **kwargs):
        if cls._meta.auto_field:
            raise ValueError(
                f"Model {cls._meta.label} can't have more than one auto-generated field."
            )
        super().contribute_to_class(cls, name, **kwargs)
        cls._meta.auto_field = self


class AutoFieldMeta(type):
    """
    Metaclass to maintain backward inheritance compatibility for AutoField.

    It is intended that AutoFieldMixin become public API when it is possible to
    create a non-integer automatically-generated field using column defaults
    stored in the database.

    In many areas Plain also relies on using isinstance() to check for an
    automatically-generated field as a subclass of AutoField. A new flag needs
    to be implemented on Field to be used instead.

    When these issues have been addressed, this metaclass could be used to
    deprecate inheritance from AutoField and use of isinstance() with AutoField
    for detecting automatically-generated fields.
    """

    @property
    def _subclasses(self):
        return (BigAutoField, SmallAutoField)

    def __instancecheck__(self, instance):
        return isinstance(instance, self._subclasses) or super().__instancecheck__(
            instance
        )

    def __subclasscheck__(self, subclass):
        return issubclass(subclass, self._subclasses) or super().__subclasscheck__(
            subclass
        )
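
    # Illustrative sketch: the metaclass makes the concrete auto fields defined
    # below look like AutoField subclasses even though they inherit from
    # AutoFieldMixin and their own integer base instead.
    #
    #   >>> issubclass(BigAutoField, AutoField)
    #   True
    #   >>> isinstance(SmallAutoField(primary_key=True), AutoField)
    #   True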


class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta):
    def get_internal_type(self):
        return "AutoField"

    def rel_db_type(self, connection):
        return IntegerField().db_type(connection=connection)


class BigAutoField(AutoFieldMixin, BigIntegerField):
    def get_internal_type(self):
        return "BigAutoField"

    def rel_db_type(self, connection):
        return BigIntegerField().db_type(connection=connection)


class SmallAutoField(AutoFieldMixin, SmallIntegerField):
    def get_internal_type(self):
        return "SmallAutoField"

    def rel_db_type(self, connection):
        return SmallIntegerField().db_type(connection=connection)
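
    # Illustrative sketch (hypothetical model): the auto fields above require
    # primary_key=True (fields.E100 otherwise) and force required=False, so a
    # model declaring its own id typically looks like:
    #
    #   class Order(models.Model):
    #       id = BigAutoField(primary_key=True)
    #
    # rel_db_type() ensures a foreign key to that primary key uses the matching
    # plain integer column type.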