diff --git a/django_mongodb/features.py b/django_mongodb/features.py
index 06b3dbfc6..8278004b6 100644
--- a/django_mongodb/features.py
+++ b/django_mongodb/features.py
@@ -14,6 +14,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     supports_collation_on_charfield = False
     supports_column_check_constraints = False
     supports_date_lookup_using_string = False
+    supports_deferrable_unique_constraints = False
     supports_explaining_query_execution = True
     supports_expression_defaults = False
     supports_expression_indexes = False
@@ -23,6 +24,8 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     # BSON Date type doesn't support microsecond precision.
     supports_microsecond_precision = False
     supports_paramstyle_pyformat = False
+    # Not implemented.
+    supports_partial_indexes = False
     supports_select_difference = False
     supports_select_intersection = False
     supports_sequence_reset = False
@@ -70,28 +73,6 @@ class DatabaseFeatures(BaseDatabaseFeatures):
         "backends.tests.ThreadTests.test_pass_connection_between_threads",
         "backends.tests.ThreadTests.test_closing_non_shared_connections",
         "backends.tests.ThreadTests.test_default_connection_thread_local",
-        # AddField
-        "schema.tests.SchemaTests.test_add_unique_charfield",
-        # AlterField
-        "schema.tests.SchemaTests.test_alter_field_fk_to_o2o",
-        "schema.tests.SchemaTests.test_alter_field_o2o_keeps_unique",
-        "schema.tests.SchemaTests.test_alter_field_o2o_to_fk",
-        "schema.tests.SchemaTests.test_alter_int_pk_to_int_unique",
-        # AlterField (unique)
-        "schema.tests.SchemaTests.test_indexes",
-        "schema.tests.SchemaTests.test_unique",
-        "schema.tests.SchemaTests.test_unique_and_reverse_m2m",
-        # alter_unique_together
-        "migrations.test_operations.OperationTests.test_alter_unique_together",
-        "schema.tests.SchemaTests.test_unique_together",
-        # add/remove_constraint
-        "introspection.tests.IntrospectionTests.test_get_constraints",
-        "migrations.test_operations.OperationTests.test_add_partial_unique_constraint",
-        "migrations.test_operations.OperationTests.test_create_model_with_partial_unique_constraint",
-        "migrations.test_operations.OperationTests.test_remove_partial_unique_constraint",
-        "schema.tests.SchemaTests.test_composed_constraint_with_fk",
-        "schema.tests.SchemaTests.test_remove_ignored_unique_constraint_not_create_fk_index",
-        "schema.tests.SchemaTests.test_unique_constraint",
         # Column default values aren't handled when a field raises
         # EmptyResultSet: https://github.com/mongodb-labs/django-mongodb/issues/155
         "annotations.tests.NonAggregateAnnotationTestCase.test_empty_queryset_annotation",
@@ -195,24 +176,6 @@ def django_test_expected_failures(self):
             "model_fields.test_autofield.SmallAutoFieldTests",
             "queries.tests.TestInvalidValuesRelation.test_invalid_values",
         },
-        "MongoDB does not enforce UNIQUE constraints.": {
-            "auth_tests.test_basic.BasicTestCase.test_unicode_username",
-            "auth_tests.test_migrations.ProxyModelWithSameAppLabelTests.test_migrate_with_existing_target_permission",
-            "constraints.tests.UniqueConstraintTests.test_database_constraint",
-            "contenttypes_tests.test_operations.ContentTypeOperationsTests.test_content_type_rename_conflict",
-            "contenttypes_tests.test_operations.ContentTypeOperationsTests.test_existing_content_type_rename",
-            "custom_pk.tests.CustomPKTests.test_unique_pk",
-            "force_insert_update.tests.ForceInsertInheritanceTests.test_force_insert_with_existing_grandparent",
-            "get_or_create.tests.GetOrCreateTestsWithManualPKs.test_create_with_duplicate_primary_key",
"get_or_create.tests.GetOrCreateTestsWithManualPKs.test_savepoint_rollback", - "get_or_create.tests.GetOrCreateThroughManyToMany.test_something", - "get_or_create.tests.UpdateOrCreateTests.test_manual_primary_key_test", - "get_or_create.tests.UpdateOrCreateTestsWithManualPKs.test_create_with_duplicate_primary_key", - "introspection.tests.IntrospectionTests.test_get_constraints_unique_indexes_orders", - "model_fields.test_filefield.FileFieldTests.test_unique_when_same_filename", - "one_to_one.tests.OneToOneTests.test_multiple_o2o", - "queries.test_bulk_update.BulkUpdateTests.test_database_routing_batch_atomicity", - }, "MongoDB does not enforce PositiveIntegerField constraint.": { "model_fields.test_integerfield.PositiveIntegerFieldTests.test_negative_values", }, diff --git a/django_mongodb/schema.py b/django_mongodb/schema.py index 7cc8fe6d2..8fd902ac3 100644 --- a/django_mongodb/schema.py +++ b/django_mongodb/schema.py @@ -1,5 +1,5 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor -from django.db.models import Index +from django.db.models import Index, UniqueConstraint from pymongo import ASCENDING, DESCENDING from pymongo.operations import IndexModel @@ -29,18 +29,26 @@ def create_model(self, model): def _create_model_indexes(self, model): """ - Create all indexes (field indexes, index_together, Meta.indexes) for - the specified model. + Create all indexes (field indexes & uniques, Meta.index_together, + Meta.unique_together, Meta.constraints, Meta.indexes) for the model. """ if not model._meta.managed or model._meta.proxy or model._meta.swapped: return - # Field indexes + # Field indexes and uniques for field in model._meta.local_fields: if self._field_should_be_indexed(model, field): self._add_field_index(model, field) + elif self._field_should_have_unique(field): + self._add_field_unique(model, field) # Meta.index_together (RemovedInDjango51Warning) for field_names in model._meta.index_together: self._add_composed_index(model, field_names) + # Meta.unique_together + if model._meta.unique_together: + self.alter_unique_together(model, [], model._meta.unique_together) + # Meta.constraints + for constraint in model._meta.constraints: + self.add_constraint(model, constraint) # Meta.indexes for index in model._meta.indexes: self.add_index(model, index) @@ -62,9 +70,11 @@ def add_field(self, model, field): self.get_collection(model._meta.db_table).update_many( {}, [{"$set": {column: self.effective_default(field)}}] ) - # Add an index, if required. + # Add an index or unique, if required. if self._field_should_be_indexed(model, field): self._add_field_index(model, field) + elif self._field_should_have_unique(field): + self._add_field_unique(model, field) def _alter_field( self, @@ -78,6 +88,11 @@ def _alter_field( strict=False, ): collection = self.get_collection(model._meta.db_table) + # Has unique been removed? + old_field_unique = self._field_should_have_unique(old_field) + new_field_unique = self._field_should_have_unique(new_field) + if old_field_unique and not new_field_unique: + self._remove_field_unique(model, old_field) # Removed an index? old_field_indexed = self._field_should_be_indexed(model, old_field) new_field_indexed = self._field_should_be_indexed(model, new_field) @@ -90,6 +105,10 @@ def _alter_field( if old_field_indexed and new_field_indexed: self._remove_field_index(model, old_field) self._add_field_index(model, new_field) + # Move unique to the new field, if needed. 
+        if old_field_unique and new_field_unique:
+            self._remove_field_unique(model, old_field)
+            self._add_field_unique(model, new_field)
         # Replace NULL with the field default if the field and was changed from
         # NULL to NOT NULL.
         if new_field.has_default() and old_field.null and not new_field.null:
@@ -99,6 +118,9 @@ def _alter_field(
         # Added an index?
         if not old_field_indexed and new_field_indexed:
             self._add_field_index(model, new_field)
+        # Added a unique?
+        if not old_field_unique and new_field_unique:
+            self._add_field_unique(model, new_field)

     def remove_field(self, model, field):
         # Remove implicit M2M tables.
@@ -110,6 +132,8 @@ def remove_field(self, model, field):
         self.get_collection(model._meta.db_table).update_many({}, {"$unset": {column: ""}})
         if self._field_should_be_indexed(model, field):
             self._remove_field_index(model, field)
+        elif self._field_should_have_unique(field):
+            self._remove_field_unique(model, field)

     def alter_index_together(self, model, old_index_together, new_index_together):
         olds = {tuple(fields) for fields in old_index_together}
@@ -122,11 +146,37 @@ def alter_index_together(self, model, old_index_together, new_index_together):
             self._add_composed_index(model, field_names)

     def alter_unique_together(self, model, old_unique_together, new_unique_together):
-        pass
+        olds = {tuple(fields) for fields in old_unique_together}
+        news = {tuple(fields) for fields in new_unique_together}
+        # Deleted uniques
+        for field_names in olds.difference(news):
+            self._remove_composed_index(
+                model,
+                field_names,
+                {"unique": True, "primary_key": False},
+            )
+        # Created uniques
+        for field_names in news.difference(olds):
+            columns = [model._meta.get_field(field).column for field in field_names]
+            name = str(self._unique_constraint_name(model._meta.db_table, columns))
+            constraint = UniqueConstraint(fields=field_names, name=name)
+            self.add_constraint(model, constraint)

-    def add_index(self, model, index, field=None):
+    def add_index(self, model, index, field=None, unique=False):
         if index.contains_expressions:
             return
+        kwargs = {}
+        if unique:
+            filter_expression = {}
+            # Indexing on $type matches the behavior of most SQL databases by
+            # allowing multiple null values for the unique constraint.
+            if field:
+                filter_expression[field.column] = {"$type": field.db_type(self.connection)}
+            else:
+                for field_name, _ in index.fields_orders:
+                    field_ = model._meta.get_field(field_name)
+                    filter_expression[field_.column] = {"$type": field_.db_type(self.connection)}
+            kwargs = {"partialFilterExpression": filter_expression, "unique": True}
         index_orders = (
             [(field.column, ASCENDING)]
             if field
@@ -137,7 +187,7 @@
                 for field_name, order in index.fields_orders
             ]
         )
-        idx = IndexModel(index_orders, name=index.name)
+        idx = IndexModel(index_orders, name=index.name, **kwargs)
         self.get_collection(model._meta.db_table).create_indexes([idx])

     def _add_composed_index(self, model, field_names):
@@ -202,13 +252,57 @@ def _remove_field_index(self, model, field):
             )
         collection.drop_index(index_names[0])

-    def add_constraint(self, model, constraint):
-        pass
+    def add_constraint(self, model, constraint, field=None):
+        if isinstance(constraint, UniqueConstraint) and self._unique_supported(
+            condition=constraint.condition,
+            deferrable=constraint.deferrable,
+            include=constraint.include,
+            expressions=constraint.expressions,
+            nulls_distinct=constraint.nulls_distinct,
+        ):
+            idx = Index(fields=constraint.fields, name=constraint.name)
+            self.add_index(model, idx, field=field, unique=True)
+
+    def _add_field_unique(self, model, field):
+        name = str(self._unique_constraint_name(model._meta.db_table, [field.column]))
+        constraint = UniqueConstraint(fields=[field.name], name=name)
+        self.add_constraint(model, constraint, field=field)

     def remove_constraint(self, model, constraint):
-        pass
+        if isinstance(constraint, UniqueConstraint) and self._unique_supported(
+            condition=constraint.condition,
+            deferrable=constraint.deferrable,
+            include=constraint.include,
+            expressions=constraint.expressions,
+            nulls_distinct=constraint.nulls_distinct,
+        ):
+            idx = Index(fields=constraint.fields, name=constraint.name)
+            self.remove_index(model, idx)
+
+    def _remove_field_unique(self, model, field):
+        # Find the unique constraint for this field
+        meta_constraint_names = {constraint.name for constraint in model._meta.constraints}
+        constraint_names = self._constraint_names(
+            model,
+            [field.column],
+            unique=True,
+            primary_key=False,
+            exclude=meta_constraint_names,
+        )
+        if len(constraint_names) != 1:
+            num_found = len(constraint_names)
+            raise ValueError(
+                f"Found wrong number ({num_found}) of unique constraints for "
+                f"{model._meta.db_table}.{field.column}."
+            )
+        self.get_collection(model._meta.db_table).drop_index(constraint_names[0])

     def alter_db_table(self, model, old_db_table, new_db_table):
         if old_db_table == new_db_table:
             return
         self.get_collection(old_db_table).rename(new_db_table)
+
+    def _field_should_have_unique(self, field):
+        db_type = field.db_type(self.connection)
+        # The _id column is automatically unique.
+        return db_type and field.unique and field.column != "_id"
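
Note on the index shape produced by add_index(..., unique=True): it is not a plain unique index. The partialFilterExpression keyed on the column's BSON type keeps documents that lack the field (Django's NULL) out of the index, so several of them can coexist while duplicate non-null values are rejected. A minimal pymongo sketch of the equivalent index, with a made-up database, collection, and column name, and assuming the backend maps a CharField to the BSON "string" type:

from pymongo import ASCENDING, MongoClient
from pymongo.operations import IndexModel

client = MongoClient()  # assumes a locally running mongod
collection = client["example_db"]["example_book"]

# Unique index restricted to documents where "isbn" holds a string, the same
# shape as the partialFilterExpression={column: {"$type": db_type}} built in
# add_index(). Documents without "isbn" are excluded from the index.
idx = IndexModel(
    [("isbn", ASCENDING)],
    name="example_book_isbn_uniq",
    unique=True,
    partialFilterExpression={"isbn": {"$type": "string"}},
)
collection.create_indexes([idx])

With this index, two documents that both omit "isbn" insert fine, while a second document repeating an existing "isbn" value raises DuplicateKeyError.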
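
For orientation, a hypothetical model showing which declarations reach the new code paths during create_model() and migrations. The app label and field names are invented, a configured project using this backend is assumed, and the comment about the skipped constraint assumes Django's _unique_supported() checks condition against supports_partial_indexes, as in recent Django versions:

from django.db import models


class Product(models.Model):
    # unique=True on a non-_id column -> _field_should_have_unique() ->
    # _add_field_unique() creates a single-column unique index.
    sku = models.CharField(max_length=32, unique=True)
    name = models.CharField(max_length=100)
    vendor = models.CharField(max_length=100)
    discontinued = models.BooleanField(default=False)

    class Meta:
        app_label = "catalog"  # hypothetical app
        # Each tuple becomes a UniqueConstraint via alter_unique_together().
        unique_together = [("name", "vendor")]
        constraints = [
            # Routed through add_constraint(). Because the patch sets
            # supports_partial_indexes = False, the condition makes
            # _unique_supported() return False and no index is created.
            models.UniqueConstraint(
                fields=["name"],
                condition=models.Q(discontinued=False),
                name="unique_active_product_name",
            ),
        ]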