ran unittest2pytest
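
unittest2pytest mechanically rewrites unittest assertion helpers into plain assert statements and pytest.raises blocks; that is the only change this commit makes to the index tests. A minimal, self-contained sketch of the kind of rewrite it applies (the sample data is invented for illustration, not taken from the suite):

    import pytest

    def test_rewritten_assertions():
        info = {"_id_": 1, "tags_1": 2}
        # self.assertEqual(len(info), 2) becomes a bare comparison:
        assert len(info) == 2
        # self.assertIn("tags_1", info) becomes a membership check:
        assert "tags_1" in info
        # self.assertRaises(KeyError, info.__getitem__, "missing") becomes a context manager:
        with pytest.raises(KeyError):
            info["missing"]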
@@ -9,6 +9,7 @@ from six import iteritems
 
 from mongoengine import *
 from mongoengine.connection import get_db
+import pytest
 
 
 class TestIndexes(unittest.TestCase):
@@ -53,15 +54,15 @@ class TestIndexes(unittest.TestCase):
             {"fields": [("tags", 1)]},
             {"fields": [("category", 1), ("addDate", -1)]},
         ]
-        self.assertEqual(expected_specs, BlogPost._meta["index_specs"])
+        assert expected_specs == BlogPost._meta["index_specs"]
 
         BlogPost.ensure_indexes()
         info = BlogPost.objects._collection.index_information()
         # _id, '-date', 'tags', ('cat', 'date')
-        self.assertEqual(len(info), 4)
+        assert len(info) == 4
         info = [value["key"] for key, value in iteritems(info)]
         for expected in expected_specs:
-            self.assertIn(expected["fields"], info)
+            assert expected["fields"] in info
 
     def _index_test_inheritance(self, InheritFrom):
         class BlogPost(InheritFrom):
@@ -78,7 +79,7 @@ class TestIndexes(unittest.TestCase):
             {"fields": [("_cls", 1), ("tags", 1)]},
             {"fields": [("_cls", 1), ("category", 1), ("addDate", -1)]},
         ]
-        self.assertEqual(expected_specs, BlogPost._meta["index_specs"])
+        assert expected_specs == BlogPost._meta["index_specs"]
 
         BlogPost.ensure_indexes()
         info = BlogPost.objects._collection.index_information()
@@ -86,17 +87,17 @@ class TestIndexes(unittest.TestCase):
         # NB: there is no index on _cls by itself, since
         # the indices on -date and tags will both contain
         # _cls as first element in the key
-        self.assertEqual(len(info), 4)
+        assert len(info) == 4
         info = [value["key"] for key, value in iteritems(info)]
         for expected in expected_specs:
-            self.assertIn(expected["fields"], info)
+            assert expected["fields"] in info
 
         class ExtendedBlogPost(BlogPost):
             title = StringField()
             meta = {"indexes": ["title"]}
 
         expected_specs.append({"fields": [("_cls", 1), ("title", 1)]})
-        self.assertEqual(expected_specs, ExtendedBlogPost._meta["index_specs"])
+        assert expected_specs == ExtendedBlogPost._meta["index_specs"]
 
         BlogPost.drop_collection()
 
@@ -104,7 +105,7 @@ class TestIndexes(unittest.TestCase):
         info = ExtendedBlogPost.objects._collection.index_information()
         info = [value["key"] for key, value in iteritems(info)]
         for expected in expected_specs:
-            self.assertIn(expected["fields"], info)
+            assert expected["fields"] in info
 
     def test_indexes_document_inheritance(self):
         """Ensure that indexes are used when meta[indexes] is specified for
@@ -128,10 +129,8 @@ class TestIndexes(unittest.TestCase):
         class B(A):
             description = StringField()
 
-        self.assertEqual(A._meta["index_specs"], B._meta["index_specs"])
-        self.assertEqual(
-            [{"fields": [("_cls", 1), ("title", 1)]}], A._meta["index_specs"]
-        )
+        assert A._meta["index_specs"] == B._meta["index_specs"]
+        assert [{"fields": [("_cls", 1), ("title", 1)]}] == A._meta["index_specs"]
 
     def test_index_no_cls(self):
         """Ensure index specs are inhertited correctly"""
@@ -144,11 +143,11 @@ class TestIndexes(unittest.TestCase):
                 "index_cls": False,
             }
 
-        self.assertEqual([("title", 1)], A._meta["index_specs"][0]["fields"])
+        assert [("title", 1)] == A._meta["index_specs"][0]["fields"]
         A._get_collection().drop_indexes()
         A.ensure_indexes()
         info = A._get_collection().index_information()
-        self.assertEqual(len(info.keys()), 2)
+        assert len(info.keys()) == 2
 
         class B(A):
             c = StringField()
@@ -158,8 +157,8 @@ class TestIndexes(unittest.TestCase):
                 "allow_inheritance": True,
             }
 
-        self.assertEqual([("c", 1)], B._meta["index_specs"][1]["fields"])
-        self.assertEqual([("_cls", 1), ("d", 1)], B._meta["index_specs"][2]["fields"])
+        assert [("c", 1)] == B._meta["index_specs"][1]["fields"]
+        assert [("_cls", 1), ("d", 1)] == B._meta["index_specs"][2]["fields"]
 
     def test_build_index_spec_is_not_destructive(self):
         class MyDoc(Document):
@@ -167,12 +166,12 @@ class TestIndexes(unittest.TestCase):
 
             meta = {"indexes": ["keywords"], "allow_inheritance": False}
 
-        self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}])
+        assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}]
 
         # Force index creation
         MyDoc.ensure_indexes()
 
-        self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}])
+        assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}]
 
     def test_embedded_document_index_meta(self):
         """Ensure that embedded document indexes are created explicitly
@@ -187,7 +186,7 @@ class TestIndexes(unittest.TestCase):
 
             meta = {"indexes": ["rank.title"], "allow_inheritance": False}
 
-        self.assertEqual([{"fields": [("rank.title", 1)]}], Person._meta["index_specs"])
+        assert [{"fields": [("rank.title", 1)]}] == Person._meta["index_specs"]
 
         Person.drop_collection()
 
@@ -195,7 +194,7 @@ class TestIndexes(unittest.TestCase):
         list(Person.objects)
         info = Person.objects._collection.index_information()
         info = [value["key"] for key, value in iteritems(info)]
-        self.assertIn([("rank.title", 1)], info)
+        assert [("rank.title", 1)] in info
 
     def test_explicit_geo2d_index(self):
         """Ensure that geo2d indexes work when created via meta[indexes]
@@ -205,14 +204,12 @@ class TestIndexes(unittest.TestCase):
             location = DictField()
             meta = {"allow_inheritance": True, "indexes": ["*location.point"]}
 
-        self.assertEqual(
-            [{"fields": [("location.point", "2d")]}], Place._meta["index_specs"]
-        )
+        assert [{"fields": [("location.point", "2d")]}] == Place._meta["index_specs"]
 
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
         info = [value["key"] for key, value in iteritems(info)]
-        self.assertIn([("location.point", "2d")], info)
+        assert [("location.point", "2d")] in info
 
     def test_explicit_geo2d_index_embedded(self):
         """Ensure that geo2d indexes work when created via meta[indexes]
@@ -225,14 +222,14 @@ class TestIndexes(unittest.TestCase):
             current = DictField(field=EmbeddedDocumentField("EmbeddedLocation"))
             meta = {"allow_inheritance": True, "indexes": ["*current.location.point"]}
 
-        self.assertEqual(
-            [{"fields": [("current.location.point", "2d")]}], Place._meta["index_specs"]
-        )
+        assert [{"fields": [("current.location.point", "2d")]}] == Place._meta[
+            "index_specs"
+        ]
 
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
         info = [value["key"] for key, value in iteritems(info)]
-        self.assertIn([("current.location.point", "2d")], info)
+        assert [("current.location.point", "2d")] in info
 
     def test_explicit_geosphere_index(self):
         """Ensure that geosphere indexes work when created via meta[indexes]
@@ -242,14 +239,14 @@ class TestIndexes(unittest.TestCase):
             location = DictField()
             meta = {"allow_inheritance": True, "indexes": ["(location.point"]}
 
-        self.assertEqual(
-            [{"fields": [("location.point", "2dsphere")]}], Place._meta["index_specs"]
-        )
+        assert [{"fields": [("location.point", "2dsphere")]}] == Place._meta[
+            "index_specs"
+        ]
 
         Place.ensure_indexes()
        info = Place._get_collection().index_information()
         info = [value["key"] for key, value in iteritems(info)]
-        self.assertIn([("location.point", "2dsphere")], info)
+        assert [("location.point", "2dsphere")] in info
 
     def test_explicit_geohaystack_index(self):
         """Ensure that geohaystack indexes work when created via meta[indexes]
@@ -264,15 +261,14 @@ class TestIndexes(unittest.TestCase):
             name = StringField()
             meta = {"indexes": [(")location.point", "name")]}
 
-        self.assertEqual(
-            [{"fields": [("location.point", "geoHaystack"), ("name", 1)]}],
-            Place._meta["index_specs"],
-        )
+        assert [
+            {"fields": [("location.point", "geoHaystack"), ("name", 1)]}
+        ] == Place._meta["index_specs"]
 
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
         info = [value["key"] for key, value in iteritems(info)]
-        self.assertIn([("location.point", "geoHaystack")], info)
+        assert [("location.point", "geoHaystack")] in info
 
     def test_create_geohaystack_index(self):
         """Ensure that geohaystack indexes can be created
@@ -285,7 +281,7 @@ class TestIndexes(unittest.TestCase):
         Place.create_index({"fields": (")location.point", "name")}, bucketSize=10)
         info = Place._get_collection().index_information()
         info = [value["key"] for key, value in iteritems(info)]
-        self.assertIn([("location.point", "geoHaystack"), ("name", 1)], info)
+        assert [("location.point", "geoHaystack"), ("name", 1)] in info
 
     def test_dictionary_indexes(self):
         """Ensure that indexes are used when meta[indexes] contains
@@ -298,16 +294,15 @@ class TestIndexes(unittest.TestCase):
             tags = ListField(StringField())
             meta = {"indexes": [{"fields": ["-date"], "unique": True, "sparse": True}]}
 
-        self.assertEqual(
-            [{"fields": [("addDate", -1)], "unique": True, "sparse": True}],
-            BlogPost._meta["index_specs"],
-        )
+        assert [
+            {"fields": [("addDate", -1)], "unique": True, "sparse": True}
+        ] == BlogPost._meta["index_specs"]
 
         BlogPost.drop_collection()
 
         info = BlogPost.objects._collection.index_information()
         # _id, '-date'
-        self.assertEqual(len(info), 2)
+        assert len(info) == 2
 
         # Indexes are lazy so use list() to perform query
         list(BlogPost.objects)
@@ -316,7 +311,7 @@ class TestIndexes(unittest.TestCase):
             (value["key"], value.get("unique", False), value.get("sparse", False))
             for key, value in iteritems(info)
         ]
-        self.assertIn(([("addDate", -1)], True, True), info)
+        assert ([("addDate", -1)], True, True) in info
 
         BlogPost.drop_collection()
 
@@ -338,11 +333,9 @@ class TestIndexes(unittest.TestCase):
 
         Person(name="test", user_guid="123").save()
 
-        self.assertEqual(1, Person.objects.count())
+        assert 1 == Person.objects.count()
         info = Person.objects._collection.index_information()
-        self.assertEqual(
-            sorted(info.keys()), ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"]
-        )
+        assert sorted(info.keys()) == ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"]
 
     def test_disable_index_creation(self):
         """Tests setting auto_create_index to False on the connection will
@@ -365,13 +358,13 @@ class TestIndexes(unittest.TestCase):
         User(user_guid="123").save()
         MongoUser(user_guid="123").save()
 
-        self.assertEqual(2, User.objects.count())
+        assert 2 == User.objects.count()
         info = User.objects._collection.index_information()
-        self.assertEqual(list(info.keys()), ["_id_"])
+        assert list(info.keys()) == ["_id_"]
 
         User.ensure_indexes()
         info = User.objects._collection.index_information()
-        self.assertEqual(sorted(info.keys()), ["_cls_1_user_guid_1", "_id_"])
+        assert sorted(info.keys()) == ["_cls_1_user_guid_1", "_id_"]
 
     def test_embedded_document_index(self):
         """Tests settings an index on an embedded document
@@ -389,7 +382,7 @@ class TestIndexes(unittest.TestCase):
         BlogPost.drop_collection()
 
         info = BlogPost.objects._collection.index_information()
-        self.assertEqual(sorted(info.keys()), ["_id_", "date.yr_-1"])
+        assert sorted(info.keys()) == ["_id_", "date.yr_-1"]
 
     def test_list_embedded_document_index(self):
         """Ensure list embedded documents can be indexed
@@ -408,7 +401,7 @@ class TestIndexes(unittest.TestCase):
 
         info = BlogPost.objects._collection.index_information()
         # we don't use _cls in with list fields by default
-        self.assertEqual(sorted(info.keys()), ["_id_", "tags.tag_1"])
+        assert sorted(info.keys()) == ["_id_", "tags.tag_1"]
 
         post1 = BlogPost(
             title="Embedded Indexes tests in place",
@@ -426,7 +419,7 @@ class TestIndexes(unittest.TestCase):
 
         RecursiveDocument.ensure_indexes()
         info = RecursiveDocument._get_collection().index_information()
-        self.assertEqual(sorted(info.keys()), ["_cls_1", "_id_"])
+        assert sorted(info.keys()) == ["_cls_1", "_id_"]
 
     def test_covered_index(self):
         """Ensure that covered indexes can be used
@@ -446,46 +439,45 @@ class TestIndexes(unittest.TestCase):
         # Need to be explicit about covered indexes as mongoDB doesn't know if
         # the documents returned might have more keys in that here.
         query_plan = Test.objects(id=obj.id).exclude("a").explain()
-        self.assertEqual(
+        assert (
             query_plan.get("queryPlanner")
             .get("winningPlan")
             .get("inputStage")
-            .get("stage"),
-            "IDHACK",
+            .get("stage")
+            == "IDHACK"
         )
 
         query_plan = Test.objects(id=obj.id).only("id").explain()
-        self.assertEqual(
+        assert (
             query_plan.get("queryPlanner")
             .get("winningPlan")
             .get("inputStage")
-            .get("stage"),
-            "IDHACK",
+            .get("stage")
+            == "IDHACK"
        )
 
         query_plan = Test.objects(a=1).only("a").exclude("id").explain()
-        self.assertEqual(
+        assert (
             query_plan.get("queryPlanner")
             .get("winningPlan")
             .get("inputStage")
-            .get("stage"),
-            "IXSCAN",
+            .get("stage")
+            == "IXSCAN"
         )
-        self.assertEqual(
-            query_plan.get("queryPlanner").get("winningPlan").get("stage"), "PROJECTION"
+        assert (
+            query_plan.get("queryPlanner").get("winningPlan").get("stage")
+            == "PROJECTION"
         )
 
         query_plan = Test.objects(a=1).explain()
-        self.assertEqual(
+        assert (
             query_plan.get("queryPlanner")
             .get("winningPlan")
             .get("inputStage")
-            .get("stage"),
-            "IXSCAN",
-        )
-        self.assertEqual(
-            query_plan.get("queryPlanner").get("winningPlan").get("stage"), "FETCH"
+            .get("stage")
+            == "IXSCAN"
         )
+        assert query_plan.get("queryPlanner").get("winningPlan").get("stage") == "FETCH"
 
     def test_index_on_id(self):
         class BlogPost(Document):
@@ -498,9 +490,7 @@ class TestIndexes(unittest.TestCase):
         BlogPost.drop_collection()
 
         indexes = BlogPost.objects._collection.index_information()
-        self.assertEqual(
-            indexes["categories_1__id_1"]["key"], [("categories", 1), ("_id", 1)]
-        )
+        assert indexes["categories_1__id_1"]["key"] == [("categories", 1), ("_id", 1)]
 
     def test_hint(self):
         TAGS_INDEX_NAME = "tags_1"
@@ -516,25 +506,25 @@ class TestIndexes(unittest.TestCase):
             BlogPost(tags=tags).save()
 
         # Hinting by shape should work.
-        self.assertEqual(BlogPost.objects.hint([("tags", 1)]).count(), 10)
+        assert BlogPost.objects.hint([("tags", 1)]).count() == 10
 
         # Hinting by index name should work.
-        self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10)
+        assert BlogPost.objects.hint(TAGS_INDEX_NAME).count() == 10
 
         # Clearing the hint should work fine.
-        self.assertEqual(BlogPost.objects.hint().count(), 10)
-        self.assertEqual(BlogPost.objects.hint([("ZZ", 1)]).hint().count(), 10)
+        assert BlogPost.objects.hint().count() == 10
+        assert BlogPost.objects.hint([("ZZ", 1)]).hint().count() == 10
 
         # Hinting on a non-existent index shape should fail.
-        with self.assertRaises(OperationFailure):
+        with pytest.raises(OperationFailure):
             BlogPost.objects.hint([("ZZ", 1)]).count()
 
         # Hinting on a non-existent index name should fail.
-        with self.assertRaises(OperationFailure):
+        with pytest.raises(OperationFailure):
             BlogPost.objects.hint("Bad Name").count()
 
         # Invalid shape argument (missing list brackets) should fail.
-        with self.assertRaises(ValueError):
+        with pytest.raises(ValueError):
             BlogPost.objects.hint(("tags", 1)).count()
 
     def test_collation(self):
@@ -588,11 +578,14 @@ class TestIndexes(unittest.TestCase):
 
         # Two posts with the same slug is not allowed
         post2 = BlogPost(title="test2", slug="test")
-        self.assertRaises(NotUniqueError, post2.save)
-        self.assertRaises(NotUniqueError, BlogPost.objects.insert, post2)
+        with pytest.raises(NotUniqueError):
+            post2.save()
+        with pytest.raises(NotUniqueError):
+            BlogPost.objects.insert(post2)
 
         # Ensure backwards compatibility for errors
-        self.assertRaises(OperationError, post2.save)
+        with pytest.raises(OperationError):
+            post2.save()
 
     def test_primary_key_unique_not_working(self):
         """Relates to #1445"""
@@ -602,23 +595,21 @@ class TestIndexes(unittest.TestCase):
 
         Blog.drop_collection()
 
-        with self.assertRaises(OperationFailure) as ctx_err:
+        with pytest.raises(OperationFailure) as ctx_err:
             Blog(id="garbage").save()
 
         # One of the errors below should happen. Which one depends on the
         # PyMongo version and dict order.
         err_msg = str(ctx_err.exception)
-        self.assertTrue(
-            any(
-                [
-                    "The field 'unique' is not valid for an _id index specification"
-                    in err_msg,
-                    "The field 'background' is not valid for an _id index specification"
-                    in err_msg,
-                    "The field 'sparse' is not valid for an _id index specification"
-                    in err_msg,
-                ]
-            )
+        assert any(
+            [
+                "The field 'unique' is not valid for an _id index specification"
+                in err_msg,
+                "The field 'background' is not valid for an _id index specification"
+                in err_msg,
+                "The field 'sparse' is not valid for an _id index specification"
+                in err_msg,
+            ]
         )
 
     def test_unique_with(self):
@@ -644,7 +635,8 @@ class TestIndexes(unittest.TestCase):
 
         # Now there will be two docs with the same slug and the same day: fail
         post3 = BlogPost(title="test3", date=Date(year=2010), slug="test")
-        self.assertRaises(OperationError, post3.save)
+        with pytest.raises(OperationError):
+            post3.save()
 
     def test_unique_embedded_document(self):
         """Ensure that uniqueness constraints are applied to fields on embedded documents.
@@ -669,7 +661,8 @@ class TestIndexes(unittest.TestCase):
 
         # Now there will be two docs with the same sub.slug
         post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test"))
-        self.assertRaises(NotUniqueError, post3.save)
+        with pytest.raises(NotUniqueError):
+            post3.save()
 
     def test_unique_embedded_document_in_list(self):
         """
@@ -699,7 +692,8 @@ class TestIndexes(unittest.TestCase):
 
         post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")])
 
-        self.assertRaises(NotUniqueError, post2.save)
+        with pytest.raises(NotUniqueError):
+            post2.save()
 
     def test_unique_embedded_document_in_sorted_list(self):
         """
@@ -729,12 +723,13 @@ class TestIndexes(unittest.TestCase):
 
         # confirm that the unique index is created
         indexes = BlogPost._get_collection().index_information()
-        self.assertIn("subs.slug_1", indexes)
-        self.assertTrue(indexes["subs.slug_1"]["unique"])
+        assert "subs.slug_1" in indexes
+        assert indexes["subs.slug_1"]["unique"]
 
         post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")])
 
-        self.assertRaises(NotUniqueError, post2.save)
+        with pytest.raises(NotUniqueError):
+            post2.save()
 
     def test_unique_embedded_document_in_embedded_document_list(self):
         """
@@ -764,12 +759,13 @@ class TestIndexes(unittest.TestCase):
 
         # confirm that the unique index is created
         indexes = BlogPost._get_collection().index_information()
-        self.assertIn("subs.slug_1", indexes)
-        self.assertTrue(indexes["subs.slug_1"]["unique"])
+        assert "subs.slug_1" in indexes
+        assert indexes["subs.slug_1"]["unique"]
 
         post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")])
 
-        self.assertRaises(NotUniqueError, post2.save)
+        with pytest.raises(NotUniqueError):
+            post2.save()
 
     def test_unique_with_embedded_document_and_embedded_unique(self):
         """Ensure that uniqueness constraints are applied to fields on
@@ -795,11 +791,13 @@ class TestIndexes(unittest.TestCase):
 
         # Now there will be two docs with the same sub.slug
         post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test"))
-        self.assertRaises(NotUniqueError, post3.save)
+        with pytest.raises(NotUniqueError):
+            post3.save()
 
         # Now there will be two docs with the same title and year
         post3 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test-1"))
-        self.assertRaises(NotUniqueError, post3.save)
+        with pytest.raises(NotUniqueError):
+            post3.save()
 
     def test_ttl_indexes(self):
         class Log(Document):
@@ -811,7 +809,7 @@ class TestIndexes(unittest.TestCase):
         # Indexes are lazy so use list() to perform query
         list(Log.objects)
         info = Log.objects._collection.index_information()
-        self.assertEqual(3600, info["created_1"]["expireAfterSeconds"])
+        assert 3600 == info["created_1"]["expireAfterSeconds"]
 
     def test_index_drop_dups_silently_ignored(self):
         class Customer(Document):
@@ -839,14 +837,14 @@ class TestIndexes(unittest.TestCase):
         cust.save()
 
         cust_dupe = Customer(cust_id=1)
-        with self.assertRaises(NotUniqueError):
+        with pytest.raises(NotUniqueError):
            cust_dupe.save()
 
         cust = Customer(cust_id=2)
         cust.save()
 
         # duplicate key on update
-        with self.assertRaises(NotUniqueError):
+        with pytest.raises(NotUniqueError):
             cust.cust_id = 1
             cust.save()
 
@@ -867,8 +865,8 @@ class TestIndexes(unittest.TestCase):
         user = User(name="huangz", password="secret2")
         user.save()
 
-        self.assertEqual(User.objects.count(), 1)
-        self.assertEqual(User.objects.get().password, "secret2")
+        assert User.objects.count() == 1
+        assert User.objects.get().password == "secret2"
 
     def test_unique_and_primary_create(self):
         """Create a new record with a duplicate primary key
@@ -882,11 +880,11 @@ class TestIndexes(unittest.TestCase):
         User.drop_collection()
 
         User.objects.create(name="huangz", password="secret")
-        with self.assertRaises(NotUniqueError):
+        with pytest.raises(NotUniqueError):
             User.objects.create(name="huangz", password="secret2")
 
-        self.assertEqual(User.objects.count(), 1)
-        self.assertEqual(User.objects.get().password, "secret")
+        assert User.objects.count() == 1
+        assert User.objects.get().password == "secret"
 
     def test_index_with_pk(self):
         """Ensure you can use `pk` as part of a query"""
@@ -910,7 +908,7 @@ class TestIndexes(unittest.TestCase):
         info = BlogPost.objects._collection.index_information()
         info = [value["key"] for key, value in iteritems(info)]
         index_item = [("_id", 1), ("comments.comment_id", 1)]
-        self.assertIn(index_item, info)
+        assert index_item in info
 
     def test_compound_key_embedded(self):
         class CompoundKey(EmbeddedDocument):
@@ -924,10 +922,8 @@ class TestIndexes(unittest.TestCase):
         my_key = CompoundKey(name="n", term="ok")
         report = ReportEmbedded(text="OK", key=my_key).save()
 
-        self.assertEqual(
-            {"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo()
-        )
-        self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key))
+        assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo()
+        assert report == ReportEmbedded.objects.get(pk=my_key)
 
     def test_compound_key_dictfield(self):
         class ReportDictField(Document):
@@ -937,15 +933,13 @@ class TestIndexes(unittest.TestCase):
         my_key = {"name": "n", "term": "ok"}
         report = ReportDictField(text="OK", key=my_key).save()
 
-        self.assertEqual(
-            {"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo()
-        )
+        assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo()
 
         # We can't directly call ReportDictField.objects.get(pk=my_key),
         # because dicts are unordered, and if the order in MongoDB is
        # different than the one in `my_key`, this test will fail.
-        self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key["name"]))
-        self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key["term"]))
+        assert report == ReportDictField.objects.get(pk__name=my_key["name"])
+        assert report == ReportDictField.objects.get(pk__term=my_key["term"])
 
     def test_string_indexes(self):
         class MyDoc(Document):
@@ -954,8 +948,8 @@ class TestIndexes(unittest.TestCase):
 
         info = MyDoc.objects._collection.index_information()
         info = [value["key"] for key, value in iteritems(info)]
-        self.assertIn([("provider_ids.foo", 1)], info)
-        self.assertIn([("provider_ids.bar", 1)], info)
+        assert [("provider_ids.foo", 1)] in info
+        assert [("provider_ids.bar", 1)] in info
 
     def test_sparse_compound_indexes(self):
         class MyDoc(Document):
@@ -967,11 +961,10 @@ class TestIndexes(unittest.TestCase):
             }
 
         info = MyDoc.objects._collection.index_information()
-        self.assertEqual(
-            [("provider_ids.foo", 1), ("provider_ids.bar", 1)],
-            info["provider_ids.foo_1_provider_ids.bar_1"]["key"],
-        )
-        self.assertTrue(info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"])
+        assert [("provider_ids.foo", 1), ("provider_ids.bar", 1)] == info[
+            "provider_ids.foo_1_provider_ids.bar_1"
+        ]["key"]
+        assert info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"]
 
     def test_text_indexes(self):
         class Book(Document):
@@ -979,9 +972,9 @@ class TestIndexes(unittest.TestCase):
             meta = {"indexes": ["$title"]}
 
         indexes = Book.objects._collection.index_information()
-        self.assertIn("title_text", indexes)
+        assert "title_text" in indexes
         key = indexes["title_text"]["key"]
-        self.assertIn(("_fts", "text"), key)
+        assert ("_fts", "text") in key
 
     def test_hashed_indexes(self):
         class Book(Document):
@@ -989,8 +982,8 @@ class TestIndexes(unittest.TestCase):
             meta = {"indexes": ["#ref_id"]}
 
         indexes = Book.objects._collection.index_information()
-        self.assertIn("ref_id_hashed", indexes)
-        self.assertIn(("ref_id", "hashed"), indexes["ref_id_hashed"]["key"])
+        assert "ref_id_hashed" in indexes
+        assert ("ref_id", "hashed") in indexes["ref_id_hashed"]["key"]
 
     def test_indexes_after_database_drop(self):
         """
@@ -1027,7 +1020,8 @@ class TestIndexes(unittest.TestCase):
 
             # Create Post #2
             post2 = BlogPost(title="test2", slug="test")
-            self.assertRaises(NotUniqueError, post2.save)
+            with pytest.raises(NotUniqueError):
+                post2.save()
         finally:
             # Drop the temporary database at the end
            connection.drop_database("tempdatabase")
@@ -1074,15 +1068,12 @@ class TestIndexes(unittest.TestCase):
                    "dropDups"
                ]  # drop the index dropDups - it is deprecated in MongoDB 3+
 
-        self.assertEqual(
-            index_info,
-            {
-                "txt_1": {"key": [("txt", 1)], "background": False},
-                "_id_": {"key": [("_id", 1)]},
-                "txt2_1": {"key": [("txt2", 1)], "background": False},
-                "_cls_1": {"key": [("_cls", 1)], "background": False},
-            },
-        )
+        assert index_info == {
+            "txt_1": {"key": [("txt", 1)], "background": False},
+            "_id_": {"key": [("_id", 1)]},
+            "txt2_1": {"key": [("txt2", 1)], "background": False},
+            "_cls_1": {"key": [("_cls", 1)], "background": False},
+        }
 
     def test_compound_index_underscore_cls_not_overwritten(self):
         """
@@ -1105,7 +1096,7 @@ class TestIndexes(unittest.TestCase):
         TestDoc.ensure_indexes()
 
         index_info = TestDoc._get_collection().index_information()
-        self.assertIn("shard_1_1__cls_1_txt_1_1", index_info)
+        assert "shard_1_1__cls_1_txt_1_1" in index_info
 
 
 if __name__ == "__main__":