From 68e8584520cb57c9db649b38db64e9f92fb7e4da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 13 Jun 2019 23:29:02 +0200 Subject: [PATCH 001/216] add pypy3 back to travis and improve python 3.7 integration --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 38924a86..cf66da6a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,7 +22,9 @@ python: - 2.7 - 3.5 - 3.6 +- 3.7 - pypy +- pypy3 env: global: @@ -41,8 +43,6 @@ matrix: env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x - python: 3.6 env: MONGODB=${MONGODB_3_6} PYMONGO=3.x - - python: 3.7 - env: MONGODB=${MONGODB_3_6} PYMONGO=3.x install: From 6a4c342e45fbd556dc1938923b7bed17031c79c7 Mon Sep 17 00:00:00 2001 From: Dmitry Voronenkov Date: Tue, 18 Jun 2019 16:13:29 +0300 Subject: [PATCH 002/216] Supported updates of an array by negative index --- mongoengine/base/datastructures.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index fafc08b7..9307556f 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -108,6 +108,9 @@ class BaseList(list): super(BaseList, self).__init__(list_items) def __getitem__(self, key): + # change index to positive value because MongoDB does not support negative one + if isinstance(key, int) and key < 0: + key = len(self) + key value = super(BaseList, self).__getitem__(key) if isinstance(key, slice): From 8f57279dc786da910590207bd988717e712d764a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 19 Jun 2019 23:04:23 +0200 Subject: [PATCH 003/216] remove pymongo deprecated methods: find_and_modify & remove --- mongoengine/fields.py | 20 ++++++++++++-------- mongoengine/queryset/base.py | 7 ++++--- tests/queryset/queryset.py | 4 ++-- 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index aa5aa805..2a4a2ad8 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -10,6 +10,7 @@ from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON import gridfs import pymongo +from pymongo import ReturnDocument import six from six import iteritems @@ -1964,10 +1965,12 @@ class SequenceField(BaseField): sequence_name = self.get_sequence_name() sequence_id = '%s.%s' % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - counter = collection.find_and_modify(query={'_id': sequence_id}, - update={'$inc': {'next': 1}}, - new=True, - upsert=True) + + counter = collection.find_one_and_update( + filter={'_id': sequence_id}, + update={'$inc': {'next': 1}}, + return_document=ReturnDocument.AFTER, + upsert=True) return self.value_decorator(counter['next']) def set_next_value(self, value): @@ -1975,10 +1978,11 @@ class SequenceField(BaseField): sequence_name = self.get_sequence_name() sequence_id = "%s.%s" % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - counter = collection.find_and_modify(query={"_id": sequence_id}, - update={"$set": {"next": value}}, - new=True, - upsert=True) + counter = collection.find_one_and_update( + filter={"_id": sequence_id}, + update={"$set": {"next": value}}, + return_document=ReturnDocument.AFTER, + upsert=True) return self.value_decorator(counter['next']) def get_next_value(self): diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 49e154fb..7acd9e58 100644 --- a/mongoengine/queryset/base.py 
+++ b/mongoengine/queryset/base.py @@ -480,9 +480,10 @@ class BaseQuerySet(object): write_concern=write_concern, **{'pull_all__%s' % field_name: self}) - result = queryset._collection.remove(queryset._query, **write_concern) - if result: - return result.get('n') + with set_write_concern(queryset._collection, write_concern) as collection: + result = collection.delete_many(queryset._query) + if result.acknowledged: + return result.deleted_count def update(self, upsert=False, multi=True, write_concern=None, full_result=False, **update): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 55f256d9..c58176b8 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -1857,8 +1857,8 @@ class QuerySetTest(unittest.TestCase): self.Person.objects()[:1].delete() self.assertEqual(1, BlogPost.objects.count()) - def test_limit_with_write_concern_0(self): - + def test_delete_edge_case_with_write_concern_0_return_None(self): + """Return None when write is unacknowledged""" p1 = self.Person(name="User Z", age=20).save() del_result = p1.delete(w=0) self.assertEqual(None, del_result) From ae0384df29bae05c9c86414a4edb45a533dd02d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Thu, 20 Jun 2019 11:25:51 +0200 Subject: [PATCH 004/216] Improve Document.meta.shard_key docs (#2099) This closes #2096. Previous documentation of the shard_key meta attribute was missing the crucial point that it really only matters if your collection is sharded over a compound index. --- docs/guide/defining-documents.rst | 18 ++++++++++++------ mongoengine/document.py | 10 ++++++---- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index ae9d3b36..9dcca88c 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -714,11 +714,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: Shard keys ========== -If your collection is sharded, then you need to specify the shard key as a tuple, -using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. -This ensures that the shard key is sent with the query when calling the -:meth:`~mongoengine.document.Document.save` or -:meth:`~mongoengine.document.Document.update` method on an existing +If your collection is sharded by multiple keys, then you can improve shard +routing (and thus the performance of your application) by specifying the shard +key, using the :attr:`shard_key` attribute of +:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple. + +This ensures that the full shard key is sent with the query when calling +methods such as :meth:`~mongoengine.document.Document.save`, +:meth:`~mongoengine.document.Document.update`, +:meth:`~mongoengine.document.Document.modify`, or +:meth:`~mongoengine.document.Document.delete` on an existing :class:`~mongoengine.Document` instance:: class LogEntry(Document): @@ -728,7 +733,8 @@ This ensures that the shard key is sent with the query when calling the data = StringField() meta = { - 'shard_key': ('machine', 'timestamp',) + 'shard_key': ('machine', 'timestamp'), + 'indexes': ('machine', 'timestamp'), } .. 
_document-inheritance: diff --git a/mongoengine/document.py b/mongoengine/document.py index cc35c440..520de5bf 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -544,7 +544,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): @property def _qs(self): - """Return the queryset to use for updating / reloading / deletions.""" + """Return the default queryset corresponding to this document.""" if not hasattr(self, '__objects'): self.__objects = QuerySet(self, self._get_collection()) return self.__objects @@ -552,9 +552,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): @property def _object_key(self): """Get the query dict that can be used to fetch this object from - the database. Most of the time it's a simple PK lookup, but in - case of a sharded collection with a compound shard key, it can - contain a more complex query. + the database. + + Most of the time the dict is a simple PK lookup, but in case of + a sharded collection with a compound shard key, it can contain a more + complex query. """ select_dict = {'pk': self.pk} shard_key = self.__class__._meta.get('shard_key', tuple()) From 799775b3a76611cef40d4aecbf3f4bf2f298c9c9 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Thu, 20 Jun 2019 12:18:58 +0200 Subject: [PATCH 005/216] Slightly cleaner docstring of BaseQuerySet.no_sub_classes [ci skip] --- mongoengine/queryset/base.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 49e154fb..d853cf06 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -73,6 +73,7 @@ class BaseQuerySet(object): self._initial_query = { '_cls': {'$in': self._document._subclasses}} self._loaded_fields = QueryFieldList(always_include=['_cls']) + self._cursor_obj = None self._limit = None self._skip = None @@ -707,8 +708,9 @@ class BaseQuerySet(object): return queryset def no_sub_classes(self): - """ - Only return instances of this document and not any inherited documents + """Filter for only the instances of this specific document. + + Do NOT return any inherited documents. """ if self._document._meta.get('allow_inheritance') is True: self._initial_query = {'_cls': self._document._class_name} From 216217e2c68c3b629e84f4af590af399d569f102 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Fri, 21 Jun 2019 13:48:24 +0200 Subject: [PATCH 006/216] Datastructures comments: fix typos and tweak formatting [ci skip] --- mongoengine/base/datastructures.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index fafc08b7..cce71846 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -11,18 +11,20 @@ __all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyRe def mark_as_changed_wrapper(parent_method): - """Decorators that ensures _mark_as_changed method gets called""" + """Decorator that ensures _mark_as_changed method gets called.""" def wrapper(self, *args, **kwargs): - result = parent_method(self, *args, **kwargs) # Can't use super() in the decorator + # Can't use super() in the decorator. 
+ result = parent_method(self, *args, **kwargs) self._mark_as_changed() return result return wrapper def mark_key_as_changed_wrapper(parent_method): - """Decorators that ensures _mark_as_changed method gets called with the key argument""" + """Decorator that ensures _mark_as_changed method gets called with the key argument""" def wrapper(self, key, *args, **kwargs): - result = parent_method(self, key, *args, **kwargs) # Can't use super() in the decorator + # Can't use super() in the decorator. + result = parent_method(self, key, *args, **kwargs) self._mark_as_changed(key) return result return wrapper From a4fe091a513a9fdc91ce2e45670b543643380b96 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Fri, 21 Jun 2019 13:51:53 +0200 Subject: [PATCH 007/216] Cleaner code & comments in BaseField.__set__ --- mongoengine/base/fields.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index fe96f15b..9ce426c9 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -128,10 +128,9 @@ class BaseField(object): return instance._data.get(self.name) def __set__(self, instance, value): - """Descriptor for assigning a value to a field in a document. - """ - # If setting to None and there is a default - # Then set the value to the default value + """Descriptor for assigning a value to a field in a document.""" + # If setting to None and there is a default value provided for this + # field, then set the value to the default value. if value is None: if self.null: value = None @@ -142,12 +141,16 @@ class BaseField(object): if instance._initialised: try: - if (self.name not in instance._data or - instance._data[self.name] != value): + value_has_changed = ( + self.name not in instance._data or + instance._data[self.name] != value + ) + if value_has_changed: instance._mark_as_changed(self.name) except Exception: - # Values cant be compared eg: naive and tz datetimes - # So mark it as changed + # Some values can't be compared and throw an error when we + # attempt to do so (e.g. tz-naive and tz-aware datetimes). + # Mark the field as changed in such cases. instance._mark_as_changed(self.name) EmbeddedDocument = _import_class('EmbeddedDocument') @@ -157,6 +160,7 @@ class BaseField(object): for v in value: if isinstance(v, EmbeddedDocument): v._instance = weakref.proxy(instance) + instance._data[self.name] = value def error(self, message='', errors=None, field_name=None): From f45552f8f8ea2d4094c78cb60caee3c62f9d6c1e Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 24 Jun 2019 15:44:35 +0200 Subject: [PATCH 008/216] Drop support for positional arguments when instantiating a document For example, if you had the following class: ``` class Person(Document): name = StringField() age = IntField() ``` You could instantiate an object of such class by doing one of the following: 1. `new_person = Person('Tom', 30)` 2. `new_person = Person('Tom', age=30)` 3. `new_person = Person(name='Tom', age=30)` From now on, only option (3) is allowed. Supporting positional arguments may sound like a reasonable idea in this heavily simplified example, but in real life it's almost never what you want (especially if you use inheritance in your document definitions) and it may lead to ugly bugs. We should not rely on the *order* of fields to match a given value to a given name. This also helps us simplify the code e.g. by dropping the confusing (and undocumented) `BaseDocument._auto_id_field` attribute. 
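To make the new contract concrete, here is a small usage sketch — not part of the patch itself, it just reuses the `Person` example from above (document instantiation needs no DB connection, so this runs as-is):

```
from mongoengine import Document, IntField, StringField

class Person(Document):
    name = StringField()
    age = IntField()

# Option (3) remains the only supported form.
person = Person(name='Tom', age=30)

# Options (1) and (2) now fail fast with a TypeError that points
# the caller at `field_name=value` keyword arguments.
try:
    Person('Tom', 30)
except TypeError as exc:
    print(exc)
```
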
--- docs/changelog.rst | 2 + mongoengine/base/document.py | 16 ++---- mongoengine/base/metaclasses.py | 57 +++++++++++++------ tests/document/instance.py | 68 +++++++++++------------ tests/fields/test_lazy_reference_field.py | 8 +-- 5 files changed, 82 insertions(+), 69 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e82cc124..6a56325e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,8 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- BREAKING CHANGE: Drop support for positional arguments when instantiating a document. #? + - From now on keyword arguments (e.g. `Doc(field_name=value)`) are required. Changes in 0.18.1 ================= diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 057258f5..047d50a4 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -60,18 +60,10 @@ class BaseDocument(object): self._created = True if args: - # Combine positional arguments with named arguments. - # We only want named arguments. - field = iter(self._fields_ordered) - # If its an automatic id field then skip to the first defined field - if getattr(self, '_auto_id_field', False): - next(field) - for value in args: - name = next(field) - if name in values: - raise TypeError( - 'Multiple values for keyword argument "%s"' % name) - values[name] = value + raise TypeError( + 'Instantiating a document with positional arguments is not ' + 'supported. Please use `field_name=value` keyword arguments.' + ) __auto_convert = values.pop('__auto_convert', True) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 6f507eaa..c5363b09 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -385,30 +385,35 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class._meta['id_field'] = field_name new_class.id = field - # Set primary key if not defined by the document - new_class._auto_id_field = getattr(parent_doc_cls, - '_auto_id_field', False) + # If the document doesn't explicitly define a primary key field, create + # one. Make it an ObjectIdField and give it a non-clashing name ("id" + # by default, but can be different if that one's taken). if not new_class._meta.get('id_field'): - # After 0.10, find not existing names, instead of overwriting id_name, id_db_name = mcs.get_auto_id_names(new_class) - new_class._auto_id_field = True new_class._meta['id_field'] = id_name new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) new_class._fields[id_name].name = id_name new_class.id = new_class._fields[id_name] new_class._db_field_map[id_name] = id_db_name new_class._reverse_db_field_map[id_db_name] = id_name - # Prepend id field to _fields_ordered + + # Prepend the ID field to _fields_ordered (so that it's *always* + # the first field). new_class._fields_ordered = (id_name, ) + new_class._fields_ordered - # Merge in exceptions with parent hierarchy + # Merge in exceptions with parent hierarchy. exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) module = attrs.get('__module__') for exc in exceptions_to_merge: name = exc.__name__ - parents = tuple(getattr(base, name) for base in flattened_bases - if hasattr(base, name)) or (exc,) - # Create new exception and set to new_class + parents = tuple( + getattr(base, name) + for base in flattened_bases + if hasattr(base, name) + ) or (exc,) + + # Create a new exception and set it as an attribute on the new + # class. 
exception = type(name, parents, {'__module__': module}) setattr(new_class, name, exception) @@ -416,17 +421,35 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): @classmethod def get_auto_id_names(mcs, new_class): + """Find a name for the automatic ID field for the given new class. + + Return a two-element tuple where the first item is the field name (i.e. + the attribute name on the object) and the second element is the DB + field name (i.e. the name of the key stored in MongoDB). + + Defaults to ('id', '_id'), or generates a non-clashing name in the form + of ('auto_id_X', '_auto_id_X') if the default name is already taken. + """ id_name, id_db_name = ('id', '_id') - if id_name not in new_class._fields and \ - id_db_name not in (v.db_field for v in new_class._fields.values()): + existing_fields = new_class._fields + existing_db_fields = (v.db_field for v in new_class._fields.values()) + if ( + id_name not in existing_fields and + id_db_name not in existing_db_fields + ): return id_name, id_db_name - id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 - while id_name in new_class._fields or \ - id_db_name in (v.db_field for v in new_class._fields.values()): + + id_basename, id_db_basename, i = ('auto_id', '_auto_id', 0) + while True: id_name = '{0}_{1}'.format(id_basename, i) id_db_name = '{0}_{1}'.format(id_db_basename, i) - i += 1 - return id_name, id_db_name + if ( + id_name not in existing_fields and + id_db_name not in existing_db_fields + ): + return id_name, id_db_name + else: + i += 1 class MetaDict(dict): diff --git a/tests/document/instance.py b/tests/document/instance.py index 02617b67..4be8aa45 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -3130,48 +3130,44 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(classic_doc._data, dict_doc._data) def test_positional_creation(self): - """Ensure that document may be created using positional arguments.""" - person = self.Person("Test User", 42) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 42) + """Document cannot be instantiated using positional arguments.""" + with self.assertRaises(TypeError) as e: + person = self.Person("Test User", 42) + expected_msg = ( + 'Instantiating a document with positional arguments is not ' + 'supported. Please use `field_name=value` keyword arguments.' + ) + self.assertEqual(e.exception.message, expected_msg) def test_mixed_creation(self): - """Ensure that document may be created using mixed arguments.""" - person = self.Person("Test User", age=42) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 42) + """Document cannot be instantiated using mixed arguments.""" + with self.assertRaises(TypeError) as e: + person = self.Person("Test User", age=42) + expected_msg = ( + 'Instantiating a document with positional arguments is not ' + 'supported. Please use `field_name=value` keyword arguments.' + ) + self.assertEqual(e.exception.message, expected_msg) def test_positional_creation_embedded(self): - """Ensure that embedded document may be created using positional - arguments. - """ - job = self.Job("Test Job", 4) - self.assertEqual(job.name, "Test Job") - self.assertEqual(job.years, 4) + """Embedded document cannot be created using positional arguments.""" + with self.assertRaises(TypeError) as e: + job = self.Job("Test Job", 4) + expected_msg = ( + 'Instantiating a document with positional arguments is not ' + 'supported. Please use `field_name=value` keyword arguments.' 
+ ) + self.assertEqual(e.exception.message, expected_msg) def test_mixed_creation_embedded(self): - """Ensure that embedded document may be created using mixed - arguments. - """ - job = self.Job("Test Job", years=4) - self.assertEqual(job.name, "Test Job") - self.assertEqual(job.years, 4) - - def test_mixed_creation_dynamic(self): - """Ensure that document may be created using mixed arguments.""" - class Person(DynamicDocument): - name = StringField() - - person = Person("Test User", age=42) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 42) - - def test_bad_mixed_creation(self): - """Ensure that document gives correct error when duplicating - arguments. - """ - with self.assertRaises(TypeError): - return self.Person("Test User", 42, name="Bad User") + """Embedded document cannot be created using mixed arguments.""" + with self.assertRaises(TypeError) as e: + job = self.Job("Test Job", years=4) + expected_msg = ( + 'Instantiating a document with positional arguments is not ' + 'supported. Please use `field_name=value` keyword arguments.' + ) + self.assertEqual(e.exception.message, expected_msg) def test_data_contains_id_field(self): """Ensure that asking for _data returns 'id'.""" diff --git a/tests/fields/test_lazy_reference_field.py b/tests/fields/test_lazy_reference_field.py index b10506e7..1d6e6e79 100644 --- a/tests/fields/test_lazy_reference_field.py +++ b/tests/fields/test_lazy_reference_field.py @@ -303,8 +303,8 @@ class TestLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - animal1 = Animal('doggo').save() - animal2 = Animal('cheeta').save() + animal1 = Animal(name='doggo').save() + animal2 = Animal(name='cheeta').save() def check_fields_type(occ): self.assertIsInstance(occ.direct, LazyReference) @@ -542,8 +542,8 @@ class TestGenericLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - animal1 = Animal('doggo').save() - animal2 = Animal('cheeta').save() + animal1 = Animal(name='doggo').save() + animal2 = Animal(name='cheeta').save() def check_fields_type(occ): self.assertIsInstance(occ.direct, LazyReference) From 8e69008699d20268956b6c9f3b2406e5d110c6d8 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 24 Jun 2019 16:00:21 +0200 Subject: [PATCH 009/216] Fill in the PR # in the changelog [ci skip] --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6a56325e..3754b010 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,7 +6,7 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). -- BREAKING CHANGE: Drop support for positional arguments when instantiating a document. #? +- BREAKING CHANGE: Drop support for positional arguments when instantiating a document. #2103 - From now on keyword arguments (e.g. `Doc(field_name=value)`) are required. 
Changes in 0.18.1 From b661afba0158befeaa8abc95f8c7184ede5aeeac Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 25 Jun 2019 11:34:31 +0200 Subject: [PATCH 010/216] Use set comprehensions for existing_fields & existing_db_fields --- mongoengine/base/metaclasses.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index c5363b09..44b74509 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -431,8 +431,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): of ('auto_id_X', '_auto_id_X') if the default name is already taken. """ id_name, id_db_name = ('id', '_id') - existing_fields = new_class._fields - existing_db_fields = (v.db_field for v in new_class._fields.values()) + existing_fields = {field_name for field_name in new_class._fields} + existing_db_fields = {v.db_field for v in new_class._fields.values()} if ( id_name not in existing_fields and id_db_name not in existing_db_fields From 0578cdb62ef762454941e872ca026ada5a01e5f9 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 25 Jun 2019 11:41:27 +0200 Subject: [PATCH 011/216] Cleaner loop using itertools.count() --- mongoengine/base/metaclasses.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 44b74509..c3ced5bb 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -1,3 +1,4 @@ +import itertools import warnings import six @@ -440,7 +441,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): return id_name, id_db_name id_basename, id_db_basename, i = ('auto_id', '_auto_id', 0) - while True: + for i in itertools.count(): id_name = '{0}_{1}'.format(id_basename, i) id_db_name = '{0}_{1}'.format(id_db_basename, i) if ( @@ -448,8 +449,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): id_db_name not in existing_db_fields ): return id_name, id_db_name - else: - i += 1 class MetaDict(dict): From e57d834a0d9752dc92497d08b7fb3648212686b8 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 25 Jun 2019 12:41:59 +0200 Subject: [PATCH 012/216] Fix automated tests for py3 --- tests/document/instance.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index 4be8aa45..06f65076 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -3137,7 +3137,7 @@ class InstanceTest(MongoDBTestCase): 'Instantiating a document with positional arguments is not ' 'supported. Please use `field_name=value` keyword arguments.' ) - self.assertEqual(e.exception.message, expected_msg) + self.assertEqual(str(e.exception), expected_msg) def test_mixed_creation(self): """Document cannot be instantiated using mixed arguments.""" @@ -3147,7 +3147,7 @@ class InstanceTest(MongoDBTestCase): 'Instantiating a document with positional arguments is not ' 'supported. Please use `field_name=value` keyword arguments.' ) - self.assertEqual(e.exception.message, expected_msg) + self.assertEqual(str(e.exception), expected_msg) def test_positional_creation_embedded(self): """Embedded document cannot be created using positional arguments.""" @@ -3157,7 +3157,7 @@ class InstanceTest(MongoDBTestCase): 'Instantiating a document with positional arguments is not ' 'supported. Please use `field_name=value` keyword arguments.' 
) - self.assertEqual(e.exception.message, expected_msg) + self.assertEqual(str(e.exception), expected_msg) def test_mixed_creation_embedded(self): """Embedded document cannot be created using mixed arguments.""" @@ -3167,7 +3167,7 @@ class InstanceTest(MongoDBTestCase): 'Instantiating a document with positional arguments is not ' 'supported. Please use `field_name=value` keyword arguments.' ) - self.assertEqual(e.exception.message, expected_msg) + self.assertEqual(str(e.exception), expected_msg) def test_data_contains_id_field(self): """Ensure that asking for _data returns 'id'.""" From 5c91877b696318680bff7c4866576661dc299a52 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 25 Jun 2019 16:48:51 +0200 Subject: [PATCH 013/216] Fix the Travis deployment condition See https://github.com/MongoEngine/mongoengine/issues/2104 for details. For now I'm hardcoding `$MONGODB = 3.4.17` just to get a release out there, but we should probably use the globals going forward. Will do that in a follow-up commit once I get the `travis-conditions` gem up and running and hence can test `.travis.yml` changes without deploying. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7bbeef8b..f9993e79 100644 --- a/.travis.yml +++ b/.travis.yml @@ -102,5 +102,5 @@ deploy: on: tags: true repo: MongoEngine/mongoengine - condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4) + condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4.17) python: 2.7 From 1ead7f9b2b1ecfeba19eadb6509af9727d3226fe Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 25 Jun 2019 16:51:56 +0200 Subject: [PATCH 014/216] Add changelog entries for v0.18.2 --- docs/changelog.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index e82cc124..487bc86a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,6 +7,11 @@ Development =========== - (Fill this out as you fix issues and develop your features). 
+Changes in 0.18.2 +================= +- Replace some of the deprecated PyMongo v2.x methods with their v3.x equivalents #2097 +- Various code clarity and documentation improvements + Changes in 0.18.1 ================= - Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields From af292b0ec2cc65a4ce6684348d4fae301d527ef1 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 25 Jun 2019 16:52:31 +0200 Subject: [PATCH 015/216] Bump version to v0.18.2 --- mongoengine/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index d6a50766..bb7a4e57 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) + list(signals.__all__) + list(errors.__all__)) -VERSION = (0, 18, 1) +VERSION = (0, 18, 2) def get_version(): From 91899acfe5a7c2cd5354990153aaebb6269cf5c9 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Wed, 26 Jun 2019 15:14:43 +0200 Subject: [PATCH 016/216] Clarify unack'd write concern not returning the deleted count [ci skip] --- mongoengine/queryset/base.py | 4 ++++ tests/queryset/queryset.py | 6 +++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 376e3fab..85616c85 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -483,6 +483,10 @@ class BaseQuerySet(object): with set_write_concern(queryset._collection, write_concern) as collection: result = collection.delete_many(queryset._query) + + # If we're using an unack'd write concern, we don't really know how + # many items have been deleted at this point, hence we only return + # the count for ack'd ops. if result.acknowledged: return result.deleted_count diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index c58176b8..c86e4095 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -1858,7 +1858,11 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(1, BlogPost.objects.count()) def test_delete_edge_case_with_write_concern_0_return_None(self): - """Return None when write is unacknowledged""" + """Return None if the delete operation is unacknowledged. + + If we use an unack'd write concern, we don't really know how many + documents have been deleted. + """ p1 = self.Person(name="User Z", age=20).save() del_result = p1.delete(w=0) self.assertEqual(None, del_result) From b47669403b49428f00716ee983ffd269e402557f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Thu, 27 Jun 2019 13:05:54 +0200 Subject: [PATCH 017/216] Format the codebase using Black (#2109) This commit: 1. Formats all of our existing code using `black`. 2. Adds a note about using `black` to `CONTRIBUTING.rst`. 3. Runs `black --check` as part of CI (failing builds that aren't properly formatted). 
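As a rough sketch of what the new CI gate means for contributors (command names only — the patch does not pin a Black version), the same check can be run locally before pushing:

```
pip install black
black --check .   # what CI runs: exits non-zero if any file would be reformatted
black .           # rewrite the files in place
```
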
--- .travis.yml | 9 +- CONTRIBUTING.rst | 8 +- benchmarks/test_basic_doc_ops.py | 92 +- benchmarks/test_inserts.py | 42 +- docs/code/tumblelog.py | 36 +- docs/conf.py | 93 +- mongoengine/__init__.py | 13 +- mongoengine/base/__init__.py | 23 +- mongoengine/base/common.py | 51 +- mongoengine/base/datastructures.py | 70 +- mongoengine/base/document.py | 528 ++-- mongoengine/base/fields.py | 278 ++- mongoengine/base/metaclasses.py | 333 +-- mongoengine/common.py | 22 +- mongoengine/connection.py | 190 +- mongoengine/context_managers.py | 51 +- mongoengine/dereference.py | 110 +- mongoengine/document.py | 400 +-- mongoengine/errors.py | 44 +- mongoengine/fields.py | 890 ++++--- mongoengine/mongodb_support.py | 2 +- mongoengine/pymongo_support.py | 2 +- mongoengine/queryset/__init__.py | 21 +- mongoengine/queryset/base.py | 538 ++-- mongoengine/queryset/field_list.py | 13 +- mongoengine/queryset/manager.py | 4 +- mongoengine/queryset/queryset.py | 36 +- mongoengine/queryset/transform.py | 346 +-- mongoengine/queryset/visitor.py | 14 +- mongoengine/signals.py | 43 +- requirements.txt | 1 + setup.cfg | 2 +- setup.py | 64 +- tests/all_warnings/__init__.py | 18 +- tests/document/__init__.py | 2 +- tests/document/class_methods.py | 229 +- tests/document/delta.py | 712 +++--- tests/document/dynamic.py | 255 +- tests/document/indexes.py | 697 +++--- tests/document/inheritance.py | 477 ++-- tests/document/instance.py | 1273 +++++----- tests/document/json_serialisation.py | 45 +- tests/document/validation.py | 108 +- tests/fields/fields.py | 974 ++++---- tests/fields/file_tests.py | 141 +- tests/fields/geo.py | 149 +- tests/fields/test_binary_field.py | 33 +- tests/fields/test_boolean_field.py | 15 +- tests/fields/test_cached_reference_field.py | 312 +-- tests/fields/test_complex_datetime_field.py | 31 +- tests/fields/test_date_field.py | 22 +- tests/fields/test_datetime_field.py | 45 +- tests/fields/test_decimal_field.py | 55 +- tests/fields/test_dict_field.py | 205 +- tests/fields/test_email_field.py | 45 +- tests/fields/test_embedded_document_field.py | 152 +- tests/fields/test_float_field.py | 6 +- tests/fields/test_int_field.py | 4 +- tests/fields/test_lazy_reference_field.py | 120 +- tests/fields/test_long_field.py | 9 +- tests/fields/test_map_field.py | 59 +- tests/fields/test_reference_field.py | 51 +- tests/fields/test_sequence_field.py | 148 +- tests/fields/test_url_field.py | 24 +- tests/fields/test_uuid_field.py | 19 +- tests/fixtures.py | 8 +- tests/queryset/field_list.py | 249 +- tests/queryset/geo.py | 178 +- tests/queryset/modify.py | 34 +- tests/queryset/pickable.py | 24 +- tests/queryset/queryset.py | 2345 +++++++++--------- tests/queryset/transform.py | 240 +- tests/queryset/visitor.py | 205 +- tests/test_common.py | 1 - tests/test_connection.py | 393 +-- tests/test_context_managers.py | 111 +- tests/test_datastructures.py | 205 +- tests/test_dereference.py | 252 +- tests/test_replicaset_connection.py | 11 +- tests/test_signals.py | 392 +-- tests/test_utils.py | 21 +- tests/utils.py | 7 +- 82 files changed, 8405 insertions(+), 7075 deletions(-) diff --git a/.travis.yml b/.travis.yml index f9993e79..8af73c6b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -52,19 +52,22 @@ install: - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version - # Install python dependencies + # Install Python dependencies. 
- pip install --upgrade pip - pip install coveralls - pip install flake8 flake8-import-order - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0 - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) - # Install the tox venv + # Install the tox venv. - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test + # Install black for Python v3.7 only. + - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi before_script: - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork - - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for py27 + - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for Python 2.7 only + - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only - mongo --eval 'db.version();' # Make sure mongo is awake script: diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f7b15c85..4711c1d3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -31,12 +31,8 @@ build. You should ensure that your code is properly converted by Style Guide ----------- -MongoEngine aims to follow `PEP8 `_ -including 4 space indents. When possible we try to stick to 79 character line -limits. However, screens got bigger and an ORM has a strong focus on -readability and if it can help, we accept 119 as maximum line length, in a -similar way as `django does -`_ +MongoEngine uses `black `_ for code +formatting. 
Testing ------- diff --git a/benchmarks/test_basic_doc_ops.py b/benchmarks/test_basic_doc_ops.py index 06f0538b..e840f97a 100644 --- a/benchmarks/test_basic_doc_ops.py +++ b/benchmarks/test_basic_doc_ops.py @@ -1,11 +1,18 @@ from timeit import repeat import mongoengine -from mongoengine import (BooleanField, Document, EmailField, EmbeddedDocument, - EmbeddedDocumentField, IntField, ListField, - StringField) +from mongoengine import ( + BooleanField, + Document, + EmailField, + EmbeddedDocument, + EmbeddedDocumentField, + IntField, + ListField, + StringField, +) -mongoengine.connect(db='mongoengine_benchmark_test') +mongoengine.connect(db="mongoengine_benchmark_test") def timeit(f, n=10000): @@ -24,46 +31,41 @@ def test_basic(): def init_book(): return Book( - name='Always be closing', + name="Always be closing", pages=100, - tags=['self-help', 'sales'], + tags=["self-help", "sales"], is_published=True, - author_email='alec@example.com', + author_email="alec@example.com", ) - print('Doc initialization: %.3fus' % (timeit(init_book, 1000) * 10**6)) + print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6)) b = init_book() - print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * 10**6)) + print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6)) print( - 'Doc setattr: %.3fus' % ( - timeit(lambda: setattr(b, 'name', 'New name'), 10000) * 10**6 - ) + "Doc setattr: %.3fus" + % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6) ) - print('Doc to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * 10**6)) + print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6)) - print('Doc validation: %.3fus' % (timeit(b.validate, 1000) * 10**6)) + print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6)) def save_book(): - b._mark_as_changed('name') - b._mark_as_changed('tags') + b._mark_as_changed("name") + b._mark_as_changed("tags") b.save() - print('Save to database: %.3fus' % (timeit(save_book, 100) * 10**6)) + print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6)) son = b.to_mongo() print( - 'Load from SON: %.3fus' % ( - timeit(lambda: Book._from_son(son), 1000) * 10**6 - ) + "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6) ) print( - 'Load from database: %.3fus' % ( - timeit(lambda: Book.objects[0], 100) * 10**6 - ) + "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6) ) def create_and_delete_book(): @@ -72,9 +74,8 @@ def test_basic(): b.delete() print( - 'Init + save to database + delete: %.3fms' % ( - timeit(create_and_delete_book, 10) * 10**3 - ) + "Init + save to database + delete: %.3fms" + % (timeit(create_and_delete_book, 10) * 10 ** 3) ) @@ -92,42 +93,36 @@ def test_big_doc(): def init_company(): return Company( - name='MongoDB, Inc.', + name="MongoDB, Inc.", contacts=[ - Contact( - name='Contact %d' % x, - title='CEO', - address='Address %d' % x, - ) + Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x) for x in range(1000) - ] + ], ) company = init_company() - print('Big doc to mongo: %.3fms' % (timeit(company.to_mongo, 100) * 10**3)) + print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3)) - print('Big doc validation: %.3fms' % (timeit(company.validate, 1000) * 10**3)) + print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3)) company.save() def save_company(): - company._mark_as_changed('name') - company._mark_as_changed('contacts') + company._mark_as_changed("name") + 
company._mark_as_changed("contacts") company.save() - print('Save to database: %.3fms' % (timeit(save_company, 100) * 10**3)) + print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3)) son = company.to_mongo() print( - 'Load from SON: %.3fms' % ( - timeit(lambda: Company._from_son(son), 100) * 10**3 - ) + "Load from SON: %.3fms" + % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3) ) print( - 'Load from database: %.3fms' % ( - timeit(lambda: Company.objects[0], 100) * 10**3 - ) + "Load from database: %.3fms" + % (timeit(lambda: Company.objects[0], 100) * 10 ** 3) ) def create_and_delete_company(): @@ -136,13 +131,12 @@ def test_big_doc(): c.delete() print( - 'Init + save to database + delete: %.3fms' % ( - timeit(create_and_delete_company, 10) * 10**3 - ) + "Init + save to database + delete: %.3fms" + % (timeit(create_and_delete_company, 10) * 10 ** 3) ) -if __name__ == '__main__': +if __name__ == "__main__": test_basic() - print('-' * 100) + print("-" * 100) test_big_doc() diff --git a/benchmarks/test_inserts.py b/benchmarks/test_inserts.py index 8113d988..fd017bae 100644 --- a/benchmarks/test_inserts.py +++ b/benchmarks/test_inserts.py @@ -26,10 +26,10 @@ myNoddys = noddy.find() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('PyMongo: Creating 10000 dictionaries.') + print("-" * 100) + print("PyMongo: Creating 10000 dictionaries.") t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ from pymongo import MongoClient, WriteConcern @@ -49,10 +49,10 @@ myNoddys = noddy.find() [n for n in myNoddys] # iterate """ - print('-' * 100) + print("-" * 100) print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) setup = """ from pymongo import MongoClient @@ -78,10 +78,10 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries.') + print("-" * 100) + print("MongoEngine: Creating 10000 dictionaries.") t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -96,10 +96,10 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries (using a single field assignment).') + print("-" * 100) + print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).") t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -112,10 +112,10 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) + print("-" * 100) print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -128,10 +128,12 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).') + print("-" * 100) + print( + 'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).' 
+ ) t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -144,10 +146,12 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).') + print("-" * 100) + print( + 'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).' + ) t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) if __name__ == "__main__": diff --git a/docs/code/tumblelog.py b/docs/code/tumblelog.py index 796336e6..3ca2384c 100644 --- a/docs/code/tumblelog.py +++ b/docs/code/tumblelog.py @@ -1,16 +1,19 @@ from mongoengine import * -connect('tumblelog') +connect("tumblelog") + class Comment(EmbeddedDocument): content = StringField() name = StringField(max_length=120) + class User(Document): email = StringField(required=True) first_name = StringField(max_length=50) last_name = StringField(max_length=50) + class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User) @@ -18,54 +21,57 @@ class Post(Document): comments = ListField(EmbeddedDocumentField(Comment)) # bugfix - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class TextPost(Post): content = StringField() + class ImagePost(Post): image_path = StringField() + class LinkPost(Post): link_url = StringField() + Post.drop_collection() -john = User(email='jdoe@example.com', first_name='John', last_name='Doe') +john = User(email="jdoe@example.com", first_name="John", last_name="Doe") john.save() -post1 = TextPost(title='Fun with MongoEngine', author=john) -post1.content = 'Took a look at MongoEngine today, looks pretty cool.' -post1.tags = ['mongodb', 'mongoengine'] +post1 = TextPost(title="Fun with MongoEngine", author=john) +post1.content = "Took a look at MongoEngine today, looks pretty cool." +post1.tags = ["mongodb", "mongoengine"] post1.save() -post2 = LinkPost(title='MongoEngine Documentation', author=john) -post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' -post2.tags = ['mongoengine'] +post2 = LinkPost(title="MongoEngine Documentation", author=john) +post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs" +post2.tags = ["mongoengine"] post2.save() -print('ALL POSTS') +print("ALL POSTS") print() for post in Post.objects: print(post.title) - #print '=' * post.title.count() + # print '=' * post.title.count() print("=" * 20) if isinstance(post, TextPost): print(post.content) if isinstance(post, LinkPost): - print('Link:', post.link_url) + print("Link:", post.link_url) print() print() -print('POSTS TAGGED \'MONGODB\'') +print("POSTS TAGGED 'MONGODB'") print() -for post in Post.objects(tags='mongodb'): +for post in Post.objects(tags="mongodb"): print(post.title) print() -num_posts = Post.objects(tags='mongodb').count() +num_posts = Post.objects(tags="mongodb").count() print('Found %d posts with tag "mongodb"' % num_posts) diff --git a/docs/conf.py b/docs/conf.py index 468e71e0..0d642e0c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,29 +20,29 @@ import mongoengine # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8' +# source_encoding = 'utf-8' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'MongoEngine' -copyright = u'2009, MongoEngine Authors' +project = u"MongoEngine" +copyright = u"2009, MongoEngine Authors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -55,68 +55,66 @@ release = mongoengine.get_version() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. -#unused_docs = [] +# unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. -exclude_trees = ['_build'] +exclude_trees = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -html_theme_options = { - 'canonical_url': 'http://docs.mongoengine.org/en/latest/' -} +html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". 
-#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 @@ -126,11 +124,11 @@ html_favicon = "favicon.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] +# html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. @@ -138,69 +136,68 @@ html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { - 'index': ['globaltoc.html', 'searchbox.html'], - '**': ['localtoc.html', 'relations.html', 'searchbox.html'] + "index": ["globaltoc.html", "searchbox.html"], + "**": ["localtoc.html", "relations.html", "searchbox.html"], } # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_use_modindex = True +# html_use_modindex = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = '' +# html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'MongoEnginedoc' +htmlhelp_basename = "MongoEnginedoc" # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). -latex_paper_size = 'a4' +latex_paper_size = "a4" # The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'MongoEngine.tex', 'MongoEngine Documentation', - 'Ross Lawley', 'manual'), + ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual") ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # Additional stuff for the LaTeX preamble. 
-#latex_preamble = '' +# latex_preamble = '' # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_use_modindex = True +# latex_use_modindex = True -autoclass_content = 'both' +autoclass_content = "both" diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index bb7a4e57..d7093d28 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -18,9 +18,14 @@ from mongoengine.queryset import * from mongoengine.signals import * -__all__ = (list(document.__all__) + list(fields.__all__) + - list(connection.__all__) + list(queryset.__all__) + - list(signals.__all__) + list(errors.__all__)) +__all__ = ( + list(document.__all__) + + list(fields.__all__) + + list(connection.__all__) + + list(queryset.__all__) + + list(signals.__all__) + + list(errors.__all__) +) VERSION = (0, 18, 2) @@ -31,7 +36,7 @@ def get_version(): For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. """ - return '.'.join(map(str, VERSION)) + return ".".join(map(str, VERSION)) __version__ = get_version() diff --git a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py index e069a147..dca0c4bb 100644 --- a/mongoengine/base/__init__.py +++ b/mongoengine/base/__init__.py @@ -12,17 +12,22 @@ from mongoengine.base.metaclasses import * __all__ = ( # common - 'UPDATE_OPERATORS', '_document_registry', 'get_document', - + "UPDATE_OPERATORS", + "_document_registry", + "get_document", # datastructures - 'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference', - + "BaseDict", + "BaseList", + "EmbeddedDocumentList", + "LazyReference", # document - 'BaseDocument', - + "BaseDocument", # fields - 'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField', - + "BaseField", + "ComplexBaseField", + "ObjectIdField", + "GeoJsonBaseField", # metaclasses - 'DocumentMetaclass', 'TopLevelDocumentMetaclass' + "DocumentMetaclass", + "TopLevelDocumentMetaclass", ) diff --git a/mongoengine/base/common.py b/mongoengine/base/common.py index 999fd23a..85897324 100644 --- a/mongoengine/base/common.py +++ b/mongoengine/base/common.py @@ -1,12 +1,25 @@ from mongoengine.errors import NotRegistered -__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') +__all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry") -UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul', - 'pop', 'push', 'push_all', 'pull', - 'pull_all', 'add_to_set', 'set_on_insert', - 'min', 'max', 'rename'} +UPDATE_OPERATORS = { + "set", + "unset", + "inc", + "dec", + "mul", + "pop", + "push", + "push_all", + "pull", + "pull_all", + "add_to_set", + "set_on_insert", + "min", + "max", + "rename", +} _document_registry = {} @@ -17,25 +30,33 @@ def get_document(name): doc = _document_registry.get(name, None) if not doc: # Possible old style name - single_end = name.split('.')[-1] - compound_end = '.%s' % single_end - possible_match = [k for k in _document_registry - if k.endswith(compound_end) or k == single_end] + single_end = name.split(".")[-1] + compound_end = ".%s" % single_end + possible_match = [ + k for k in _document_registry if k.endswith(compound_end) or k == single_end + ] if len(possible_match) == 1: doc = _document_registry.get(possible_match.pop(), None) if not doc: - raise NotRegistered(""" + raise NotRegistered( + """ `%s` has not been registered in the document registry. Importing the document class automatically registers it, has it been imported? 
- """.strip() % name) + """.strip() + % name + ) return doc def _get_documents_by_db(connection_alias, default_connection_alias): """Get all registered Documents class attached to a given database""" - def get_doc_alias(doc_cls): - return doc_cls._meta.get('db_alias', default_connection_alias) - return [doc_cls for doc_cls in _document_registry.values() - if get_doc_alias(doc_cls) == connection_alias] + def get_doc_alias(doc_cls): + return doc_cls._meta.get("db_alias", default_connection_alias) + + return [ + doc_cls + for doc_cls in _document_registry.values() + if get_doc_alias(doc_cls) == connection_alias + ] diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index cce71846..d1b5ae76 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -7,26 +7,36 @@ from six import iteritems from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned -__all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') +__all__ = ( + "BaseDict", + "StrictDict", + "BaseList", + "EmbeddedDocumentList", + "LazyReference", +) def mark_as_changed_wrapper(parent_method): """Decorator that ensures _mark_as_changed method gets called.""" + def wrapper(self, *args, **kwargs): # Can't use super() in the decorator. result = parent_method(self, *args, **kwargs) self._mark_as_changed() return result + return wrapper def mark_key_as_changed_wrapper(parent_method): """Decorator that ensures _mark_as_changed method gets called with the key argument""" + def wrapper(self, key, *args, **kwargs): # Can't use super() in the decorator. result = parent_method(self, key, *args, **kwargs) self._mark_as_changed(key) return result + return wrapper @@ -38,7 +48,7 @@ class BaseDict(dict): _name = None def __init__(self, dict_items, instance, name): - BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) @@ -55,15 +65,15 @@ class BaseDict(dict): def __getitem__(self, key): value = super(BaseDict, self).__getitem__(key) - EmbeddedDocument = _import_class('EmbeddedDocument') + EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument) and value._instance is None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): - value = BaseDict(value, None, '%s.%s' % (self._name, key)) + value = BaseDict(value, None, "%s.%s" % (self._name, key)) super(BaseDict, self).__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): - value = BaseList(value, None, '%s.%s' % (self._name, key)) + value = BaseList(value, None, "%s.%s" % (self._name, key)) super(BaseDict, self).__setitem__(key, value) value._instance = self._instance return value @@ -87,9 +97,9 @@ class BaseDict(dict): setdefault = mark_as_changed_wrapper(dict.setdefault) def _mark_as_changed(self, key=None): - if hasattr(self._instance, '_mark_as_changed'): + if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed('%s.%s' % (self._name, key)) + self._instance._mark_as_changed("%s.%s" % (self._name, key)) else: self._instance._mark_as_changed(self._name) @@ -102,7 +112,7 @@ class BaseList(list): _name = None def __init__(self, list_items, instance, name): - BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if 
isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) @@ -117,17 +127,17 @@ class BaseList(list): # to parent's instance. This is buggy for now but would require more work to be handled properly return value - EmbeddedDocument = _import_class('EmbeddedDocument') + EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument) and value._instance is None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): # Replace dict by BaseDict - value = BaseDict(value, None, '%s.%s' % (self._name, key)) + value = BaseDict(value, None, "%s.%s" % (self._name, key)) super(BaseList, self).__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): # Replace list by BaseList - value = BaseList(value, None, '%s.%s' % (self._name, key)) + value = BaseList(value, None, "%s.%s" % (self._name, key)) super(BaseList, self).__setitem__(key, value) value._instance = self._instance return value @@ -181,17 +191,14 @@ class BaseList(list): return self.__getitem__(slice(i, j)) def _mark_as_changed(self, key=None): - if hasattr(self._instance, '_mark_as_changed'): + if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed( - '%s.%s' % (self._name, key % len(self)) - ) + self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self))) else: self._instance._mark_as_changed(self._name) class EmbeddedDocumentList(BaseList): - def __init__(self, list_items, instance, name): super(EmbeddedDocumentList, self).__init__(list_items, instance, name) self._instance = instance @@ -276,12 +283,10 @@ class EmbeddedDocumentList(BaseList): """ values = self.__only_matches(self, kwargs) if len(values) == 0: - raise DoesNotExist( - '%s matching query does not exist.' % self._name - ) + raise DoesNotExist("%s matching query does not exist." 
% self._name) elif len(values) > 1: raise MultipleObjectsReturned( - '%d items returned, instead of 1' % len(values) + "%d items returned, instead of 1" % len(values) ) return values[0] @@ -362,7 +367,7 @@ class EmbeddedDocumentList(BaseList): class StrictDict(object): __slots__ = () - _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'} + _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} _classes = {} def __init__(self, **kwargs): @@ -370,14 +375,14 @@ class StrictDict(object): setattr(self, k, v) def __getitem__(self, key): - key = '_reserved_' + key if key in self._special_fields else key + key = "_reserved_" + key if key in self._special_fields else key try: return getattr(self, key) except AttributeError: raise KeyError(key) def __setitem__(self, key, value): - key = '_reserved_' + key if key in self._special_fields else key + key = "_reserved_" + key if key in self._special_fields else key return setattr(self, key, value) def __contains__(self, key): @@ -424,27 +429,32 @@ class StrictDict(object): @classmethod def create(cls, allowed_keys): - allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys) + allowed_keys_tuple = tuple( + ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys + ) allowed_keys = frozenset(allowed_keys_tuple) if allowed_keys not in cls._classes: + class SpecificStrictDict(cls): __slots__ = allowed_keys_tuple def __repr__(self): - return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items()) + return "{%s}" % ", ".join( + '"{0!s}": {1!r}'.format(k, v) for k, v in self.items() + ) cls._classes[allowed_keys] = SpecificStrictDict return cls._classes[allowed_keys] class LazyReference(DBRef): - __slots__ = ('_cached_doc', 'passthrough', 'document_type') + __slots__ = ("_cached_doc", "passthrough", "document_type") def fetch(self, force=False): if not self._cached_doc or force: self._cached_doc = self.document_type.objects.get(pk=self.pk) if not self._cached_doc: - raise DoesNotExist('Trying to dereference unknown document %s' % (self)) + raise DoesNotExist("Trying to dereference unknown document %s" % (self)) return self._cached_doc @property @@ -455,7 +465,9 @@ class LazyReference(DBRef): self.document_type = document_type self._cached_doc = cached_doc self.passthrough = passthrough - super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk) + super(LazyReference, self).__init__( + self.document_type._get_collection_name(), pk + ) def __getitem__(self, name): if not self.passthrough: @@ -464,7 +476,7 @@ class LazyReference(DBRef): return document[name] def __getattr__(self, name): - if not object.__getattribute__(self, 'passthrough'): + if not object.__getattribute__(self, "passthrough"): raise AttributeError() document = self.fetch() try: diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 047d50a4..928a00c2 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -9,19 +9,27 @@ from six import iteritems from mongoengine import signals from mongoengine.base.common import get_document -from mongoengine.base.datastructures import (BaseDict, BaseList, - EmbeddedDocumentList, - LazyReference, - StrictDict) +from mongoengine.base.datastructures import ( + BaseDict, + BaseList, + EmbeddedDocumentList, + LazyReference, + StrictDict, +) from mongoengine.base.fields import ComplexBaseField from mongoengine.common import _import_class -from mongoengine.errors import 
(FieldDoesNotExist, InvalidDocumentError, - LookUpError, OperationError, ValidationError) +from mongoengine.errors import ( + FieldDoesNotExist, + InvalidDocumentError, + LookUpError, + OperationError, + ValidationError, +) from mongoengine.python_support import Hashable -__all__ = ('BaseDocument', 'NON_FIELD_ERRORS') +__all__ = ("BaseDocument", "NON_FIELD_ERRORS") -NON_FIELD_ERRORS = '__all__' +NON_FIELD_ERRORS = "__all__" class BaseDocument(object): @@ -35,9 +43,16 @@ class BaseDocument(object): # field is primarily set via `_from_son` or `_clear_changed_fields`, # though there are also other methods that manipulate it. # 4. The codebase is littered with `hasattr` calls for `_changed_fields`. - __slots__ = ('_changed_fields', '_initialised', '_created', '_data', - '_dynamic_fields', '_auto_id_field', '_db_field_map', - '__weakref__') + __slots__ = ( + "_changed_fields", + "_initialised", + "_created", + "_data", + "_dynamic_fields", + "_auto_id_field", + "_db_field_map", + "__weakref__", + ) _dynamic = False _dynamic_lock = True @@ -61,27 +76,28 @@ class BaseDocument(object): if args: raise TypeError( - 'Instantiating a document with positional arguments is not ' - 'supported. Please use `field_name=value` keyword arguments.' + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." ) - __auto_convert = values.pop('__auto_convert', True) + __auto_convert = values.pop("__auto_convert", True) - __only_fields = set(values.pop('__only_fields', values)) + __only_fields = set(values.pop("__only_fields", values)) - _created = values.pop('_created', True) + _created = values.pop("_created", True) signals.pre_init.send(self.__class__, document=self, values=values) # Check if there are undefined fields supplied to the constructor, # if so raise an Exception. 
- if not self._dynamic and (self._meta.get('strict', True) or _created): + if not self._dynamic and (self._meta.get("strict", True) or _created): _undefined_fields = set(values.keys()) - set( - self._fields.keys() + ['id', 'pk', '_cls', '_text_score']) + self._fields.keys() + ["id", "pk", "_cls", "_text_score"] + ) if _undefined_fields: - msg = ( - 'The fields "{0}" do not exist on the document "{1}"' - ).format(_undefined_fields, self._class_name) + msg = ('The fields "{0}" do not exist on the document "{1}"').format( + _undefined_fields, self._class_name + ) raise FieldDoesNotExist(msg) if self.STRICT and not self._dynamic: @@ -100,22 +116,22 @@ class BaseDocument(object): value = getattr(self, key, None) setattr(self, key, value) - if '_cls' not in values: + if "_cls" not in values: self._cls = self._class_name # Set passed values after initialisation if self._dynamic: dynamic_data = {} for key, value in iteritems(values): - if key in self._fields or key == '_id': + if key in self._fields or key == "_id": setattr(self, key, value) else: dynamic_data[key] = value else: - FileField = _import_class('FileField') + FileField = _import_class("FileField") for key, value in iteritems(values): key = self._reverse_db_field_map.get(key, key) - if key in self._fields or key in ('id', 'pk', '_cls'): + if key in self._fields or key in ("id", "pk", "_cls"): if __auto_convert and value is not None: field = self._fields.get(key) if field and not isinstance(field, FileField): @@ -153,20 +169,20 @@ class BaseDocument(object): # Handle dynamic data only if an initialised dynamic document if self._dynamic and not self._dynamic_lock: - if not hasattr(self, name) and not name.startswith('_'): - DynamicField = _import_class('DynamicField') + if not hasattr(self, name) and not name.startswith("_"): + DynamicField = _import_class("DynamicField") field = DynamicField(db_field=name, null=True) field.name = name self._dynamic_fields[name] = field self._fields_ordered += (name,) - if not name.startswith('_'): + if not name.startswith("_"): value = self.__expand_dynamic_values(name, value) # Handle marking data as changed if name in self._dynamic_fields: self._data[name] = value - if hasattr(self, '_changed_fields'): + if hasattr(self, "_changed_fields"): self._mark_as_changed(name) try: self__created = self._created @@ -174,12 +190,12 @@ class BaseDocument(object): self__created = True if ( - self._is_document and - not self__created and - name in self._meta.get('shard_key', tuple()) and - self._data.get(name) != value + self._is_document + and not self__created + and name in self._meta.get("shard_key", tuple()) + and self._data.get(name) != value ): - msg = 'Shard Keys are immutable. Tried to update %s' % name + msg = "Shard Keys are immutable. 
Tried to update %s" % name raise OperationError(msg) try: @@ -187,38 +203,52 @@ class BaseDocument(object): except AttributeError: self__initialised = False # Check if the user has created a new instance of a class - if (self._is_document and self__initialised and - self__created and name == self._meta.get('id_field')): - super(BaseDocument, self).__setattr__('_created', False) + if ( + self._is_document + and self__initialised + and self__created + and name == self._meta.get("id_field") + ): + super(BaseDocument, self).__setattr__("_created", False) super(BaseDocument, self).__setattr__(name, value) def __getstate__(self): data = {} - for k in ('_changed_fields', '_initialised', '_created', - '_dynamic_fields', '_fields_ordered'): + for k in ( + "_changed_fields", + "_initialised", + "_created", + "_dynamic_fields", + "_fields_ordered", + ): if hasattr(self, k): data[k] = getattr(self, k) - data['_data'] = self.to_mongo() + data["_data"] = self.to_mongo() return data def __setstate__(self, data): - if isinstance(data['_data'], SON): - data['_data'] = self.__class__._from_son(data['_data'])._data - for k in ('_changed_fields', '_initialised', '_created', '_data', - '_dynamic_fields'): + if isinstance(data["_data"], SON): + data["_data"] = self.__class__._from_son(data["_data"])._data + for k in ( + "_changed_fields", + "_initialised", + "_created", + "_data", + "_dynamic_fields", + ): if k in data: setattr(self, k, data[k]) - if '_fields_ordered' in data: + if "_fields_ordered" in data: if self._dynamic: - setattr(self, '_fields_ordered', data['_fields_ordered']) + setattr(self, "_fields_ordered", data["_fields_ordered"]) else: _super_fields_ordered = type(self)._fields_ordered - setattr(self, '_fields_ordered', _super_fields_ordered) + setattr(self, "_fields_ordered", _super_fields_ordered) - dynamic_fields = data.get('_dynamic_fields') or SON() + dynamic_fields = data.get("_dynamic_fields") or SON() for k in dynamic_fields.keys(): - setattr(self, k, data['_data'].get(k)) + setattr(self, k, data["_data"].get(k)) def __iter__(self): return iter(self._fields_ordered) @@ -255,24 +285,30 @@ class BaseDocument(object): try: u = self.__str__() except (UnicodeEncodeError, UnicodeDecodeError): - u = '[Bad Unicode data]' + u = "[Bad Unicode data]" repr_type = str if u is None else type(u) - return repr_type('<%s: %s>' % (self.__class__.__name__, u)) + return repr_type("<%s: %s>" % (self.__class__.__name__, u)) def __str__(self): # TODO this could be simpler? 
- if hasattr(self, '__unicode__'): + if hasattr(self, "__unicode__"): if six.PY3: return self.__unicode__() else: - return six.text_type(self).encode('utf-8') - return six.text_type('%s object' % self.__class__.__name__) + return six.text_type(self).encode("utf-8") + return six.text_type("%s object" % self.__class__.__name__) def __eq__(self, other): - if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None: + if ( + isinstance(other, self.__class__) + and hasattr(other, "id") + and other.id is not None + ): return self.id == other.id if isinstance(other, DBRef): - return self._get_collection_name() == other.collection and self.id == other.id + return ( + self._get_collection_name() == other.collection and self.id == other.id + ) if self.id is None: return self is other return False @@ -295,10 +331,12 @@ class BaseDocument(object): Get text score from text query """ - if '_text_score' not in self._data: - raise InvalidDocumentError('This document is not originally built from a text query') + if "_text_score" not in self._data: + raise InvalidDocumentError( + "This document is not originally built from a text query" + ) - return self._data['_text_score'] + return self._data["_text_score"] def to_mongo(self, use_db_field=True, fields=None): """ @@ -307,11 +345,11 @@ class BaseDocument(object): fields = fields or [] data = SON() - data['_id'] = None - data['_cls'] = self._class_name + data["_id"] = None + data["_cls"] = self._class_name # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] - root_fields = {f.split('.')[0] for f in fields} + root_fields = {f.split(".")[0] for f in fields} for field_name in self: if root_fields and field_name not in root_fields: @@ -326,16 +364,16 @@ class BaseDocument(object): if value is not None: f_inputs = field.to_mongo.__code__.co_varnames ex_vars = {} - if fields and 'fields' in f_inputs: - key = '%s.' % field_name + if fields and "fields" in f_inputs: + key = "%s." 
% field_name embedded_fields = [ - i.replace(key, '') for i in fields - if i.startswith(key)] + i.replace(key, "") for i in fields if i.startswith(key) + ] - ex_vars['fields'] = embedded_fields + ex_vars["fields"] = embedded_fields - if 'use_db_field' in f_inputs: - ex_vars['use_db_field'] = use_db_field + if "use_db_field" in f_inputs: + ex_vars["use_db_field"] = use_db_field value = field.to_mongo(value, **ex_vars) @@ -351,8 +389,8 @@ class BaseDocument(object): data[field.name] = value # Only add _cls if allow_inheritance is True - if not self._meta.get('allow_inheritance'): - data.pop('_cls') + if not self._meta.get("allow_inheritance"): + data.pop("_cls") return data @@ -372,18 +410,23 @@ class BaseDocument(object): errors[NON_FIELD_ERRORS] = error # Get a list of tuples of field names and their current values - fields = [(self._fields.get(name, self._dynamic_fields.get(name)), - self._data.get(name)) for name in self._fields_ordered] + fields = [ + ( + self._fields.get(name, self._dynamic_fields.get(name)), + self._data.get(name), + ) + for name in self._fields_ordered + ] - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') - GenericEmbeddedDocumentField = _import_class( - 'GenericEmbeddedDocumentField') + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") for field, value in fields: if value is not None: try: - if isinstance(field, (EmbeddedDocumentField, - GenericEmbeddedDocumentField)): + if isinstance( + field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): field._validate(value, clean=clean) else: field._validate(value) @@ -391,17 +434,18 @@ class BaseDocument(object): errors[field.name] = error.errors or error except (ValueError, AttributeError, AssertionError) as error: errors[field.name] = error - elif field.required and not getattr(field, '_auto_gen', False): - errors[field.name] = ValidationError('Field is required', - field_name=field.name) + elif field.required and not getattr(field, "_auto_gen", False): + errors[field.name] = ValidationError( + "Field is required", field_name=field.name + ) if errors: - pk = 'None' - if hasattr(self, 'pk'): + pk = "None" + if hasattr(self, "pk"): pk = self.pk - elif self._instance and hasattr(self._instance, 'pk'): + elif self._instance and hasattr(self._instance, "pk"): pk = self._instance.pk - message = 'ValidationError (%s:%s) ' % (self._class_name, pk) + message = "ValidationError (%s:%s) " % (self._class_name, pk) raise ValidationError(message, errors=errors) def to_json(self, *args, **kwargs): @@ -411,7 +455,7 @@ class BaseDocument(object): MongoDB (as opposed to attribute names on this document). Defaults to True. 
""" - use_db_field = kwargs.pop('use_db_field', True) + use_db_field = kwargs.pop("use_db_field", True) return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) @classmethod @@ -434,22 +478,18 @@ class BaseDocument(object): # If the value is a dict with '_cls' in it, turn it into a document is_dict = isinstance(value, dict) - if is_dict and '_cls' in value: - cls = get_document(value['_cls']) + if is_dict and "_cls" in value: + cls = get_document(value["_cls"]) return cls(**value) if is_dict: - value = { - k: self.__expand_dynamic_values(k, v) - for k, v in value.items() - } + value = {k: self.__expand_dynamic_values(k, v) for k, v in value.items()} else: value = [self.__expand_dynamic_values(name, v) for v in value] # Convert lists / values so we can watch for any changes on them - EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') - if (isinstance(value, (list, tuple)) and - not isinstance(value, BaseList)): + EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") + if isinstance(value, (list, tuple)) and not isinstance(value, BaseList): if issubclass(type(self), EmbeddedDocumentListField): value = EmbeddedDocumentList(value, self, name) else: @@ -464,26 +504,26 @@ class BaseDocument(object): if not key: return - if not hasattr(self, '_changed_fields'): + if not hasattr(self, "_changed_fields"): return - if '.' in key: - key, rest = key.split('.', 1) + if "." in key: + key, rest = key.split(".", 1) key = self._db_field_map.get(key, key) - key = '%s.%s' % (key, rest) + key = "%s.%s" % (key, rest) else: key = self._db_field_map.get(key, key) if key not in self._changed_fields: - levels, idx = key.split('.'), 1 + levels, idx = key.split("."), 1 while idx <= len(levels): - if '.'.join(levels[:idx]) in self._changed_fields: + if ".".join(levels[:idx]) in self._changed_fields: break idx += 1 else: self._changed_fields.append(key) # remove lower level changed fields - level = '.'.join(levels[:idx]) + '.' + level = ".".join(levels[:idx]) + "." remove = self._changed_fields.remove for field in self._changed_fields[:]: if field.startswith(level): @@ -494,7 +534,7 @@ class BaseDocument(object): are marked as changed. """ for changed in self._get_changed_fields(): - parts = changed.split('.') + parts = changed.split(".") data = self for part in parts: if isinstance(data, list): @@ -507,8 +547,10 @@ class BaseDocument(object): else: data = getattr(data, part, None) - if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'): - if getattr(data, '_is_document', False): + if not isinstance(data, LazyReference) and hasattr( + data, "_changed_fields" + ): + if getattr(data, "_is_document", False): continue data._changed_fields = [] @@ -524,39 +566,38 @@ class BaseDocument(object): """ # Loop list / dict fields as they contain documents # Determine the iterator to use - if not hasattr(data, 'items'): + if not hasattr(data, "items"): iterator = enumerate(data) else: iterator = iteritems(data) for index_or_key, value in iterator: - item_key = '%s%s.' % (base_key, index_or_key) + item_key = "%s%s." % (base_key, index_or_key) # don't check anything lower if this key is already marked # as changed. 
if item_key[:-1] in changed_fields: continue - if hasattr(value, '_get_changed_fields'): + if hasattr(value, "_get_changed_fields"): changed = value._get_changed_fields() - changed_fields += ['%s%s' % (item_key, k) for k in changed if k] + changed_fields += ["%s%s" % (item_key, k) for k in changed if k] elif isinstance(value, (list, tuple, dict)): - self._nestable_types_changed_fields( - changed_fields, item_key, value) + self._nestable_types_changed_fields(changed_fields, item_key, value) def _get_changed_fields(self): """Return a list of all fields that have explicitly been changed. """ - EmbeddedDocument = _import_class('EmbeddedDocument') - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - SortedListField = _import_class('SortedListField') + EmbeddedDocument = _import_class("EmbeddedDocument") + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") + SortedListField = _import_class("SortedListField") changed_fields = [] - changed_fields += getattr(self, '_changed_fields', []) + changed_fields += getattr(self, "_changed_fields", []) for field_name in self._fields_ordered: db_field_name = self._db_field_map.get(field_name, field_name) - key = '%s.' % db_field_name + key = "%s." % db_field_name data = self._data.get(field_name, None) field = self._fields.get(field_name) @@ -564,16 +605,17 @@ class BaseDocument(object): # Whole field already marked as changed, no need to go further continue - if isinstance(field, ReferenceField): # Don't follow referenced documents + if isinstance(field, ReferenceField): # Don't follow referenced documents continue if isinstance(data, EmbeddedDocument): # Find all embedded fields that have been changed changed = data._get_changed_fields() - changed_fields += ['%s%s' % (key, k) for k in changed if k] + changed_fields += ["%s%s" % (key, k) for k in changed if k] elif isinstance(data, (list, tuple, dict)): - if (hasattr(field, 'field') and - isinstance(field.field, (ReferenceField, GenericReferenceField))): + if hasattr(field, "field") and isinstance( + field.field, (ReferenceField, GenericReferenceField) + ): continue elif isinstance(field, SortedListField) and field._ordering: # if ordering is affected whole list is changed @@ -581,8 +623,7 @@ class BaseDocument(object): changed_fields.append(db_field_name) continue - self._nestable_types_changed_fields( - changed_fields, key, data) + self._nestable_types_changed_fields(changed_fields, key, data) return changed_fields def _delta(self): @@ -594,11 +635,11 @@ class BaseDocument(object): set_fields = self._get_changed_fields() unset_data = {} - if hasattr(self, '_changed_fields'): + if hasattr(self, "_changed_fields"): set_data = {} # Fetch each set item from its path for path in set_fields: - parts = path.split('.') + parts = path.split(".") d = doc new_path = [] for p in parts: @@ -608,26 +649,27 @@ class BaseDocument(object): elif isinstance(d, list) and p.isdigit(): # An item of a list (identified by its index) is updated d = d[int(p)] - elif hasattr(d, 'get'): + elif hasattr(d, "get"): # dict-like (dict, embedded document) d = d.get(p) new_path.append(p) - path = '.'.join(new_path) + path = ".".join(new_path) set_data[path] = d else: set_data = doc - if '_id' in set_data: - del set_data['_id'] + if "_id" in set_data: + del set_data["_id"] # Determine if any changed items were actually unset. 
for path, value in set_data.items(): - if value or isinstance(value, (numbers.Number, bool)): # Account for 0 and True that are truthy + if value or isinstance( + value, (numbers.Number, bool) + ): # Account for 0 and True that are truthy continue - parts = path.split('.') + parts = path.split(".") - if (self._dynamic and len(parts) and parts[0] in - self._dynamic_fields): + if self._dynamic and len(parts) and parts[0] in self._dynamic_fields: del set_data[path] unset_data[path] = 1 continue @@ -642,16 +684,16 @@ class BaseDocument(object): for p in parts: if isinstance(d, list) and p.isdigit(): d = d[int(p)] - elif (hasattr(d, '__getattribute__') and - not isinstance(d, dict)): + elif hasattr(d, "__getattribute__") and not isinstance(d, dict): real_path = d._reverse_db_field_map.get(p, p) d = getattr(d, real_path) else: d = d.get(p) - if hasattr(d, '_fields'): - field_name = d._reverse_db_field_map.get(db_field_name, - db_field_name) + if hasattr(d, "_fields"): + field_name = d._reverse_db_field_map.get( + db_field_name, db_field_name + ) if field_name in d._fields: default = d._fields.get(field_name).default else: @@ -672,7 +714,7 @@ class BaseDocument(object): """Return the collection name for this class. None for abstract class. """ - return cls._meta.get('collection', None) + return cls._meta.get("collection", None) @classmethod def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): @@ -685,7 +727,7 @@ class BaseDocument(object): # Get the class name from the document, falling back to the given # class if unavailable - class_name = son.get('_cls', cls._class_name) + class_name = son.get("_cls", cls._class_name) # Convert SON to a data dict, making sure each key is a string and # corresponds to the right db field. @@ -710,18 +752,20 @@ class BaseDocument(object): if field.db_field in data: value = data[field.db_field] try: - data[field_name] = (value if value is None - else field.to_python(value)) + data[field_name] = ( + value if value is None else field.to_python(value) + ) if field_name != field.db_field: del data[field.db_field] except (AttributeError, ValueError) as e: errors_dict[field_name] = e if errors_dict: - errors = '\n'.join(['%s - %s' % (k, v) - for k, v in errors_dict.items()]) - msg = ('Invalid data to create a `%s` instance.\n%s' - % (cls._class_name, errors)) + errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()]) + msg = "Invalid data to create a `%s` instance.\n%s" % ( + cls._class_name, + errors, + ) raise InvalidDocumentError(msg) # In STRICT documents, remove any keys that aren't in cls._fields @@ -729,10 +773,7 @@ class BaseDocument(object): data = {k: v for k, v in iteritems(data) if k in cls._fields} obj = cls( - __auto_convert=False, - _created=created, - __only_fields=only_fields, - **data + __auto_convert=False, _created=created, __only_fields=only_fields, **data ) obj._changed_fields = [] if not _auto_dereference: @@ -754,15 +795,13 @@ class BaseDocument(object): # Create a map of index fields to index spec. We're converting # the fields from a list to a tuple so that it's hashable. - spec_fields = { - tuple(index['fields']): index for index in index_specs - } + spec_fields = {tuple(index["fields"]): index for index in index_specs} # For each new index, if there's an existing index with the same # fields list, update the existing spec with all data from the # new spec. 
for new_index in indices: - candidate = spec_fields.get(tuple(new_index['fields'])) + candidate = spec_fields.get(tuple(new_index["fields"])) if candidate is None: index_specs.append(new_index) else: @@ -779,9 +818,9 @@ class BaseDocument(object): def _build_index_spec(cls, spec): """Build a PyMongo index spec from a MongoEngine index spec.""" if isinstance(spec, six.string_types): - spec = {'fields': [spec]} + spec = {"fields": [spec]} elif isinstance(spec, (list, tuple)): - spec = {'fields': list(spec)} + spec = {"fields": list(spec)} elif isinstance(spec, dict): spec = dict(spec) @@ -789,19 +828,21 @@ class BaseDocument(object): direction = None # Check to see if we need to include _cls - allow_inheritance = cls._meta.get('allow_inheritance') + allow_inheritance = cls._meta.get("allow_inheritance") include_cls = ( - allow_inheritance and - not spec.get('sparse', False) and - spec.get('cls', True) and - '_cls' not in spec['fields'] + allow_inheritance + and not spec.get("sparse", False) + and spec.get("cls", True) + and "_cls" not in spec["fields"] ) # 733: don't include cls if index_cls is False unless there is an explicit cls with the index - include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True)) - if 'cls' in spec: - spec.pop('cls') - for key in spec['fields']: + include_cls = include_cls and ( + spec.get("cls", False) or cls._meta.get("index_cls", True) + ) + if "cls" in spec: + spec.pop("cls") + for key in spec["fields"]: # If inherited spec continue if isinstance(key, (list, tuple)): continue @@ -814,51 +855,54 @@ class BaseDocument(object): # GEOHAYSTACK from ) # GEO2D from * direction = pymongo.ASCENDING - if key.startswith('-'): + if key.startswith("-"): direction = pymongo.DESCENDING - elif key.startswith('$'): + elif key.startswith("$"): direction = pymongo.TEXT - elif key.startswith('#'): + elif key.startswith("#"): direction = pymongo.HASHED - elif key.startswith('('): + elif key.startswith("("): direction = pymongo.GEOSPHERE - elif key.startswith(')'): + elif key.startswith(")"): direction = pymongo.GEOHAYSTACK - elif key.startswith('*'): + elif key.startswith("*"): direction = pymongo.GEO2D - if key.startswith(('+', '-', '*', '$', '#', '(', ')')): + if key.startswith(("+", "-", "*", "$", "#", "(", ")")): key = key[1:] # Use real field name, do it manually because we need field # objects for the next part (list field checking) - parts = key.split('.') - if parts in (['pk'], ['id'], ['_id']): - key = '_id' + parts = key.split(".") + if parts in (["pk"], ["id"], ["_id"]): + key = "_id" else: fields = cls._lookup_field(parts) parts = [] for field in fields: try: - if field != '_id': + if field != "_id": field = field.db_field except AttributeError: pass parts.append(field) - key = '.'.join(parts) + key = ".".join(parts) index_list.append((key, direction)) # Don't add cls to a geo index if include_cls and direction not in ( - pymongo.GEO2D, pymongo.GEOHAYSTACK, pymongo.GEOSPHERE): - index_list.insert(0, ('_cls', 1)) + pymongo.GEO2D, + pymongo.GEOHAYSTACK, + pymongo.GEOSPHERE, + ): + index_list.insert(0, ("_cls", 1)) if index_list: - spec['fields'] = index_list + spec["fields"] = index_list return spec @classmethod - def _unique_with_indexes(cls, namespace=''): + def _unique_with_indexes(cls, namespace=""): """Find unique indexes in the document schema and return them.""" unique_indexes = [] for field_name, field in cls._fields.items(): @@ -876,36 +920,39 @@ class BaseDocument(object): # Convert unique_with field names to real field names 
unique_with = [] for other_name in field.unique_with: - parts = other_name.split('.') + parts = other_name.split(".") # Lookup real name parts = cls._lookup_field(parts) name_parts = [part.db_field for part in parts] - unique_with.append('.'.join(name_parts)) + unique_with.append(".".join(name_parts)) # Unique field should be required parts[-1].required = True - sparse = (not sparse and - parts[-1].name not in cls.__dict__) + sparse = not sparse and parts[-1].name not in cls.__dict__ unique_fields += unique_with # Add the new index to the list fields = [ - ('%s%s' % (namespace, f), pymongo.ASCENDING) - for f in unique_fields + ("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields ] - index = {'fields': fields, 'unique': True, 'sparse': sparse} + index = {"fields": fields, "unique": True, "sparse": sparse} unique_indexes.append(index) - if field.__class__.__name__ in {'EmbeddedDocumentListField', - 'ListField', 'SortedListField'}: + if field.__class__.__name__ in { + "EmbeddedDocumentListField", + "ListField", + "SortedListField", + }: field = field.field # Grab any embedded document field unique indexes - if (field.__class__.__name__ == 'EmbeddedDocumentField' and - field.document_type != cls): - field_namespace = '%s.' % field_name + if ( + field.__class__.__name__ == "EmbeddedDocumentField" + and field.document_type != cls + ): + field_namespace = "%s." % field_name doc_cls = field.document_type unique_indexes += doc_cls._unique_with_indexes(field_namespace) @@ -917,32 +964,36 @@ class BaseDocument(object): geo_indices = [] inspected.append(cls) - geo_field_type_names = ('EmbeddedDocumentField', 'GeoPointField', - 'PointField', 'LineStringField', - 'PolygonField') + geo_field_type_names = ( + "EmbeddedDocumentField", + "GeoPointField", + "PointField", + "LineStringField", + "PolygonField", + ) - geo_field_types = tuple([_import_class(field) - for field in geo_field_type_names]) + geo_field_types = tuple( + [_import_class(field) for field in geo_field_type_names] + ) for field in cls._fields.values(): if not isinstance(field, geo_field_types): continue - if hasattr(field, 'document_type'): + if hasattr(field, "document_type"): field_cls = field.document_type if field_cls in inspected: continue - if hasattr(field_cls, '_geo_indices'): + if hasattr(field_cls, "_geo_indices"): geo_indices += field_cls._geo_indices( - inspected, parent_field=field.db_field) + inspected, parent_field=field.db_field + ) elif field._geo_index: field_name = field.db_field if parent_field: - field_name = '%s.%s' % (parent_field, field_name) - geo_indices.append({ - 'fields': [(field_name, field._geo_index)] - }) + field_name = "%s.%s" % (parent_field, field_name) + geo_indices.append({"fields": [(field_name, field._geo_index)]}) return geo_indices @@ -983,8 +1034,8 @@ class BaseDocument(object): # TODO this method is WAY too complicated. Simplify it. 
# TODO don't think returning a string for embedded non-existent fields is desired
 
-        ListField = _import_class('ListField')
-        DynamicField = _import_class('DynamicField')
+        ListField = _import_class("ListField")
+        DynamicField = _import_class("DynamicField")
 
         if not isinstance(parts, (list, tuple)):
             parts = [parts]
@@ -1000,15 +1051,17 @@ class BaseDocument(object):
 
             # Look up first field from the document
             if field is None:
-                if field_name == 'pk':
+                if field_name == "pk":
                     # Deal with "primary key" alias
-                    field_name = cls._meta['id_field']
+                    field_name = cls._meta["id_field"]
 
                 if field_name in cls._fields:
                     field = cls._fields[field_name]
                 elif cls._dynamic:
                     field = DynamicField(db_field=field_name)
-                elif cls._meta.get('allow_inheritance') or cls._meta.get('abstract', False):
+                elif cls._meta.get("allow_inheritance") or cls._meta.get(
+                    "abstract", False
+                ):
                     # 744: in case the field is defined in a subclass
                     for subcls in cls.__subclasses__():
                         try:
@@ -1023,38 +1076,41 @@ class BaseDocument(object):
                     else:
                         raise LookUpError('Cannot resolve field "%s"' % field_name)
             else:
-                ReferenceField = _import_class('ReferenceField')
-                GenericReferenceField = _import_class('GenericReferenceField')
+                ReferenceField = _import_class("ReferenceField")
+                GenericReferenceField = _import_class("GenericReferenceField")
 
                 # If previous field was a reference, throw an error (we
                 # cannot look up fields that are on references).
                 if isinstance(field, (ReferenceField, GenericReferenceField)):
-                    raise LookUpError('Cannot perform join in mongoDB: %s' %
-                                      '__'.join(parts))
+                    raise LookUpError(
+                        "Cannot perform join in mongoDB: %s" % "__".join(parts)
+                    )
 
                 # If the parent field has a "field" attribute which has a
                 # lookup_member method, call it to find the field
                 # corresponding to this iteration.
-                if hasattr(getattr(field, 'field', None), 'lookup_member'):
+                if hasattr(getattr(field, "field", None), "lookup_member"):
                     new_field = field.field.lookup_member(field_name)
 
                 # If the parent field is a DynamicField or if it's part of
                 # a DynamicDocument, mark current field as a DynamicField
                 # with db_name equal to the field name.
-                elif cls._dynamic and (isinstance(field, DynamicField) or
-                                       getattr(getattr(field, 'document_type', None), '_dynamic', None)):
+                elif cls._dynamic and (
+                    isinstance(field, DynamicField)
+                    or getattr(getattr(field, "document_type", None), "_dynamic", None)
+                ):
                     new_field = DynamicField(db_field=field_name)
 
                 # Else, try to use the parent field's lookup_member method
                 # to find the subfield.
-                elif hasattr(field, 'lookup_member'):
+                elif hasattr(field, "lookup_member"):
                     new_field = field.lookup_member(field_name)
 
                 # Raise a LookUpError if all the other conditions failed.
                 else:
                     raise LookUpError(
-                        'Cannot resolve subfield or operator {} '
-                        'on the field {}'.format(field_name, field.name)
+                        "Cannot resolve subfield or operator {} "
+                        "on the field {}".format(field_name, field.name)
                     )
 
                 # If current field still wasn't found and the parent field
@@ -1073,23 +1129,24 @@ class BaseDocument(object):
         return fields
 
     @classmethod
-    def _translate_field_name(cls, field, sep='.'):
+    def _translate_field_name(cls, field, sep="."):
         """Translate a field attribute name to a database field name.
         """
         parts = field.split(sep)
         parts = [f.db_field for f in cls._lookup_field(parts)]
-        return '.'.join(parts)
+        return ".".join(parts)
 
     def __set_field_display(self):
         """For each field that specifies choices, create a
         get_<field>_display method.
""" - fields_with_choices = [(n, f) for n, f in self._fields.items() - if f.choices] + fields_with_choices = [(n, f) for n, f in self._fields.items() if f.choices] for attr_name, field in fields_with_choices: - setattr(self, - 'get_%s_display' % attr_name, - partial(self.__get_field_display, field=field)) + setattr( + self, + "get_%s_display" % attr_name, + partial(self.__get_field_display, field=field), + ) def __get_field_display(self, field): """Return the display value for a choice field""" @@ -1097,9 +1154,16 @@ class BaseDocument(object): if field.choices and isinstance(field.choices[0], (list, tuple)): if value is None: return None - sep = getattr(field, 'display_sep', ' ') - values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value] - return sep.join([ - six.text_type(dict(field.choices).get(val, val)) - for val in values or []]) + sep = getattr(field, "display_sep", " ") + values = ( + value + if field.__class__.__name__ in ("ListField", "SortedListField") + else [value] + ) + return sep.join( + [ + six.text_type(dict(field.choices).get(val, val)) + for val in values or [] + ] + ) return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 9ce426c9..cd1039cb 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -8,13 +8,11 @@ import six from six import iteritems from mongoengine.base.common import UPDATE_OPERATORS -from mongoengine.base.datastructures import (BaseDict, BaseList, - EmbeddedDocumentList) +from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList from mongoengine.common import _import_class from mongoengine.errors import DeprecatedError, ValidationError -__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', - 'GeoJsonBaseField') +__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") class BaseField(object): @@ -23,6 +21,7 @@ class BaseField(object): .. versionchanged:: 0.5 - added verbose and help text """ + name = None _geo_index = False _auto_gen = False # Call `generate` to generate a value @@ -34,10 +33,21 @@ class BaseField(object): creation_counter = 0 auto_creation_counter = -1 - def __init__(self, db_field=None, name=None, required=False, default=None, - unique=False, unique_with=None, primary_key=False, - validation=None, choices=None, null=False, sparse=False, - **kwargs): + def __init__( + self, + db_field=None, + name=None, + required=False, + default=None, + unique=False, + unique_with=None, + primary_key=False, + validation=None, + choices=None, + null=False, + sparse=False, + **kwargs + ): """ :param db_field: The database field to store this field in (defaults to the name of the field) @@ -65,7 +75,7 @@ class BaseField(object): existing attributes. Common metadata includes `verbose_name` and `help_text`. """ - self.db_field = (db_field or name) if not primary_key else '_id' + self.db_field = (db_field or name) if not primary_key else "_id" if name: msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' @@ -82,17 +92,16 @@ class BaseField(object): self._owner_document = None # Make sure db_field is a string (if it's explicitly defined). - if ( - self.db_field is not None and - not isinstance(self.db_field, six.string_types) + if self.db_field is not None and not isinstance( + self.db_field, six.string_types ): - raise TypeError('db_field should be a string.') + raise TypeError("db_field should be a string.") # Make sure db_field doesn't contain any forbidden characters. 
if isinstance(self.db_field, six.string_types) and ( - '.' in self.db_field or - '\0' in self.db_field or - self.db_field.startswith('$') + "." in self.db_field + or "\0" in self.db_field + or self.db_field.startswith("$") ): raise ValueError( 'field names cannot contain dots (".") or null characters ' @@ -102,15 +111,17 @@ class BaseField(object): # Detect and report conflicts between metadata and base properties. conflicts = set(dir(self)) & set(kwargs) if conflicts: - raise TypeError('%s already has attribute(s): %s' % ( - self.__class__.__name__, ', '.join(conflicts))) + raise TypeError( + "%s already has attribute(s): %s" + % (self.__class__.__name__, ", ".join(conflicts)) + ) # Assign metadata to the instance # This efficient method is available because no __slots__ are defined. self.__dict__.update(kwargs) # Adjust the appropriate creation counter, and save our local copy. - if self.db_field == '_id': + if self.db_field == "_id": self.creation_counter = BaseField.auto_creation_counter BaseField.auto_creation_counter -= 1 else: @@ -142,8 +153,8 @@ class BaseField(object): if instance._initialised: try: value_has_changed = ( - self.name not in instance._data or - instance._data[self.name] != value + self.name not in instance._data + or instance._data[self.name] != value ) if value_has_changed: instance._mark_as_changed(self.name) @@ -153,7 +164,7 @@ class BaseField(object): # Mark the field as changed in such cases. instance._mark_as_changed(self.name) - EmbeddedDocument = _import_class('EmbeddedDocument') + EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument): value._instance = weakref.proxy(instance) elif isinstance(value, (list, tuple)): @@ -163,7 +174,7 @@ class BaseField(object): instance._data[self.name] = value - def error(self, message='', errors=None, field_name=None): + def error(self, message="", errors=None, field_name=None): """Raise a ValidationError.""" field_name = field_name if field_name else self.name raise ValidationError(message, errors=errors, field_name=field_name) @@ -180,11 +191,11 @@ class BaseField(object): """Helper method to call to_mongo with proper inputs.""" f_inputs = self.to_mongo.__code__.co_varnames ex_vars = {} - if 'fields' in f_inputs: - ex_vars['fields'] = fields + if "fields" in f_inputs: + ex_vars["fields"] = fields - if 'use_db_field' in f_inputs: - ex_vars['use_db_field'] = use_db_field + if "use_db_field" in f_inputs: + ex_vars["use_db_field"] = use_db_field return self.to_mongo(value, **ex_vars) @@ -199,8 +210,8 @@ class BaseField(object): pass def _validate_choices(self, value): - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') + Document = _import_class("Document") + EmbeddedDocument = _import_class("EmbeddedDocument") choice_list = self.choices if isinstance(next(iter(choice_list)), (list, tuple)): @@ -211,15 +222,13 @@ class BaseField(object): if isinstance(value, (Document, EmbeddedDocument)): if not any(isinstance(value, c) for c in choice_list): self.error( - 'Value must be an instance of %s' % ( - six.text_type(choice_list) - ) + "Value must be an instance of %s" % (six.text_type(choice_list)) ) # Choices which are types other than Documents else: values = value if isinstance(value, (list, tuple)) else [value] if len(set(values) - set(choice_list)): - self.error('Value must be one of %s' % six.text_type(choice_list)) + self.error("Value must be one of %s" % six.text_type(choice_list)) def _validate(self, value, **kwargs): # Check the Choices 
Constraint @@ -235,13 +244,17 @@ class BaseField(object): # in favor of having validation raising a ValidationError ret = self.validation(value) if ret is not None: - raise DeprecatedError('validation argument for `%s` must not return anything, ' - 'it should raise a ValidationError if validation fails' % self.name) + raise DeprecatedError( + "validation argument for `%s` must not return anything, " + "it should raise a ValidationError if validation fails" + % self.name + ) except ValidationError as ex: self.error(str(ex)) else: - raise ValueError('validation argument for `"%s"` must be a ' - 'callable.' % self.name) + raise ValueError( + 'validation argument for `"%s"` must be a ' "callable." % self.name + ) self.validate(value, **kwargs) @@ -275,35 +288,41 @@ class ComplexBaseField(BaseField): # Document class being used rather than a document object return self - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") + EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") auto_dereference = instance._fields[self.name]._auto_dereference - dereference = (auto_dereference and - (self.field is None or isinstance(self.field, - (GenericReferenceField, ReferenceField)))) + dereference = auto_dereference and ( + self.field is None + or isinstance(self.field, (GenericReferenceField, ReferenceField)) + ) - _dereference = _import_class('DeReference')() + _dereference = _import_class("DeReference")() - if (instance._initialised and - dereference and - instance._data.get(self.name) and - not getattr(instance._data[self.name], '_dereferenced', False)): + if ( + instance._initialised + and dereference + and instance._data.get(self.name) + and not getattr(instance._data[self.name], "_dereferenced", False) + ): instance._data[self.name] = _dereference( - instance._data.get(self.name), max_depth=1, instance=instance, - name=self.name + instance._data.get(self.name), + max_depth=1, + instance=instance, + name=self.name, ) - if hasattr(instance._data[self.name], '_dereferenced'): + if hasattr(instance._data[self.name], "_dereferenced"): instance._data[self.name]._dereferenced = True value = super(ComplexBaseField, self).__get__(instance, owner) # Convert lists / values so we can watch for any changes on them if isinstance(value, (list, tuple)): - if (issubclass(type(self), EmbeddedDocumentListField) and - not isinstance(value, EmbeddedDocumentList)): + if issubclass(type(self), EmbeddedDocumentListField) and not isinstance( + value, EmbeddedDocumentList + ): value = EmbeddedDocumentList(value, instance, self.name) elif not isinstance(value, BaseList): value = BaseList(value, instance, self.name) @@ -312,12 +331,13 @@ class ComplexBaseField(BaseField): value = BaseDict(value, instance, self.name) instance._data[self.name] = value - if (auto_dereference and instance._initialised and - isinstance(value, (BaseList, BaseDict)) and - not value._dereferenced): - value = _dereference( - value, max_depth=1, instance=instance, name=self.name - ) + if ( + auto_dereference + and instance._initialised + and isinstance(value, (BaseList, BaseDict)) + and not value._dereferenced + ): + value = _dereference(value, max_depth=1, instance=instance, name=self.name) value._dereferenced = True instance._data[self.name] = value @@ -328,16 +348,16 @@ class 
ComplexBaseField(BaseField): if isinstance(value, six.string_types): return value - if hasattr(value, 'to_python'): + if hasattr(value, "to_python"): return value.to_python() - BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if isinstance(value, BaseDocument): # Something is wrong, return the value as it is return value is_list = False - if not hasattr(value, 'items'): + if not hasattr(value, "items"): try: is_list = True value = {idx: v for idx, v in enumerate(value)} @@ -346,50 +366,54 @@ class ComplexBaseField(BaseField): if self.field: self.field._auto_dereference = self._auto_dereference - value_dict = {key: self.field.to_python(item) - for key, item in value.items()} + value_dict = { + key: self.field.to_python(item) for key, item in value.items() + } else: - Document = _import_class('Document') + Document = _import_class("Document") value_dict = {} for k, v in value.items(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: - self.error('You can only reference documents once they' - ' have been saved to the database') + self.error( + "You can only reference documents once they" + " have been saved to the database" + ) collection = v._get_collection_name() value_dict[k] = DBRef(collection, v.pk) - elif hasattr(v, 'to_python'): + elif hasattr(v, "to_python"): value_dict[k] = v.to_python() else: value_dict[k] = self.to_python(v) if is_list: # Convert back to a list - return [v for _, v in sorted(value_dict.items(), - key=operator.itemgetter(0))] + return [ + v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) + ] return value_dict def to_mongo(self, value, use_db_field=True, fields=None): """Convert a Python type to a MongoDB-compatible type.""" - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') - GenericReferenceField = _import_class('GenericReferenceField') + Document = _import_class("Document") + EmbeddedDocument = _import_class("EmbeddedDocument") + GenericReferenceField = _import_class("GenericReferenceField") if isinstance(value, six.string_types): return value - if hasattr(value, 'to_mongo'): + if hasattr(value, "to_mongo"): if isinstance(value, Document): return GenericReferenceField().to_mongo(value) cls = value.__class__ val = value.to_mongo(use_db_field, fields) # If it's a document that is not inherited add _cls if isinstance(value, EmbeddedDocument): - val['_cls'] = cls.__name__ + val["_cls"] = cls.__name__ return val is_list = False - if not hasattr(value, 'items'): + if not hasattr(value, "items"): try: is_list = True value = {k: v for k, v in enumerate(value)} @@ -407,39 +431,42 @@ class ComplexBaseField(BaseField): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: - self.error('You can only reference documents once they' - ' have been saved to the database') + self.error( + "You can only reference documents once they" + " have been saved to the database" + ) # If its a document that is not inheritable it won't have # any _cls data so make it a generic reference allows # us to dereference - meta = getattr(v, '_meta', {}) - allow_inheritance = meta.get('allow_inheritance') + meta = getattr(v, "_meta", {}) + allow_inheritance = meta.get("allow_inheritance") if not allow_inheritance and not self.field: value_dict[k] = GenericReferenceField().to_mongo(v) else: collection = v._get_collection_name() value_dict[k] = DBRef(collection, v.pk) - elif hasattr(v, 
'to_mongo'): + elif hasattr(v, "to_mongo"): cls = v.__class__ val = v.to_mongo(use_db_field, fields) # If it's a document that is not inherited add _cls if isinstance(v, (Document, EmbeddedDocument)): - val['_cls'] = cls.__name__ + val["_cls"] = cls.__name__ value_dict[k] = val else: value_dict[k] = self.to_mongo(v, use_db_field, fields) if is_list: # Convert back to a list - return [v for _, v in sorted(value_dict.items(), - key=operator.itemgetter(0))] + return [ + v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) + ] return value_dict def validate(self, value): """If field is provided ensure the value is valid.""" errors = {} if self.field: - if hasattr(value, 'iteritems') or hasattr(value, 'items'): + if hasattr(value, "iteritems") or hasattr(value, "items"): sequence = iteritems(value) else: sequence = enumerate(value) @@ -453,11 +480,10 @@ class ComplexBaseField(BaseField): if errors: field_class = self.field.__class__.__name__ - self.error('Invalid %s item (%s)' % (field_class, value), - errors=errors) + self.error("Invalid %s item (%s)" % (field_class, value), errors=errors) # Don't allow empty values if required if self.required and not value: - self.error('Field is required and cannot be empty') + self.error("Field is required and cannot be empty") def prepare_query_value(self, op, value): return self.to_mongo(value) @@ -500,7 +526,7 @@ class ObjectIdField(BaseField): try: ObjectId(six.text_type(value)) except Exception: - self.error('Invalid Object ID') + self.error("Invalid Object ID") class GeoJsonBaseField(BaseField): @@ -510,14 +536,14 @@ class GeoJsonBaseField(BaseField): """ _geo_index = pymongo.GEOSPHERE - _type = 'GeoBase' + _type = "GeoBase" def __init__(self, auto_index=True, *args, **kwargs): """ :param bool auto_index: Automatically create a '2dsphere' index.\ Defaults to `True`. 
""" - self._name = '%sField' % self._type + self._name = "%sField" % self._type if not auto_index: self._geo_index = False super(GeoJsonBaseField, self).__init__(*args, **kwargs) @@ -525,57 +551,58 @@ class GeoJsonBaseField(BaseField): def validate(self, value): """Validate the GeoJson object based on its type.""" if isinstance(value, dict): - if set(value.keys()) == {'type', 'coordinates'}: - if value['type'] != self._type: - self.error('%s type must be "%s"' % - (self._name, self._type)) - return self.validate(value['coordinates']) + if set(value.keys()) == {"type", "coordinates"}: + if value["type"] != self._type: + self.error('%s type must be "%s"' % (self._name, self._type)) + return self.validate(value["coordinates"]) else: - self.error('%s can only accept a valid GeoJson dictionary' - ' or lists of (x, y)' % self._name) + self.error( + "%s can only accept a valid GeoJson dictionary" + " or lists of (x, y)" % self._name + ) return elif not isinstance(value, (list, tuple)): - self.error('%s can only accept lists of [x, y]' % self._name) + self.error("%s can only accept lists of [x, y]" % self._name) return - validate = getattr(self, '_validate_%s' % self._type.lower()) + validate = getattr(self, "_validate_%s" % self._type.lower()) error = validate(value) if error: self.error(error) def _validate_polygon(self, value, top_level=True): if not isinstance(value, (list, tuple)): - return 'Polygons must contain list of linestrings' + return "Polygons must contain list of linestrings" # Quick and dirty validator try: value[0][0][0] except (TypeError, IndexError): - return 'Invalid Polygon must contain at least one valid linestring' + return "Invalid Polygon must contain at least one valid linestring" errors = [] for val in value: error = self._validate_linestring(val, False) if not error and val[0] != val[-1]: - error = 'LineStrings must start and end at the same point' + error = "LineStrings must start and end at the same point" if error and error not in errors: errors.append(error) if errors: if top_level: - return 'Invalid Polygon:\n%s' % ', '.join(errors) + return "Invalid Polygon:\n%s" % ", ".join(errors) else: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_linestring(self, value, top_level=True): """Validate a linestring.""" if not isinstance(value, (list, tuple)): - return 'LineStrings must contain list of coordinate pairs' + return "LineStrings must contain list of coordinate pairs" # Quick and dirty validator try: value[0][0] except (TypeError, IndexError): - return 'Invalid LineString must contain at least one valid point' + return "Invalid LineString must contain at least one valid point" errors = [] for val in value: @@ -584,29 +611,30 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: if top_level: - return 'Invalid LineString:\n%s' % ', '.join(errors) + return "Invalid LineString:\n%s" % ", ".join(errors) else: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_point(self, value): """Validate each set of coords""" if not isinstance(value, (list, tuple)): - return 'Points must be a list of coordinate pairs' + return "Points must be a list of coordinate pairs" elif not len(value) == 2: - return 'Value (%s) must be a two-dimensional point' % repr(value) - elif (not isinstance(value[0], (float, int)) or - not isinstance(value[1], (float, int))): - return 'Both values (%s) in point must be float or int' % repr(value) + return "Value (%s) must be a two-dimensional point" % repr(value) + elif not 
isinstance(value[0], (float, int)) or not isinstance( + value[1], (float, int) + ): + return "Both values (%s) in point must be float or int" % repr(value) def _validate_multipoint(self, value): if not isinstance(value, (list, tuple)): - return 'MultiPoint must be a list of Point' + return "MultiPoint must be a list of Point" # Quick and dirty validator try: value[0][0] except (TypeError, IndexError): - return 'Invalid MultiPoint must contain at least one valid point' + return "Invalid MultiPoint must contain at least one valid point" errors = [] for point in value: @@ -615,17 +643,17 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_multilinestring(self, value, top_level=True): if not isinstance(value, (list, tuple)): - return 'MultiLineString must be a list of LineString' + return "MultiLineString must be a list of LineString" # Quick and dirty validator try: value[0][0][0] except (TypeError, IndexError): - return 'Invalid MultiLineString must contain at least one valid linestring' + return "Invalid MultiLineString must contain at least one valid linestring" errors = [] for linestring in value: @@ -635,19 +663,19 @@ class GeoJsonBaseField(BaseField): if errors: if top_level: - return 'Invalid MultiLineString:\n%s' % ', '.join(errors) + return "Invalid MultiLineString:\n%s" % ", ".join(errors) else: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_multipolygon(self, value): if not isinstance(value, (list, tuple)): - return 'MultiPolygon must be a list of Polygon' + return "MultiPolygon must be a list of Polygon" # Quick and dirty validator try: value[0][0][0][0] except (TypeError, IndexError): - return 'Invalid MultiPolygon must contain at least one valid Polygon' + return "Invalid MultiPolygon must contain at least one valid Polygon" errors = [] for polygon in value: @@ -656,9 +684,9 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: - return 'Invalid MultiPolygon:\n%s' % ', '.join(errors) + return "Invalid MultiPolygon:\n%s" % ", ".join(errors) def to_mongo(self, value): if isinstance(value, dict): return value - return SON([('type', self._type), ('coordinates', value)]) + return SON([("type", self._type), ("coordinates", value)]) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index c3ced5bb..e4d26811 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -8,12 +8,15 @@ from mongoengine.base.common import _document_registry from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField from mongoengine.common import _import_class from mongoengine.errors import InvalidDocumentError -from mongoengine.queryset import (DO_NOTHING, DoesNotExist, - MultipleObjectsReturned, - QuerySetManager) +from mongoengine.queryset import ( + DO_NOTHING, + DoesNotExist, + MultipleObjectsReturned, + QuerySetManager, +) -__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') +__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass") class DocumentMetaclass(type): @@ -25,44 +28,46 @@ class DocumentMetaclass(type): super_new = super(DocumentMetaclass, mcs).__new__ # If a base class just call super - metaclass = attrs.get('my_metaclass') + metaclass = attrs.get("my_metaclass") if metaclass and issubclass(metaclass, DocumentMetaclass): return super_new(mcs, name, bases, attrs) - attrs['_is_document'] = attrs.get('_is_document', False) - attrs['_cached_reference_fields'] = [] 
+ attrs["_is_document"] = attrs.get("_is_document", False) + attrs["_cached_reference_fields"] = [] # EmbeddedDocuments could have meta data for inheritance - if 'meta' in attrs: - attrs['_meta'] = attrs.pop('meta') + if "meta" in attrs: + attrs["_meta"] = attrs.pop("meta") # EmbeddedDocuments should inherit meta data - if '_meta' not in attrs: + if "_meta" not in attrs: meta = MetaDict() for base in flattened_bases[::-1]: # Add any mixin metadata from plain objects - if hasattr(base, 'meta'): + if hasattr(base, "meta"): meta.merge(base.meta) - elif hasattr(base, '_meta'): + elif hasattr(base, "_meta"): meta.merge(base._meta) - attrs['_meta'] = meta - attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract + attrs["_meta"] = meta + attrs["_meta"][ + "abstract" + ] = False # 789: EmbeddedDocument shouldn't inherit abstract # If allow_inheritance is True, add a "_cls" string field to the attrs - if attrs['_meta'].get('allow_inheritance'): - StringField = _import_class('StringField') - attrs['_cls'] = StringField() + if attrs["_meta"].get("allow_inheritance"): + StringField = _import_class("StringField") + attrs["_cls"] = StringField() # Handle document Fields # Merge all fields from subclasses doc_fields = {} for base in flattened_bases[::-1]: - if hasattr(base, '_fields'): + if hasattr(base, "_fields"): doc_fields.update(base._fields) # Standard object mixin - merge in any Fields - if not hasattr(base, '_meta'): + if not hasattr(base, "_meta"): base_fields = {} for attr_name, attr_value in iteritems(base.__dict__): if not isinstance(attr_value, BaseField): @@ -85,27 +90,31 @@ class DocumentMetaclass(type): doc_fields[attr_name] = attr_value # Count names to ensure no db_field redefinitions - field_names[attr_value.db_field] = field_names.get( - attr_value.db_field, 0) + 1 + field_names[attr_value.db_field] = ( + field_names.get(attr_value.db_field, 0) + 1 + ) # Ensure no duplicate db_fields duplicate_db_fields = [k for k, v in field_names.items() if v > 1] if duplicate_db_fields: - msg = ('Multiple db_fields defined for: %s ' % - ', '.join(duplicate_db_fields)) + msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields) raise InvalidDocumentError(msg) # Set _fields and db_field maps - attrs['_fields'] = doc_fields - attrs['_db_field_map'] = {k: getattr(v, 'db_field', k) - for k, v in doc_fields.items()} - attrs['_reverse_db_field_map'] = { - v: k for k, v in attrs['_db_field_map'].items() + attrs["_fields"] = doc_fields + attrs["_db_field_map"] = { + k: getattr(v, "db_field", k) for k, v in doc_fields.items() + } + attrs["_reverse_db_field_map"] = { + v: k for k, v in attrs["_db_field_map"].items() } - attrs['_fields_ordered'] = tuple(i[1] for i in sorted( - (v.creation_counter, v.name) - for v in itervalues(doc_fields))) + attrs["_fields_ordered"] = tuple( + i[1] + for i in sorted( + (v.creation_counter, v.name) for v in itervalues(doc_fields) + ) + ) # # Set document hierarchy @@ -113,32 +122,34 @@ class DocumentMetaclass(type): superclasses = () class_name = [name] for base in flattened_bases: - if (not getattr(base, '_is_base_cls', True) and - not getattr(base, '_meta', {}).get('abstract', True)): + if not getattr(base, "_is_base_cls", True) and not getattr( + base, "_meta", {} + ).get("abstract", True): # Collate hierarchy for _cls and _subclasses class_name.append(base.__name__) - if hasattr(base, '_meta'): + if hasattr(base, "_meta"): # Warn if allow_inheritance isn't set and prevent # inheritance of classes where inheritance is set to 
False - allow_inheritance = base._meta.get('allow_inheritance') - if not allow_inheritance and not base._meta.get('abstract'): - raise ValueError('Document %s may not be subclassed. ' - 'To enable inheritance, use the "allow_inheritance" meta attribute.' % - base.__name__) + allow_inheritance = base._meta.get("allow_inheritance") + if not allow_inheritance and not base._meta.get("abstract"): + raise ValueError( + "Document %s may not be subclassed. " + 'To enable inheritance, use the "allow_inheritance" meta attribute.' + % base.__name__ + ) # Get superclasses from last base superclass - document_bases = [b for b in flattened_bases - if hasattr(b, '_class_name')] + document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")] if document_bases: superclasses = document_bases[0]._superclasses - superclasses += (document_bases[0]._class_name, ) + superclasses += (document_bases[0]._class_name,) - _cls = '.'.join(reversed(class_name)) - attrs['_class_name'] = _cls - attrs['_superclasses'] = superclasses - attrs['_subclasses'] = (_cls, ) - attrs['_types'] = attrs['_subclasses'] # TODO depreciate _types + _cls = ".".join(reversed(class_name)) + attrs["_class_name"] = _cls + attrs["_superclasses"] = superclasses + attrs["_subclasses"] = (_cls,) + attrs["_types"] = attrs["_subclasses"] # TODO depreciate _types # Create the new_class new_class = super_new(mcs, name, bases, attrs) @@ -149,8 +160,12 @@ class DocumentMetaclass(type): base._subclasses += (_cls,) base._types = base._subclasses # TODO depreciate _types - (Document, EmbeddedDocument, DictField, - CachedReferenceField) = mcs._import_classes() + ( + Document, + EmbeddedDocument, + DictField, + CachedReferenceField, + ) = mcs._import_classes() if issubclass(new_class, Document): new_class._collection = None @@ -169,52 +184,55 @@ class DocumentMetaclass(type): for val in new_class.__dict__.values(): if isinstance(val, classmethod): f = val.__get__(new_class) - if hasattr(f, '__func__') and not hasattr(f, 'im_func'): - f.__dict__.update({'im_func': getattr(f, '__func__')}) - if hasattr(f, '__self__') and not hasattr(f, 'im_self'): - f.__dict__.update({'im_self': getattr(f, '__self__')}) + if hasattr(f, "__func__") and not hasattr(f, "im_func"): + f.__dict__.update({"im_func": getattr(f, "__func__")}) + if hasattr(f, "__self__") and not hasattr(f, "im_self"): + f.__dict__.update({"im_self": getattr(f, "__self__")}) # Handle delete rules for field in itervalues(new_class._fields): f = field if f.owner_document is None: f.owner_document = new_class - delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) + delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING) if isinstance(f, CachedReferenceField): if issubclass(new_class, EmbeddedDocument): - raise InvalidDocumentError('CachedReferenceFields is not ' - 'allowed in EmbeddedDocuments') + raise InvalidDocumentError( + "CachedReferenceFields is not allowed in EmbeddedDocuments" + ) if f.auto_sync: f.start_listener() f.document_type._cached_reference_fields.append(f) - if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): - delete_rule = getattr(f.field, - 'reverse_delete_rule', - DO_NOTHING) + if isinstance(f, ComplexBaseField) and hasattr(f, "field"): + delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING) if isinstance(f, DictField) and delete_rule != DO_NOTHING: - msg = ('Reverse delete rules are not supported ' - 'for %s (field: %s)' % - (field.__class__.__name__, field.name)) + msg = ( + "Reverse delete rules are not supported " + "for %s (field: %s)" % 
(field.__class__.__name__, field.name) + ) raise InvalidDocumentError(msg) f = field.field if delete_rule != DO_NOTHING: if issubclass(new_class, EmbeddedDocument): - msg = ('Reverse delete rules are not supported for ' - 'EmbeddedDocuments (field: %s)' % field.name) + msg = ( + "Reverse delete rules are not supported for " + "EmbeddedDocuments (field: %s)" % field.name + ) raise InvalidDocumentError(msg) - f.document_type.register_delete_rule(new_class, - field.name, delete_rule) + f.document_type.register_delete_rule(new_class, field.name, delete_rule) - if (field.name and hasattr(Document, field.name) and - EmbeddedDocument not in new_class.mro()): - msg = ('%s is a document method and not a valid ' - 'field name' % field.name) + if ( + field.name + and hasattr(Document, field.name) + and EmbeddedDocument not in new_class.mro() + ): + msg = "%s is a document method and not a valid field name" % field.name raise InvalidDocumentError(msg) return new_class @@ -239,10 +257,10 @@ class DocumentMetaclass(type): @classmethod def _import_classes(mcs): - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') - DictField = _import_class('DictField') - CachedReferenceField = _import_class('CachedReferenceField') + Document = _import_class("Document") + EmbeddedDocument = _import_class("EmbeddedDocument") + DictField = _import_class("DictField") + CachedReferenceField = _import_class("CachedReferenceField") return Document, EmbeddedDocument, DictField, CachedReferenceField @@ -256,65 +274,67 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): super_new = super(TopLevelDocumentMetaclass, mcs).__new__ # Set default _meta data if base class, otherwise get user defined meta - if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: + if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: # defaults - attrs['_meta'] = { - 'abstract': True, - 'max_documents': None, - 'max_size': None, - 'ordering': [], # default ordering applied at runtime - 'indexes': [], # indexes to be ensured at runtime - 'id_field': None, - 'index_background': False, - 'index_drop_dups': False, - 'index_opts': None, - 'delete_rules': None, - + attrs["_meta"] = { + "abstract": True, + "max_documents": None, + "max_size": None, + "ordering": [], # default ordering applied at runtime + "indexes": [], # indexes to be ensured at runtime + "id_field": None, + "index_background": False, + "index_drop_dups": False, + "index_opts": None, + "delete_rules": None, # allow_inheritance can be True, False, and None. True means # "allow inheritance", False means "don't allow inheritance", # None means "do whatever your parent does, or don't allow # inheritance if you're a top-level class". 
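# Illustrative sketch (not from this patch) of the allow_inheritance rule
# enforced above: subclassing a Document whose meta leaves allow_inheritance
# unset (or False) raises ValueError at class-creation time; enabling it adds
# the automatic "_cls" StringField, storing e.g. "Animal.Dog".
from mongoengine import Document, StringField

class Animal(Document):
    name = StringField()
    meta = {"allow_inheritance": True}

class Dog(Animal):  # allowed because the parent opted in
    breed = StringField()

# Without the meta flag, the subclass definition itself would raise:
#   ValueError: Document Animal may not be subclassed. To enable inheritance,
#   use the "allow_inheritance" meta attribute.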
- 'allow_inheritance': None, + "allow_inheritance": None, } - attrs['_is_base_cls'] = True - attrs['_meta'].update(attrs.get('meta', {})) + attrs["_is_base_cls"] = True + attrs["_meta"].update(attrs.get("meta", {})) else: - attrs['_meta'] = attrs.get('meta', {}) + attrs["_meta"] = attrs.get("meta", {}) # Explicitly set abstract to false unless set - attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) - attrs['_is_base_cls'] = False + attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False) + attrs["_is_base_cls"] = False # Set flag marking as document class - as opposed to an object mixin - attrs['_is_document'] = True + attrs["_is_document"] = True # Ensure queryset_class is inherited - if 'objects' in attrs: - manager = attrs['objects'] - if hasattr(manager, 'queryset_class'): - attrs['_meta']['queryset_class'] = manager.queryset_class + if "objects" in attrs: + manager = attrs["objects"] + if hasattr(manager, "queryset_class"): + attrs["_meta"]["queryset_class"] = manager.queryset_class # Clean up top level meta - if 'meta' in attrs: - del attrs['meta'] + if "meta" in attrs: + del attrs["meta"] # Find the parent document class - parent_doc_cls = [b for b in flattened_bases - if b.__class__ == TopLevelDocumentMetaclass] + parent_doc_cls = [ + b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass + ] parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] # Prevent classes setting collection different to their parents # If parent wasn't an abstract class - if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and - not parent_doc_cls._meta.get('abstract', True)): - msg = 'Trying to set a collection on a subclass (%s)' % name + if ( + parent_doc_cls + and "collection" in attrs.get("_meta", {}) + and not parent_doc_cls._meta.get("abstract", True) + ): + msg = "Trying to set a collection on a subclass (%s)" % name warnings.warn(msg, SyntaxWarning) - del attrs['_meta']['collection'] + del attrs["_meta"]["collection"] # Ensure abstract documents have abstract bases - if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): - if (parent_doc_cls and - not parent_doc_cls._meta.get('abstract', False)): - msg = 'Abstract document cannot have non-abstract base' + if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"): + if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False): + msg = "Abstract document cannot have non-abstract base" raise ValueError(msg) return super_new(mcs, name, bases, attrs) @@ -323,38 +343,43 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): meta = MetaDict() for base in flattened_bases[::-1]: # Add any mixin metadata from plain objects - if hasattr(base, 'meta'): + if hasattr(base, "meta"): meta.merge(base.meta) - elif hasattr(base, '_meta'): + elif hasattr(base, "_meta"): meta.merge(base._meta) # Set collection in the meta if its callable - if (getattr(base, '_is_document', False) and - not base._meta.get('abstract')): - collection = meta.get('collection', None) + if getattr(base, "_is_document", False) and not base._meta.get("abstract"): + collection = meta.get("collection", None) if callable(collection): - meta['collection'] = collection(base) + meta["collection"] = collection(base) - meta.merge(attrs.get('_meta', {})) # Top level meta + meta.merge(attrs.get("_meta", {})) # Top level meta # Only simple classes (i.e. direct subclasses of Document) may set # allow_inheritance to False. 
If the base Document allows inheritance, # none of its subclasses can override allow_inheritance to False. - simple_class = all([b._meta.get('abstract') - for b in flattened_bases if hasattr(b, '_meta')]) + simple_class = all( + [b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")] + ) if ( - not simple_class and - meta['allow_inheritance'] is False and - not meta['abstract'] + not simple_class + and meta["allow_inheritance"] is False + and not meta["abstract"] ): - raise ValueError('Only direct subclasses of Document may set ' - '"allow_inheritance" to False') + raise ValueError( + "Only direct subclasses of Document may set " + '"allow_inheritance" to False' + ) # Set default collection name - if 'collection' not in meta: - meta['collection'] = ''.join('_%s' % c if c.isupper() else c - for c in name).strip('_').lower() - attrs['_meta'] = meta + if "collection" not in meta: + meta["collection"] = ( + "".join("_%s" % c if c.isupper() else c for c in name) + .strip("_") + .lower() + ) + attrs["_meta"] = meta # Call super and get the new class new_class = super_new(mcs, name, bases, attrs) @@ -362,36 +387,36 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): meta = new_class._meta # Set index specifications - meta['index_specs'] = new_class._build_index_specs(meta['indexes']) + meta["index_specs"] = new_class._build_index_specs(meta["indexes"]) # If collection is a callable - call it and set the value - collection = meta.get('collection') + collection = meta.get("collection") if callable(collection): - new_class._meta['collection'] = collection(new_class) + new_class._meta["collection"] = collection(new_class) # Provide a default queryset unless exists or one has been set - if 'objects' not in dir(new_class): + if "objects" not in dir(new_class): new_class.objects = QuerySetManager() # Validate the fields and set primary key if needed for field_name, field in iteritems(new_class._fields): if field.primary_key: # Ensure only one primary key is set - current_pk = new_class._meta.get('id_field') + current_pk = new_class._meta.get("id_field") if current_pk and current_pk != field_name: - raise ValueError('Cannot override primary key field') + raise ValueError("Cannot override primary key field") # Set primary key if not current_pk: - new_class._meta['id_field'] = field_name + new_class._meta["id_field"] = field_name new_class.id = field # If the document doesn't explicitly define a primary key field, create # one. Make it an ObjectIdField and give it a non-clashing name ("id" # by default, but can be different if that one's taken). - if not new_class._meta.get('id_field'): + if not new_class._meta.get("id_field"): id_name, id_db_name = mcs.get_auto_id_names(new_class) - new_class._meta['id_field'] = id_name + new_class._meta["id_field"] = id_name new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) new_class._fields[id_name].name = id_name new_class.id = new_class._fields[id_name] @@ -400,22 +425,20 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # Prepend the ID field to _fields_ordered (so that it's *always* # the first field). - new_class._fields_ordered = (id_name, ) + new_class._fields_ordered + new_class._fields_ordered = (id_name,) + new_class._fields_ordered # Merge in exceptions with parent hierarchy. 
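# Illustrative sketch (not from this patch) of the primary-key and collection
# naming rules applied above: a field with primary_key=True is stored under
# "_id", otherwise an ObjectIdField named "id" is added automatically, and the
# default collection name is the snake_cased class name (BlogPost -> "blog_post").
from mongoengine import Document, StringField

class Country(Document):
    code = StringField(primary_key=True)  # becomes the document's _id
    name = StringField()

c = Country(code="FR", name="France")
assert c.pk == "FR"
assert c.to_mongo()["_id"] == "FR"
assert Country._get_collection_name() == "country"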
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) - module = attrs.get('__module__') + module = attrs.get("__module__") for exc in exceptions_to_merge: name = exc.__name__ parents = tuple( - getattr(base, name) - for base in flattened_bases - if hasattr(base, name) + getattr(base, name) for base in flattened_bases if hasattr(base, name) ) or (exc,) # Create a new exception and set it as an attribute on the new # class. - exception = type(name, parents, {'__module__': module}) + exception = type(name, parents, {"__module__": module}) setattr(new_class, name, exception) return new_class @@ -431,23 +454,17 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): Defaults to ('id', '_id'), or generates a non-clashing name in the form of ('auto_id_X', '_auto_id_X') if the default name is already taken. """ - id_name, id_db_name = ('id', '_id') + id_name, id_db_name = ("id", "_id") existing_fields = {field_name for field_name in new_class._fields} existing_db_fields = {v.db_field for v in new_class._fields.values()} - if ( - id_name not in existing_fields and - id_db_name not in existing_db_fields - ): + if id_name not in existing_fields and id_db_name not in existing_db_fields: return id_name, id_db_name - id_basename, id_db_basename, i = ('auto_id', '_auto_id', 0) + id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) for i in itertools.count(): - id_name = '{0}_{1}'.format(id_basename, i) - id_db_name = '{0}_{1}'.format(id_db_basename, i) - if ( - id_name not in existing_fields and - id_db_name not in existing_db_fields - ): + id_name = "{0}_{1}".format(id_basename, i) + id_db_name = "{0}_{1}".format(id_db_basename, i) + if id_name not in existing_fields and id_db_name not in existing_db_fields: return id_name, id_db_name @@ -455,7 +472,8 @@ class MetaDict(dict): """Custom dictionary for meta classes. 
Handles the merging of set indexes """ - _merge_options = ('indexes',) + + _merge_options = ("indexes",) def merge(self, new_options): for k, v in iteritems(new_options): @@ -467,4 +485,5 @@ class MetaDict(dict): class BasesTuple(tuple): """Special class to handle introspection of bases tuple in __new__""" + pass diff --git a/mongoengine/common.py b/mongoengine/common.py index bcdea194..640384ec 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -19,34 +19,44 @@ def _import_class(cls_name): if cls_name in _class_registry_cache: return _class_registry_cache.get(cls_name) - doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', - 'MapReduceDocument') + doc_classes = ( + "Document", + "DynamicEmbeddedDocument", + "EmbeddedDocument", + "MapReduceDocument", + ) # Field Classes if not _field_list_cache: from mongoengine.fields import __all__ as fields + _field_list_cache.extend(fields) from mongoengine.base.fields import __all__ as fields + _field_list_cache.extend(fields) field_classes = _field_list_cache - deref_classes = ('DeReference',) + deref_classes = ("DeReference",) - if cls_name == 'BaseDocument': + if cls_name == "BaseDocument": from mongoengine.base import document as module - import_classes = ['BaseDocument'] + + import_classes = ["BaseDocument"] elif cls_name in doc_classes: from mongoengine import document as module + import_classes = doc_classes elif cls_name in field_classes: from mongoengine import fields as module + import_classes = field_classes elif cls_name in deref_classes: from mongoengine import dereference as module + import_classes = deref_classes else: - raise ValueError('No import set for: %s' % cls_name) + raise ValueError("No import set for: %s" % cls_name) for cls in import_classes: _class_registry_cache[cls] = getattr(module, cls) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 6a613a42..ef0dd27c 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -3,21 +3,21 @@ from pymongo.database import _check_name import six __all__ = [ - 'DEFAULT_CONNECTION_NAME', - 'DEFAULT_DATABASE_NAME', - 'MongoEngineConnectionError', - 'connect', - 'disconnect', - 'disconnect_all', - 'get_connection', - 'get_db', - 'register_connection', + "DEFAULT_CONNECTION_NAME", + "DEFAULT_DATABASE_NAME", + "MongoEngineConnectionError", + "connect", + "disconnect", + "disconnect_all", + "get_connection", + "get_db", + "register_connection", ] -DEFAULT_CONNECTION_NAME = 'default' -DEFAULT_DATABASE_NAME = 'test' -DEFAULT_HOST = 'localhost' +DEFAULT_CONNECTION_NAME = "default" +DEFAULT_DATABASE_NAME = "test" +DEFAULT_HOST = "localhost" DEFAULT_PORT = 27017 _connection_settings = {} @@ -31,6 +31,7 @@ class MongoEngineConnectionError(Exception): """Error raised when the database connection can't be established or when a connection with a requested alias can't be retrieved. """ + pass @@ -39,18 +40,23 @@ def _check_db_name(name): This functionality is copied from pymongo Database class constructor. 
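# Illustrative sketch (not from this patch) of how MetaDict merges meta from
# base classes, assuming the usual behaviour that options listed in
# _merge_options (currently only "indexes") are concatenated while every other
# option is simply overwritten by the subclass value.
from mongoengine.base.metaclasses import MetaDict

meta = MetaDict({"indexes": ["title"], "ordering": ["-created"]})
meta.merge({"indexes": ["author"], "ordering": ["+created"]})

assert meta["indexes"] == ["title", "author"]  # merged option: concatenated
assert meta["ordering"] == ["+created"]        # plain option: replaced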
""" if not isinstance(name, six.string_types): - raise TypeError('name must be an instance of %s' % six.string_types) - elif name != '$external': + raise TypeError("name must be an instance of %s" % six.string_types) + elif name != "$external": _check_name(name) def _get_connection_settings( - db=None, name=None, host=None, port=None, - read_preference=READ_PREFERENCE, - username=None, password=None, - authentication_source=None, - authentication_mechanism=None, - **kwargs): + db=None, + name=None, + host=None, + port=None, + read_preference=READ_PREFERENCE, + username=None, + password=None, + authentication_source=None, + authentication_mechanism=None, + **kwargs +): """Get the connection settings as a dict : param db: the name of the database to use, for compatibility with connect @@ -73,18 +79,18 @@ def _get_connection_settings( .. versionchanged:: 0.10.6 - added mongomock support """ conn_settings = { - 'name': name or db or DEFAULT_DATABASE_NAME, - 'host': host or DEFAULT_HOST, - 'port': port or DEFAULT_PORT, - 'read_preference': read_preference, - 'username': username, - 'password': password, - 'authentication_source': authentication_source, - 'authentication_mechanism': authentication_mechanism + "name": name or db or DEFAULT_DATABASE_NAME, + "host": host or DEFAULT_HOST, + "port": port or DEFAULT_PORT, + "read_preference": read_preference, + "username": username, + "password": password, + "authentication_source": authentication_source, + "authentication_mechanism": authentication_mechanism, } - _check_db_name(conn_settings['name']) - conn_host = conn_settings['host'] + _check_db_name(conn_settings["name"]) + conn_host = conn_settings["host"] # Host can be a list or a string, so if string, force to a list. if isinstance(conn_host, six.string_types): @@ -94,32 +100,32 @@ def _get_connection_settings( for entity in conn_host: # Handle Mongomock - if entity.startswith('mongomock://'): - conn_settings['is_mock'] = True + if entity.startswith("mongomock://"): + conn_settings["is_mock"] = True # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` - resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1)) + resolved_hosts.append(entity.replace("mongomock://", "mongodb://", 1)) # Handle URI style connections, only updating connection params which # were explicitly specified in the URI. 
- elif '://' in entity: + elif "://" in entity: uri_dict = uri_parser.parse_uri(entity) resolved_hosts.append(entity) - if uri_dict.get('database'): - conn_settings['name'] = uri_dict.get('database') + if uri_dict.get("database"): + conn_settings["name"] = uri_dict.get("database") - for param in ('read_preference', 'username', 'password'): + for param in ("read_preference", "username", "password"): if uri_dict.get(param): conn_settings[param] = uri_dict[param] - uri_options = uri_dict['options'] - if 'replicaset' in uri_options: - conn_settings['replicaSet'] = uri_options['replicaset'] - if 'authsource' in uri_options: - conn_settings['authentication_source'] = uri_options['authsource'] - if 'authmechanism' in uri_options: - conn_settings['authentication_mechanism'] = uri_options['authmechanism'] - if 'readpreference' in uri_options: + uri_options = uri_dict["options"] + if "replicaset" in uri_options: + conn_settings["replicaSet"] = uri_options["replicaset"] + if "authsource" in uri_options: + conn_settings["authentication_source"] = uri_options["authsource"] + if "authmechanism" in uri_options: + conn_settings["authentication_mechanism"] = uri_options["authmechanism"] + if "readpreference" in uri_options: read_preferences = ( ReadPreference.NEAREST, ReadPreference.PRIMARY, @@ -133,34 +139,41 @@ def _get_connection_settings( # int (e.g. 3). # TODO simplify the code below once we drop support for # PyMongo v3.4. - read_pf_mode = uri_options['readpreference'] + read_pf_mode = uri_options["readpreference"] if isinstance(read_pf_mode, six.string_types): read_pf_mode = read_pf_mode.lower() for preference in read_preferences: if ( - preference.name.lower() == read_pf_mode or - preference.mode == read_pf_mode + preference.name.lower() == read_pf_mode + or preference.mode == read_pf_mode ): - conn_settings['read_preference'] = preference + conn_settings["read_preference"] = preference break else: resolved_hosts.append(entity) - conn_settings['host'] = resolved_hosts + conn_settings["host"] = resolved_hosts # Deprecated parameters that should not be passed on - kwargs.pop('slaves', None) - kwargs.pop('is_slave', None) + kwargs.pop("slaves", None) + kwargs.pop("is_slave", None) conn_settings.update(kwargs) return conn_settings -def register_connection(alias, db=None, name=None, host=None, port=None, - read_preference=READ_PREFERENCE, - username=None, password=None, - authentication_source=None, - authentication_mechanism=None, - **kwargs): +def register_connection( + alias, + db=None, + name=None, + host=None, + port=None, + read_preference=READ_PREFERENCE, + username=None, + password=None, + authentication_source=None, + authentication_mechanism=None, + **kwargs +): """Register the connection settings. : param alias: the name that will be used to refer to this connection @@ -185,12 +198,17 @@ def register_connection(alias, db=None, name=None, host=None, port=None, .. 
versionchanged:: 0.10.6 - added mongomock support """ conn_settings = _get_connection_settings( - db=db, name=name, host=host, port=port, + db=db, + name=name, + host=host, + port=port, read_preference=read_preference, - username=username, password=password, + username=username, + password=password, authentication_source=authentication_source, authentication_mechanism=authentication_mechanism, - **kwargs) + **kwargs + ) _connection_settings[alias] = conn_settings @@ -206,7 +224,7 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME): if alias in _dbs: # Detach all cached collections in Documents for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): - if issubclass(doc_cls, Document): # Skip EmbeddedDocument + if issubclass(doc_cls, Document): # Skip EmbeddedDocument doc_cls._disconnect() del _dbs[alias] @@ -237,19 +255,21 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): # Raise MongoEngineConnectionError if it doesn't. if alias not in _connection_settings: if alias == DEFAULT_CONNECTION_NAME: - msg = 'You have not defined a default connection' + msg = "You have not defined a default connection" else: msg = 'Connection with alias "%s" has not been defined' % alias raise MongoEngineConnectionError(msg) def _clean_settings(settings_dict): irrelevant_fields_set = { - 'name', 'username', 'password', - 'authentication_source', 'authentication_mechanism' + "name", + "username", + "password", + "authentication_source", + "authentication_mechanism", } return { - k: v for k, v in settings_dict.items() - if k not in irrelevant_fields_set + k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set } raw_conn_settings = _connection_settings[alias].copy() @@ -260,13 +280,12 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): conn_settings = _clean_settings(raw_conn_settings) # Determine if we should use PyMongo's or mongomock's MongoClient. 
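# Illustrative sketch (not from this patch) of the mongomock branch selected
# just below: a "mongomock://" host makes get_connection build a mongomock
# MongoClient instead of a real one, which is convenient in unit tests.
# Assumes the optional mongomock package is installed; otherwise a
# RuntimeError is raised.
from mongoengine import connect, disconnect

connect("mongoenginetest", host="mongomock://localhost", alias="testdb")
# ... exercise documents against the in-memory fake ...
disconnect(alias="testdb")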
- is_mock = conn_settings.pop('is_mock', False) + is_mock = conn_settings.pop("is_mock", False) if is_mock: try: import mongomock except ImportError: - raise RuntimeError('You need mongomock installed to mock ' - 'MongoEngine.') + raise RuntimeError("You need mongomock installed to mock MongoEngine.") connection_class = mongomock.MongoClient else: connection_class = MongoClient @@ -277,9 +296,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): connection = existing_connection else: connection = _create_connection( - alias=alias, - connection_class=connection_class, - **conn_settings + alias=alias, connection_class=connection_class, **conn_settings ) _connections[alias] = connection return _connections[alias] @@ -294,7 +311,8 @@ def _create_connection(alias, connection_class, **connection_settings): return connection_class(**connection_settings) except Exception as e: raise MongoEngineConnectionError( - 'Cannot connect to database %s :\n%s' % (alias, e)) + "Cannot connect to database %s :\n%s" % (alias, e) + ) def _find_existing_connection(connection_settings): @@ -316,7 +334,7 @@ def _find_existing_connection(connection_settings): # Only remove the name but it's important to # keep the username/password/authentication_source/authentication_mechanism # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) - return {k: v for k, v in settings_dict.items() if k != 'name'} + return {k: v for k, v in settings_dict.items() if k != "name"} cleaned_conn_settings = _clean_settings(connection_settings) for db_alias, connection_settings in connection_settings_bis: @@ -332,14 +350,18 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): if alias not in _dbs: conn = get_connection(alias) conn_settings = _connection_settings[alias] - db = conn[conn_settings['name']] - auth_kwargs = {'source': conn_settings['authentication_source']} - if conn_settings['authentication_mechanism'] is not None: - auth_kwargs['mechanism'] = conn_settings['authentication_mechanism'] + db = conn[conn_settings["name"]] + auth_kwargs = {"source": conn_settings["authentication_source"]} + if conn_settings["authentication_mechanism"] is not None: + auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] # Authenticate if necessary - if conn_settings['username'] and (conn_settings['password'] or - conn_settings['authentication_mechanism'] == 'MONGODB-X509'): - db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs) + if conn_settings["username"] and ( + conn_settings["password"] + or conn_settings["authentication_mechanism"] == "MONGODB-X509" + ): + db.authenticate( + conn_settings["username"], conn_settings["password"], **auth_kwargs + ) _dbs[alias] = db return _dbs[alias] @@ -368,8 +390,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): if new_conn_settings != prev_conn_setting: err_msg = ( - u'A different connection with alias `{}` was already ' - u'registered. Use disconnect() first' + u"A different connection with alias `{}` was already " + u"registered. 
Use disconnect() first" ).format(alias) raise MongoEngineConnectionError(err_msg) else: diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 98bd897b..3424a5d5 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -7,8 +7,14 @@ from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.pymongo_support import count_documents -__all__ = ('switch_db', 'switch_collection', 'no_dereference', - 'no_sub_classes', 'query_counter', 'set_write_concern') +__all__ = ( + "switch_db", + "switch_collection", + "no_dereference", + "no_sub_classes", + "query_counter", + "set_write_concern", +) class switch_db(object): @@ -38,17 +44,17 @@ class switch_db(object): self.cls = cls self.collection = cls._get_collection() self.db_alias = db_alias - self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME) + self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) def __enter__(self): """Change the db_alias and clear the cached collection.""" - self.cls._meta['db_alias'] = self.db_alias + self.cls._meta["db_alias"] = self.db_alias self.cls._collection = None return self.cls def __exit__(self, t, value, traceback): """Reset the db_alias and collection.""" - self.cls._meta['db_alias'] = self.ori_db_alias + self.cls._meta["db_alias"] = self.ori_db_alias self.cls._collection = self.collection @@ -111,14 +117,15 @@ class no_dereference(object): """ self.cls = cls - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - ComplexBaseField = _import_class('ComplexBaseField') + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") + ComplexBaseField = _import_class("ComplexBaseField") - self.deref_fields = [k for k, v in iteritems(self.cls._fields) - if isinstance(v, (ReferenceField, - GenericReferenceField, - ComplexBaseField))] + self.deref_fields = [ + k + for k, v in iteritems(self.cls._fields) + if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) + ] def __enter__(self): """Change the objects default and _auto_dereference values.""" @@ -180,15 +187,12 @@ class query_counter(object): """ self.db = get_db() self.initial_profiling_level = None - self._ctx_query_counter = 0 # number of queries issued by the context + self._ctx_query_counter = 0 # number of queries issued by the context self._ignored_query = { - 'ns': - {'$ne': '%s.system.indexes' % self.db.name}, - 'op': # MONGODB < 3.2 - {'$ne': 'killcursors'}, - 'command.killCursors': # MONGODB >= 3.2 - {'$exists': False} + "ns": {"$ne": "%s.system.indexes" % self.db.name}, + "op": {"$ne": "killcursors"}, # MONGODB < 3.2 + "command.killCursors": {"$exists": False}, # MONGODB >= 3.2 } def _turn_on_profiling(self): @@ -238,8 +242,13 @@ class query_counter(object): and substracting the queries issued by this context. 
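# Illustrative usage sketch (not from this patch) for two of the context
# managers touched above, assuming a running mongod with profiling available:
# switch_db temporarily rebinds a Document class to another registered alias,
# and query_counter counts the queries issued inside its block.
from mongoengine import Document, StringField, connect
from mongoengine.context_managers import query_counter, switch_db

connect("app")                           # default alias
connect("app_archive", alias="archive")  # second alias, names are made up

class User(Document):
    name = StringField()

with switch_db(User, "archive") as ArchivedUser:
    ArchivedUser(name="old account").save()  # written to the "archive" connection

with query_counter() as q:
    User.objects.first()
    assert q == 1  # the single find issued above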
In fact everytime this is called, 1 query is issued so we need to balance that """ - count = count_documents(self.db.system.profile, self._ignored_query) - self._ctx_query_counter - self._ctx_query_counter += 1 # Account for the query we just issued to gather the information + count = ( + count_documents(self.db.system.profile, self._ignored_query) + - self._ctx_query_counter + ) + self._ctx_query_counter += ( + 1 + ) # Account for the query we just issued to gather the information return count diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index eaebb56f..9e75f353 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -2,8 +2,13 @@ from bson import DBRef, SON import six from six import iteritems -from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, - TopLevelDocumentMetaclass, get_document) +from mongoengine.base import ( + BaseDict, + BaseList, + EmbeddedDocumentList, + TopLevelDocumentMetaclass, + get_document, +) from mongoengine.base.datastructures import LazyReference from mongoengine.connection import get_db from mongoengine.document import Document, EmbeddedDocument @@ -36,21 +41,23 @@ class DeReference(object): self.max_depth = max_depth doc_type = None - if instance and isinstance(instance, (Document, EmbeddedDocument, - TopLevelDocumentMetaclass)): + if instance and isinstance( + instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass) + ): doc_type = instance._fields.get(name) - while hasattr(doc_type, 'field'): + while hasattr(doc_type, "field"): doc_type = doc_type.field if isinstance(doc_type, ReferenceField): field = doc_type doc_type = doc_type.document_type - is_list = not hasattr(items, 'items') + is_list = not hasattr(items, "items") if is_list and all([i.__class__ == doc_type for i in items]): return items elif not is_list and all( - [i.__class__ == doc_type for i in items.values()]): + [i.__class__ == doc_type for i in items.values()] + ): return items elif not field.dbref: # We must turn the ObjectIds into DBRefs @@ -83,7 +90,7 @@ class DeReference(object): new_items[k] = value return new_items - if not hasattr(items, 'items'): + if not hasattr(items, "items"): items = _get_items_from_list(items) else: items = _get_items_from_dict(items) @@ -120,13 +127,19 @@ class DeReference(object): continue elif isinstance(v, DBRef): reference_map.setdefault(field.document_type, set()).add(v.id) - elif isinstance(v, (dict, SON)) and '_ref' in v: - reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) + elif isinstance(v, (dict, SON)) and "_ref" in v: + reference_map.setdefault(get_document(v["_cls"]), set()).add( + v["_ref"].id + ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - field_cls = getattr(getattr(field, 'field', None), 'document_type', None) + field_cls = getattr( + getattr(field, "field", None), "document_type", None + ) references = self._find_references(v, depth) for key, refs in iteritems(references): - if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): + if isinstance( + field_cls, (Document, TopLevelDocumentMetaclass) + ): key = field_cls reference_map.setdefault(key, set()).update(refs) elif isinstance(item, LazyReference): @@ -134,8 +147,10 @@ class DeReference(object): continue elif isinstance(item, DBRef): reference_map.setdefault(item.collection, set()).add(item.id) - elif isinstance(item, (dict, SON)) and '_ref' in item: - reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) + elif isinstance(item, 
(dict, SON)) and "_ref" in item: + reference_map.setdefault(get_document(item["_cls"]), set()).add( + item["_ref"].id + ) elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: references = self._find_references(item, depth - 1) for key, refs in iteritems(references): @@ -151,12 +166,13 @@ class DeReference(object): # we use getattr instead of hasattr because hasattr swallows any exception under python2 # so it could hide nasty things without raising exceptions (cfr bug #1688)) - ref_document_cls_exists = (getattr(collection, 'objects', None) is not None) + ref_document_cls_exists = getattr(collection, "objects", None) is not None if ref_document_cls_exists: col_name = collection._get_collection_name() - refs = [dbref for dbref in dbrefs - if (col_name, dbref) not in object_map] + refs = [ + dbref for dbref in dbrefs if (col_name, dbref) not in object_map + ] references = collection.objects.in_bulk(refs) for key, doc in iteritems(references): object_map[(col_name, key)] = doc @@ -164,23 +180,26 @@ class DeReference(object): if isinstance(doc_type, (ListField, DictField, MapField)): continue - refs = [dbref for dbref in dbrefs - if (collection, dbref) not in object_map] + refs = [ + dbref for dbref in dbrefs if (collection, dbref) not in object_map + ] if doc_type: - references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) + references = doc_type._get_db()[collection].find( + {"_id": {"$in": refs}} + ) for ref in references: doc = doc_type._from_son(ref) object_map[(collection, doc.id)] = doc else: - references = get_db()[collection].find({'_id': {'$in': refs}}) + references = get_db()[collection].find({"_id": {"$in": refs}}) for ref in references: - if '_cls' in ref: - doc = get_document(ref['_cls'])._from_son(ref) + if "_cls" in ref: + doc = get_document(ref["_cls"])._from_son(ref) elif doc_type is None: doc = get_document( - ''.join(x.capitalize() - for x in collection.split('_')))._from_son(ref) + "".join(x.capitalize() for x in collection.split("_")) + )._from_son(ref) else: doc = doc_type._from_son(ref) object_map[(collection, doc.id)] = doc @@ -208,19 +227,20 @@ class DeReference(object): return BaseList(items, instance, name) if isinstance(items, (dict, SON)): - if '_ref' in items: + if "_ref" in items: return self.object_map.get( - (items['_ref'].collection, items['_ref'].id), items) - elif '_cls' in items: - doc = get_document(items['_cls'])._from_son(items) - _cls = doc._data.pop('_cls', None) - del items['_cls'] + (items["_ref"].collection, items["_ref"].id), items + ) + elif "_cls" in items: + doc = get_document(items["_cls"])._from_son(items) + _cls = doc._data.pop("_cls", None) + del items["_cls"] doc._data = self._attach_objects(doc._data, depth, doc, None) if _cls is not None: - doc._data['_cls'] = _cls + doc._data["_cls"] = _cls return doc - if not hasattr(items, 'items'): + if not hasattr(items, "items"): is_list = True list_type = BaseList if isinstance(items, EmbeddedDocumentList): @@ -247,17 +267,25 @@ class DeReference(object): v = data[k]._data.get(field_name, None) if isinstance(v, DBRef): data[k]._data[field_name] = self.object_map.get( - (v.collection, v.id), v) - elif isinstance(v, (dict, SON)) and '_ref' in v: + (v.collection, v.id), v + ) + elif isinstance(v, (dict, SON)) and "_ref" in v: data[k]._data[field_name] = self.object_map.get( - (v['_ref'].collection, v['_ref'].id), v) + (v["_ref"].collection, v["_ref"].id), v + ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = 
six.text_type('{0}.{1}.{2}').format(name, k, field_name) - data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) + item_name = six.text_type("{0}.{1}.{2}").format( + name, k, field_name + ) + data[k]._data[field_name] = self._attach_objects( + v, depth, instance=instance, name=item_name + ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = '%s.%s' % (name, k) if name else name - data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) - elif isinstance(v, DBRef) and hasattr(v, 'id'): + item_name = "%s.%s" % (name, k) if name else name + data[k] = self._attach_objects( + v, depth - 1, instance=instance, name=item_name + ) + elif isinstance(v, DBRef) and hasattr(v, "id"): data[k] = self.object_map.get((v.collection, v.id), v) if instance and name: diff --git a/mongoengine/document.py b/mongoengine/document.py index 520de5bf..41166df4 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -8,23 +8,36 @@ import six from six import iteritems from mongoengine import signals -from mongoengine.base import (BaseDict, BaseDocument, BaseList, - DocumentMetaclass, EmbeddedDocumentList, - TopLevelDocumentMetaclass, get_document) +from mongoengine.base import ( + BaseDict, + BaseDocument, + BaseList, + DocumentMetaclass, + EmbeddedDocumentList, + TopLevelDocumentMetaclass, + get_document, +) from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db -from mongoengine.context_managers import (set_write_concern, - switch_collection, - switch_db) -from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, - SaveConditionError) +from mongoengine.context_managers import set_write_concern, switch_collection, switch_db +from mongoengine.errors import ( + InvalidDocumentError, + InvalidQueryError, + SaveConditionError, +) from mongoengine.pymongo_support import list_collection_names -from mongoengine.queryset import (NotUniqueError, OperationError, - QuerySet, transform) +from mongoengine.queryset import NotUniqueError, OperationError, QuerySet, transform -__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', - 'DynamicEmbeddedDocument', 'OperationError', - 'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument') +__all__ = ( + "Document", + "EmbeddedDocument", + "DynamicDocument", + "DynamicEmbeddedDocument", + "OperationError", + "InvalidCollectionError", + "NotUniqueError", + "MapReduceDocument", +) def includes_cls(fields): @@ -35,7 +48,7 @@ def includes_cls(fields): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] - return first_field == '_cls' + return first_field == "_cls" class InvalidCollectionError(Exception): @@ -56,7 +69,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): :attr:`meta` dictionary. 
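# Illustrative sketch (not from this patch) of the Document / EmbeddedDocument
# split described above: embedded documents have no collection or _id of their
# own and are stored inline in the owning Document, which is why to_mongo()
# drops a None "_id" for them. Assumes a local mongod.
from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         ListField, StringField, connect)

connect("blog_demo")

class Comment(EmbeddedDocument):
    author = StringField()
    text = StringField()

class Post(Document):
    title = StringField(required=True)
    comments = ListField(EmbeddedDocumentField(Comment))

Post(title="hello",
     comments=[Comment(author="ada", text="nice post")]).save()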
""" - __slots__ = ('_instance', ) + __slots__ = ("_instance",) # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 @@ -85,8 +98,8 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) # remove _id from the SON if it's in it and it's None - if '_id' in data and data['_id'] is None: - del data['_id'] + if "_id" in data and data["_id"] is None: + del data["_id"] return data @@ -147,19 +160,19 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass - __slots__ = ('__objects',) + __slots__ = ("__objects",) @property def pk(self): """Get the primary key.""" - if 'id_field' not in self._meta: + if "id_field" not in self._meta: return None - return getattr(self, self._meta['id_field']) + return getattr(self, self._meta["id_field"]) @pk.setter def pk(self, value): """Set the primary key.""" - return setattr(self, self._meta['id_field'], value) + return setattr(self, self._meta["id_field"], value) def __hash__(self): """Return the hash based on the PK of this document. If it's new @@ -173,7 +186,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): @classmethod def _get_db(cls): """Some Model using other db_alias""" - return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) + return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)) @classmethod def _disconnect(cls): @@ -190,9 +203,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): 2. Creates indexes defined in this document's :attr:`meta` dictionary. This happens only if `auto_create_index` is True. """ - if not hasattr(cls, '_collection') or cls._collection is None: + if not hasattr(cls, "_collection") or cls._collection is None: # Get the collection, either capped or regular. - if cls._meta.get('max_size') or cls._meta.get('max_documents'): + if cls._meta.get("max_size") or cls._meta.get("max_documents"): cls._collection = cls._get_capped_collection() else: db = cls._get_db() @@ -203,8 +216,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # set to False. # Also there is no need to ensure indexes on slave. db = cls._get_db() - if cls._meta.get('auto_create_index', True) and\ - db.client.is_primary: + if cls._meta.get("auto_create_index", True) and db.client.is_primary: cls.ensure_indexes() return cls._collection @@ -216,8 +228,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): collection_name = cls._get_collection_name() # Get max document limit and max byte size from meta. - max_size = cls._meta.get('max_size') or 10 * 2 ** 20 # 10MB default - max_documents = cls._meta.get('max_documents') + max_size = cls._meta.get("max_size") or 10 * 2 ** 20 # 10MB default + max_documents = cls._meta.get("max_documents") # MongoDB will automatically raise the size to make it a multiple of # 256 bytes. We raise it here ourselves to be able to reliably compare @@ -227,24 +239,23 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # If the collection already exists and has different options # (i.e. isn't capped or has different max/size), raise an error. 
- if collection_name in list_collection_names(db, include_system_collections=True): + if collection_name in list_collection_names( + db, include_system_collections=True + ): collection = db[collection_name] options = collection.options() - if ( - options.get('max') != max_documents or - options.get('size') != max_size - ): + if options.get("max") != max_documents or options.get("size") != max_size: raise InvalidCollectionError( 'Cannot create collection "{}" as a capped ' - 'collection as it already exists'.format(cls._collection) + "collection as it already exists".format(cls._collection) ) return collection # Create a new capped collection. - opts = {'capped': True, 'size': max_size} + opts = {"capped": True, "size": max_size} if max_documents: - opts['max'] = max_documents + opts["max"] = max_documents return db.create_collection(collection_name, **opts) @@ -253,11 +264,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # If '_id' is None, try and set it from self._data. If that # doesn't exist either, remove '_id' from the SON completely. - if data['_id'] is None: - if self._data.get('id') is None: - del data['_id'] + if data["_id"] is None: + if self._data.get("id") is None: + del data["_id"] else: - data['_id'] = self._data['id'] + data["_id"] = self._data["id"] return data @@ -279,15 +290,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): query = {} if self.pk is None: - raise InvalidDocumentError('The document does not have a primary key.') + raise InvalidDocumentError("The document does not have a primary key.") - id_field = self._meta['id_field'] + id_field = self._meta["id_field"] query = query.copy() if isinstance(query, dict) else query.to_query(self) if id_field not in query: query[id_field] = self.pk elif query[id_field] != self.pk: - raise InvalidQueryError('Invalid document modify query: it must modify only this document.') + raise InvalidQueryError( + "Invalid document modify query: it must modify only this document." + ) # Need to add shard key to query, or you get an error query.update(self._object_key) @@ -304,9 +317,19 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): return True - def save(self, force_insert=False, validate=True, clean=True, - write_concern=None, cascade=None, cascade_kwargs=None, - _refs=None, save_condition=None, signal_kwargs=None, **kwargs): + def save( + self, + force_insert=False, + validate=True, + clean=True, + write_concern=None, + cascade=None, + cascade_kwargs=None, + _refs=None, + save_condition=None, + signal_kwargs=None, + **kwargs + ): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be created. 
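# Illustrative sketch (not from this patch) of the save_condition parameter
# accepted above: it turns the write into a conditional update, and
# SaveConditionError is raised when no document matched the condition (for
# example because a concurrent writer bumped the version first). Assumes a
# local mongod.
from mongoengine import Document, IntField, StringField, connect
from mongoengine.errors import SaveConditionError

connect("pages_demo")

class Page(Document):
    body = StringField()
    version = IntField(default=1)

page = Page(body="v1").save()
page.body = "v2"
page.version = 2
try:
    page.save(save_condition={"version": 1})  # only if still at version 1
except SaveConditionError:
    pass  # someone else updated the page first; reload and retry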
@@ -360,8 +383,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """ signal_kwargs = signal_kwargs or {} - if self._meta.get('abstract'): - raise InvalidDocumentError('Cannot save an abstract document.') + if self._meta.get("abstract"): + raise InvalidDocumentError("Cannot save an abstract document.") signals.pre_save.send(self.__class__, document=self, **signal_kwargs) @@ -371,15 +394,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if write_concern is None: write_concern = {} - doc_id = self.to_mongo(fields=[self._meta['id_field']]) - created = ('_id' not in doc_id or self._created or force_insert) + doc_id = self.to_mongo(fields=[self._meta["id_field"]]) + created = "_id" not in doc_id or self._created or force_insert - signals.pre_save_post_validation.send(self.__class__, document=self, - created=created, **signal_kwargs) + signals.pre_save_post_validation.send( + self.__class__, document=self, created=created, **signal_kwargs + ) # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation doc = self.to_mongo() - if self._meta.get('auto_create_index', True): + if self._meta.get("auto_create_index", True): self.ensure_indexes() try: @@ -387,44 +411,45 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if created: object_id = self._save_create(doc, force_insert, write_concern) else: - object_id, created = self._save_update(doc, save_condition, - write_concern) + object_id, created = self._save_update( + doc, save_condition, write_concern + ) if cascade is None: - cascade = (self._meta.get('cascade', False) or - cascade_kwargs is not None) + cascade = self._meta.get("cascade", False) or cascade_kwargs is not None if cascade: kwargs = { - 'force_insert': force_insert, - 'validate': validate, - 'write_concern': write_concern, - 'cascade': cascade + "force_insert": force_insert, + "validate": validate, + "write_concern": write_concern, + "cascade": cascade, } if cascade_kwargs: # Allow granular control over cascades kwargs.update(cascade_kwargs) - kwargs['_refs'] = _refs + kwargs["_refs"] = _refs self.cascade_save(**kwargs) except pymongo.errors.DuplicateKeyError as err: - message = u'Tried to save duplicate unique keys (%s)' + message = u"Tried to save duplicate unique keys (%s)" raise NotUniqueError(message % six.text_type(err)) except pymongo.errors.OperationFailure as err: - message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', six.text_type(err)): + message = "Could not save document (%s)" + if re.match("^E1100[01] duplicate key", six.text_type(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u'Tried to save duplicate unique keys (%s)' + message = u"Tried to save duplicate unique keys (%s)" raise NotUniqueError(message % six.text_type(err)) raise OperationError(message % six.text_type(err)) # Make sure we store the PK on this document now that it's saved - id_field = self._meta['id_field'] - if created or id_field not in self._meta.get('shard_key', []): + id_field = self._meta["id_field"] + if created or id_field not in self._meta.get("shard_key", []): self[id_field] = self._fields[id_field].to_python(object_id) - signals.post_save.send(self.__class__, document=self, - created=created, **signal_kwargs) + signals.post_save.send( + self.__class__, document=self, created=created, **signal_kwargs + ) self._clear_changed_fields() self._created = False @@ -442,11 +467,12 @@ class 
Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): return wc_collection.insert_one(doc).inserted_id # insert_one will provoke UniqueError alongside save does not # therefore, it need to catch and call replace_one. - if '_id' in doc: + if "_id" in doc: raw_object = wc_collection.find_one_and_replace( - {'_id': doc['_id']}, doc) + {"_id": doc["_id"]}, doc + ) if raw_object: - return doc['_id'] + return doc["_id"] object_id = wc_collection.insert_one(doc).inserted_id @@ -461,9 +487,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): update_doc = {} if updates: - update_doc['$set'] = updates + update_doc["$set"] = updates if removals: - update_doc['$unset'] = removals + update_doc["$unset"] = removals return update_doc @@ -473,39 +499,38 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): Helper method, should only be used inside save(). """ collection = self._get_collection() - object_id = doc['_id'] + object_id = doc["_id"] created = False select_dict = {} if save_condition is not None: select_dict = transform.query(self.__class__, **save_condition) - select_dict['_id'] = object_id + select_dict["_id"] = object_id # Need to add shard key to query, or you get an error - shard_key = self._meta.get('shard_key', tuple()) + shard_key = self._meta.get("shard_key", tuple()) for k in shard_key: - path = self._lookup_field(k.split('.')) + path = self._lookup_field(k.split(".")) actual_key = [p.db_field for p in path] val = doc for ak in actual_key: val = val[ak] - select_dict['.'.join(actual_key)] = val + select_dict[".".join(actual_key)] = val update_doc = self._get_update_doc() if update_doc: upsert = save_condition is None with set_write_concern(collection, write_concern) as wc_collection: last_error = wc_collection.update_one( - select_dict, - update_doc, - upsert=upsert + select_dict, update_doc, upsert=upsert ).raw_result - if not upsert and last_error['n'] == 0: - raise SaveConditionError('Race condition preventing' - ' document update detected') + if not upsert and last_error["n"] == 0: + raise SaveConditionError( + "Race condition preventing document update detected" + ) if last_error is not None: - updated_existing = last_error.get('updatedExisting') + updated_existing = last_error.get("updatedExisting") if updated_existing is False: created = True # !!! This is bad, means we accidentally created a new, @@ -518,21 +543,20 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """Recursively save any references and generic references on the document. 
""" - _refs = kwargs.get('_refs') or [] + _refs = kwargs.get("_refs") or [] - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") for name, cls in self._fields.items(): - if not isinstance(cls, (ReferenceField, - GenericReferenceField)): + if not isinstance(cls, (ReferenceField, GenericReferenceField)): continue ref = self._data.get(name) if not ref or isinstance(ref, DBRef): continue - if not getattr(ref, '_changed_fields', True): + if not getattr(ref, "_changed_fields", True): continue ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) @@ -545,7 +569,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): @property def _qs(self): """Return the default queryset corresponding to this document.""" - if not hasattr(self, '__objects'): + if not hasattr(self, "__objects"): self.__objects = QuerySet(self, self._get_collection()) return self.__objects @@ -558,15 +582,15 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): a sharded collection with a compound shard key, it can contain a more complex query. """ - select_dict = {'pk': self.pk} - shard_key = self.__class__._meta.get('shard_key', tuple()) + select_dict = {"pk": self.pk} + shard_key = self.__class__._meta.get("shard_key", tuple()) for k in shard_key: - path = self._lookup_field(k.split('.')) + path = self._lookup_field(k.split(".")) actual_key = [p.db_field for p in path] val = self for ak in actual_key: val = getattr(val, ak) - select_dict['__'.join(actual_key)] = val + select_dict["__".join(actual_key)] = val return select_dict def update(self, **kwargs): @@ -577,14 +601,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): been saved. """ if self.pk is None: - if kwargs.get('upsert', False): + if kwargs.get("upsert", False): query = self.to_mongo() - if '_cls' in query: - del query['_cls'] + if "_cls" in query: + del query["_cls"] return self._qs.filter(**query).update_one(**kwargs) else: - raise OperationError( - 'attempt to update a document not yet saved') + raise OperationError("attempt to update a document not yet saved") # Need to add shard key to query, or you get an error return self._qs.filter(**self._object_key).update_one(**kwargs) @@ -608,16 +631,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): signals.pre_delete.send(self.__class__, document=self, **signal_kwargs) # Delete FileFields separately - FileField = _import_class('FileField') + FileField = _import_class("FileField") for name, field in iteritems(self._fields): if isinstance(field, FileField): getattr(self, name).delete() try: - self._qs.filter( - **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True) + self._qs.filter(**self._object_key).delete( + write_concern=write_concern, _from_doc_delete=True + ) except pymongo.errors.OperationFailure as err: - message = u'Could not delete document (%s)' % err.message + message = u"Could not delete document (%s)" % err.message raise OperationError(message) signals.post_delete.send(self.__class__, document=self, **signal_kwargs) @@ -686,7 +710,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): .. 
versionadded:: 0.5 """ - DeReference = _import_class('DeReference') + DeReference = _import_class("DeReference") DeReference()([self], max_depth + 1) return self @@ -704,20 +728,24 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if fields and isinstance(fields[0], int): max_depth = fields[0] fields = fields[1:] - elif 'max_depth' in kwargs: - max_depth = kwargs['max_depth'] + elif "max_depth" in kwargs: + max_depth = kwargs["max_depth"] if self.pk is None: - raise self.DoesNotExist('Document does not exist') + raise self.DoesNotExist("Document does not exist") - obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( - **self._object_key).only(*fields).limit( - 1).select_related(max_depth=max_depth) + obj = ( + self._qs.read_preference(ReadPreference.PRIMARY) + .filter(**self._object_key) + .only(*fields) + .limit(1) + .select_related(max_depth=max_depth) + ) if obj: obj = obj[0] else: - raise self.DoesNotExist('Document does not exist') + raise self.DoesNotExist("Document does not exist") for field in obj._data: if not fields or field in fields: try: @@ -733,9 +761,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # i.e. obj.update(unset__field=1) followed by obj.reload() delattr(self, field) - self._changed_fields = list( - set(self._changed_fields) - set(fields) - ) if fields else obj._changed_fields + self._changed_fields = ( + list(set(self._changed_fields) - set(fields)) + if fields + else obj._changed_fields + ) self._created = False return self @@ -761,7 +791,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """Returns an instance of :class:`~bson.dbref.DBRef` useful in `__raw__` queries.""" if self.pk is None: - msg = 'Only saved documents can have a valid dbref' + msg = "Only saved documents can have a valid dbref" raise OperationError(msg) return DBRef(self.__class__._get_collection_name(), self.pk) @@ -770,18 +800,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """This method registers the delete rules to apply when removing this object. 
""" - classes = [get_document(class_name) - for class_name in cls._subclasses - if class_name != cls.__name__] + [cls] - documents = [get_document(class_name) - for class_name in document_cls._subclasses - if class_name != document_cls.__name__] + [document_cls] + classes = [ + get_document(class_name) + for class_name in cls._subclasses + if class_name != cls.__name__ + ] + [cls] + documents = [ + get_document(class_name) + for class_name in document_cls._subclasses + if class_name != document_cls.__name__ + ] + [document_cls] for klass in classes: for document_cls in documents: - delete_rules = klass._meta.get('delete_rules') or {} + delete_rules = klass._meta.get("delete_rules") or {} delete_rules[(document_cls, field_name)] = rule - klass._meta['delete_rules'] = delete_rules + klass._meta["delete_rules"] = delete_rules @classmethod def drop_collection(cls): @@ -796,8 +830,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """ coll_name = cls._get_collection_name() if not coll_name: - raise OperationError('Document %s has no collection defined ' - '(is it abstract ?)' % cls) + raise OperationError( + "Document %s has no collection defined (is it abstract ?)" % cls + ) cls._collection = None db = cls._get_db() db.drop_collection(coll_name) @@ -813,19 +848,18 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """ index_spec = cls._build_index_spec(keys) index_spec = index_spec.copy() - fields = index_spec.pop('fields') - drop_dups = kwargs.get('drop_dups', False) + fields = index_spec.pop("fields") + drop_dups = kwargs.get("drop_dups", False) if drop_dups: - msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' + msg = "drop_dups is deprecated and is removed when using PyMongo 3+." warnings.warn(msg, DeprecationWarning) - index_spec['background'] = background + index_spec["background"] = background index_spec.update(kwargs) return cls._get_collection().create_index(fields, **index_spec) @classmethod - def ensure_index(cls, key_or_list, drop_dups=False, background=False, - **kwargs): + def ensure_index(cls, key_or_list, drop_dups=False, background=False, **kwargs): """Ensure that the given indexes are in place. Deprecated in favour of create_index. @@ -837,7 +871,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): will be removed if PyMongo3+ is used """ if drop_dups: - msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' + msg = "drop_dups is deprecated and is removed when using PyMongo 3+." warnings.warn(msg, DeprecationWarning) return cls.create_index(key_or_list, background=background, **kwargs) @@ -850,12 +884,12 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): .. note:: You can disable automatic index creation by setting `auto_create_index` to False in the documents meta data """ - background = cls._meta.get('index_background', False) - drop_dups = cls._meta.get('index_drop_dups', False) - index_opts = cls._meta.get('index_opts') or {} - index_cls = cls._meta.get('index_cls', True) + background = cls._meta.get("index_background", False) + drop_dups = cls._meta.get("index_drop_dups", False) + index_opts = cls._meta.get("index_opts") or {} + index_cls = cls._meta.get("index_cls", True) if drop_dups: - msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' + msg = "drop_dups is deprecated and is removed when using PyMongo 3+." 
warnings.warn(msg, DeprecationWarning) collection = cls._get_collection() @@ -871,40 +905,39 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): cls_indexed = False # Ensure document-defined indexes are created - if cls._meta['index_specs']: - index_spec = cls._meta['index_specs'] + if cls._meta["index_specs"]: + index_spec = cls._meta["index_specs"] for spec in index_spec: spec = spec.copy() - fields = spec.pop('fields') + fields = spec.pop("fields") cls_indexed = cls_indexed or includes_cls(fields) opts = index_opts.copy() opts.update(spec) # we shouldn't pass 'cls' to the collection.ensureIndex options # because of https://jira.mongodb.org/browse/SERVER-769 - if 'cls' in opts: - del opts['cls'] + if "cls" in opts: + del opts["cls"] collection.create_index(fields, background=background, **opts) # If _cls is being used (for polymorphism), it needs an index, # only if another index doesn't begin with _cls - if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'): + if index_cls and not cls_indexed and cls._meta.get("allow_inheritance"): # we shouldn't pass 'cls' to the collection.ensureIndex options # because of https://jira.mongodb.org/browse/SERVER-769 - if 'cls' in index_opts: - del index_opts['cls'] + if "cls" in index_opts: + del index_opts["cls"] - collection.create_index('_cls', background=background, - **index_opts) + collection.create_index("_cls", background=background, **index_opts) @classmethod def list_indexes(cls): """ Lists all of the indexes that should be created for given collection. It includes all the indexes from super- and sub-classes. """ - if cls._meta.get('abstract'): + if cls._meta.get("abstract"): return [] # get all the base classes, subclasses and siblings @@ -912,22 +945,27 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): def get_classes(cls): - if (cls not in classes and - isinstance(cls, TopLevelDocumentMetaclass)): + if cls not in classes and isinstance(cls, TopLevelDocumentMetaclass): classes.append(cls) for base_cls in cls.__bases__: - if (isinstance(base_cls, TopLevelDocumentMetaclass) and - base_cls != Document and - not base_cls._meta.get('abstract') and - base_cls._get_collection().full_name == cls._get_collection().full_name and - base_cls not in classes): + if ( + isinstance(base_cls, TopLevelDocumentMetaclass) + and base_cls != Document + and not base_cls._meta.get("abstract") + and base_cls._get_collection().full_name + == cls._get_collection().full_name + and base_cls not in classes + ): classes.append(base_cls) get_classes(base_cls) for subclass in cls.__subclasses__(): - if (isinstance(base_cls, TopLevelDocumentMetaclass) and - subclass._get_collection().full_name == cls._get_collection().full_name and - subclass not in classes): + if ( + isinstance(base_cls, TopLevelDocumentMetaclass) + and subclass._get_collection().full_name + == cls._get_collection().full_name + and subclass not in classes + ): classes.append(subclass) get_classes(subclass) @@ -937,11 +975,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): def get_indexes_spec(cls): indexes = [] - if cls._meta['index_specs']: - index_spec = cls._meta['index_specs'] + if cls._meta["index_specs"]: + index_spec = cls._meta["index_specs"] for spec in index_spec: spec = spec.copy() - fields = spec.pop('fields') + fields = spec.pop("fields") indexes.append(fields) return indexes @@ -952,10 +990,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): 
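[Editor's note, not part of the patch: the ensure_indexes() machinery reformatted above is driven by the document's meta, and list_indexes()/compare_indexes() (further down in this patch) report on the result. A minimal sketch, assuming a hypothetical Book model and a local mongod:]

    from mongoengine import Document, IntField, StringField, connect

    connect("example_db")                      # assumes a local mongod

    class Book(Document):                      # hypothetical model
        title = StringField()
        year = IntField()
        meta = {
            "indexes": [
                {"fields": ["title"], "unique": True},
                ["year", "-title"],
            ],
            "index_background": True,          # read by ensure_indexes() above
        }

    Book.ensure_indexes()                      # normally run on first save

    print(Book.list_indexes())                 # includes the implicit [('_id', 1)]
    print(Book.compare_indexes())              # {'missing': [...], 'extra': [...]}
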
indexes.append(index) # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed - if [(u'_id', 1)] not in indexes: - indexes.append([(u'_id', 1)]) - if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'): - indexes.append([(u'_cls', 1)]) + if [(u"_id", 1)] not in indexes: + indexes.append([(u"_id", 1)]) + if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): + indexes.append([(u"_cls", 1)]) return indexes @@ -969,27 +1007,26 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): existing = [] for info in cls._get_collection().index_information().values(): - if '_fts' in info['key'][0]: - index_type = info['key'][0][1] - text_index_fields = info.get('weights').keys() - existing.append( - [(key, index_type) for key in text_index_fields]) + if "_fts" in info["key"][0]: + index_type = info["key"][0][1] + text_index_fields = info.get("weights").keys() + existing.append([(key, index_type) for key in text_index_fields]) else: - existing.append(info['key']) + existing.append(info["key"]) missing = [index for index in required if index not in existing] extra = [index for index in existing if index not in required] # if { _cls: 1 } is missing, make sure it's *really* necessary - if [(u'_cls', 1)] in missing: + if [(u"_cls", 1)] in missing: cls_obsolete = False for index in existing: if includes_cls(index) and index not in extra: cls_obsolete = True break if cls_obsolete: - missing.remove([(u'_cls', 1)]) + missing.remove([(u"_cls", 1)]) - return {'missing': missing, 'extra': extra} + return {"missing": missing, "extra": extra} class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): @@ -1074,17 +1111,16 @@ class MapReduceDocument(object): """Lazy-load the object referenced by ``self.key``. ``self.key`` should be the ``primary_key``. 
""" - id_field = self._document()._meta['id_field'] + id_field = self._document()._meta["id_field"] id_field_type = type(id_field) if not isinstance(self.key, id_field_type): try: self.key = id_field_type(self.key) except Exception: - raise Exception('Could not cast key as %s' % - id_field_type.__name__) + raise Exception("Could not cast key as %s" % id_field_type.__name__) - if not hasattr(self, '_key_object'): + if not hasattr(self, "_key_object"): self._key_object = self._document.objects.with_id(self.key) return self._key_object return self._key_object diff --git a/mongoengine/errors.py b/mongoengine/errors.py index bea1d3dc..9852f2a1 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -3,10 +3,20 @@ from collections import defaultdict import six from six import iteritems -__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', - 'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', - 'OperationError', 'NotUniqueError', 'FieldDoesNotExist', - 'ValidationError', 'SaveConditionError', 'DeprecatedError') +__all__ = ( + "NotRegistered", + "InvalidDocumentError", + "LookUpError", + "DoesNotExist", + "MultipleObjectsReturned", + "InvalidQueryError", + "OperationError", + "NotUniqueError", + "FieldDoesNotExist", + "ValidationError", + "SaveConditionError", + "DeprecatedError", +) class NotRegistered(Exception): @@ -71,25 +81,25 @@ class ValidationError(AssertionError): field_name = None _message = None - def __init__(self, message='', **kwargs): + def __init__(self, message="", **kwargs): super(ValidationError, self).__init__(message) - self.errors = kwargs.get('errors', {}) - self.field_name = kwargs.get('field_name') + self.errors = kwargs.get("errors", {}) + self.field_name = kwargs.get("field_name") self.message = message def __str__(self): return six.text_type(self.message) def __repr__(self): - return '%s(%s,)' % (self.__class__.__name__, self.message) + return "%s(%s,)" % (self.__class__.__name__, self.message) def __getattribute__(self, name): message = super(ValidationError, self).__getattribute__(name) - if name == 'message': + if name == "message": if self.field_name: - message = '%s' % message + message = "%s" % message if self.errors: - message = '%s(%s)' % (message, self._format_errors()) + message = "%s(%s)" % (message, self._format_errors()) return message def _get_message(self): @@ -128,22 +138,22 @@ class ValidationError(AssertionError): def _format_errors(self): """Returns a string listing all errors within a document""" - def generate_key(value, prefix=''): + def generate_key(value, prefix=""): if isinstance(value, list): - value = ' '.join([generate_key(k) for k in value]) + value = " ".join([generate_key(k) for k in value]) elif isinstance(value, dict): - value = ' '.join( - [generate_key(v, k) for k, v in iteritems(value)]) + value = " ".join([generate_key(v, k) for k, v in iteritems(value)]) - results = '%s.%s' % (prefix, value) if prefix else value + results = "%s.%s" % (prefix, value) if prefix else value return results error_dict = defaultdict(list) for k, v in iteritems(self.to_dict()): error_dict[generate_key(v)].append(k) - return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)]) + return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)]) class DeprecatedError(Exception): """Raise when a user uses a feature that has been Deprecated""" + pass diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 2a4a2ad8..7ab2276d 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -27,9 +27,15 
@@ except ImportError: Int64 = long -from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, - GeoJsonBaseField, LazyReference, ObjectIdField, - get_document) +from mongoengine.base import ( + BaseDocument, + BaseField, + ComplexBaseField, + GeoJsonBaseField, + LazyReference, + ObjectIdField, + get_document, +) from mongoengine.base.utils import LazyRegexCompiler from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db @@ -53,21 +59,51 @@ if six.PY3: __all__ = ( - 'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', - 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'DateField', - 'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', - 'GenericEmbeddedDocumentField', 'DynamicField', 'ListField', - 'SortedListField', 'EmbeddedDocumentListField', 'DictField', - 'MapField', 'ReferenceField', 'CachedReferenceField', - 'LazyReferenceField', 'GenericLazyReferenceField', - 'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy', - 'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField', - 'GeoPointField', 'PointField', 'LineStringField', 'PolygonField', - 'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField', - 'MultiPolygonField', 'GeoJsonBaseField' + "StringField", + "URLField", + "EmailField", + "IntField", + "LongField", + "FloatField", + "DecimalField", + "BooleanField", + "DateTimeField", + "DateField", + "ComplexDateTimeField", + "EmbeddedDocumentField", + "ObjectIdField", + "GenericEmbeddedDocumentField", + "DynamicField", + "ListField", + "SortedListField", + "EmbeddedDocumentListField", + "DictField", + "MapField", + "ReferenceField", + "CachedReferenceField", + "LazyReferenceField", + "GenericLazyReferenceField", + "GenericReferenceField", + "BinaryField", + "GridFSError", + "GridFSProxy", + "FileField", + "ImageGridFsProxy", + "ImproperlyConfigured", + "ImageField", + "GeoPointField", + "PointField", + "LineStringField", + "PolygonField", + "SequenceField", + "UUIDField", + "MultiPointField", + "MultiLineStringField", + "MultiPolygonField", + "GeoJsonBaseField", ) -RECURSIVE_REFERENCE_CONSTANT = 'self' +RECURSIVE_REFERENCE_CONSTANT = "self" class StringField(BaseField): @@ -83,23 +119,23 @@ class StringField(BaseField): if isinstance(value, six.text_type): return value try: - value = value.decode('utf-8') + value = value.decode("utf-8") except Exception: pass return value def validate(self, value): if not isinstance(value, six.string_types): - self.error('StringField only accepts string values') + self.error("StringField only accepts string values") if self.max_length is not None and len(value) > self.max_length: - self.error('String value is too long') + self.error("String value is too long") if self.min_length is not None and len(value) < self.min_length: - self.error('String value is too short') + self.error("String value is too short") if self.regex is not None and self.regex.match(value) is None: - self.error('String value did not match validation regex') + self.error("String value did not match validation regex") def lookup_member(self, member_name): return None @@ -109,18 +145,18 @@ class StringField(BaseField): return value if op in STRING_OPERATORS: - case_insensitive = op.startswith('i') - op = op.lstrip('i') + case_insensitive = op.startswith("i") + op = op.lstrip("i") flags = re.IGNORECASE if case_insensitive else 0 - regex = r'%s' - if op == 'startswith': - regex = r'^%s' - elif op == 'endswith': - regex = r'%s$' - 
elif op == 'exact': - regex = r'^%s$' + regex = r"%s" + if op == "startswith": + regex = r"^%s" + elif op == "endswith": + regex = r"%s$" + elif op == "exact": + regex = r"^%s$" # escape unsafe characters which could lead to a re.error value = re.escape(value) @@ -135,14 +171,16 @@ class URLField(StringField): """ _URL_REGEX = LazyRegexCompiler( - r'^(?:[a-z0-9\.\-]*)://' # scheme is validated separately - r'(?:(?:[A-Z0-9](?:[A-Z0-9-_]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(? self.max_value: - self.error('Integer value is too large') + self.error("Integer value is too large") def prepare_query_value(self, op, value): if value is None: @@ -319,13 +365,13 @@ class LongField(BaseField): try: value = long(value) except (TypeError, ValueError): - self.error('%s could not be converted to long' % value) + self.error("%s could not be converted to long" % value) if self.min_value is not None and value < self.min_value: - self.error('Long value is too small') + self.error("Long value is too small") if self.max_value is not None and value > self.max_value: - self.error('Long value is too large') + self.error("Long value is too large") def prepare_query_value(self, op, value): if value is None: @@ -353,16 +399,16 @@ class FloatField(BaseField): try: value = float(value) except OverflowError: - self.error('The value is too large to be converted to float') + self.error("The value is too large to be converted to float") if not isinstance(value, float): - self.error('FloatField only accepts float and integer values') + self.error("FloatField only accepts float and integer values") if self.min_value is not None and value < self.min_value: - self.error('Float value is too small') + self.error("Float value is too small") if self.max_value is not None and value > self.max_value: - self.error('Float value is too large') + self.error("Float value is too large") def prepare_query_value(self, op, value): if value is None: @@ -379,8 +425,15 @@ class DecimalField(BaseField): .. versionadded:: 0.3 """ - def __init__(self, min_value=None, max_value=None, force_string=False, - precision=2, rounding=decimal.ROUND_HALF_UP, **kwargs): + def __init__( + self, + min_value=None, + max_value=None, + force_string=False, + precision=2, + rounding=decimal.ROUND_HALF_UP, + **kwargs + ): """ :param min_value: Validation rule for the minimum acceptable value. :param max_value: Validation rule for the maximum acceptable value. 
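[Editor's note, not part of the patch: a usage sketch for the DecimalField options listed above; the Product model is hypothetical, and the quantization itself happens in the to_python() hunk that follows.]

    import decimal
    from mongoengine import Document, DecimalField

    class Product(Document):                   # hypothetical model
        # precision/rounding feed the Decimal.quantize() call in to_python();
        # force_string=True makes to_mongo() store a string instead of a float.
        price = DecimalField(min_value=0, precision=2,
                             rounding=decimal.ROUND_HALF_UP,
                             force_string=True)

    # The same quantization the field applies to incoming values:
    value = decimal.Decimal("19.999")
    print(value.quantize(decimal.Decimal(".00"),
                         rounding=decimal.ROUND_HALF_UP))   # -> 20.00
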
@@ -416,10 +469,12 @@ class DecimalField(BaseField): # Convert to string for python 2.6 before casting to Decimal try: - value = decimal.Decimal('%s' % value) + value = decimal.Decimal("%s" % value) except (TypeError, ValueError, decimal.InvalidOperation): return value - return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding) + return value.quantize( + decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding + ) def to_mongo(self, value): if value is None: @@ -435,13 +490,13 @@ class DecimalField(BaseField): try: value = decimal.Decimal(value) except (TypeError, ValueError, decimal.InvalidOperation) as exc: - self.error('Could not convert value to decimal: %s' % exc) + self.error("Could not convert value to decimal: %s" % exc) if self.min_value is not None and value < self.min_value: - self.error('Decimal value is too small') + self.error("Decimal value is too small") if self.max_value is not None and value > self.max_value: - self.error('Decimal value is too large') + self.error("Decimal value is too large") def prepare_query_value(self, op, value): return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value)) @@ -462,7 +517,7 @@ class BooleanField(BaseField): def validate(self, value): if not isinstance(value, bool): - self.error('BooleanField only accepts boolean values') + self.error("BooleanField only accepts boolean values") class DateTimeField(BaseField): @@ -514,26 +569,29 @@ class DateTimeField(BaseField): return None # split usecs, because they are not recognized by strptime. - if '.' in value: + if "." in value: try: - value, usecs = value.split('.') + value, usecs = value.split(".") usecs = int(usecs) except ValueError: return None else: usecs = 0 - kwargs = {'microsecond': usecs} + kwargs = {"microsecond": usecs} try: # Seconds are optional, so try converting seconds first. - return datetime.datetime(*time.strptime(value, - '%Y-%m-%d %H:%M:%S')[:6], **kwargs) + return datetime.datetime( + *time.strptime(value, "%Y-%m-%d %H:%M:%S")[:6], **kwargs + ) except ValueError: try: # Try without seconds. - return datetime.datetime(*time.strptime(value, - '%Y-%m-%d %H:%M')[:5], **kwargs) + return datetime.datetime( + *time.strptime(value, "%Y-%m-%d %H:%M")[:5], **kwargs + ) except ValueError: # Try without hour/minutes/seconds. try: - return datetime.datetime(*time.strptime(value, - '%Y-%m-%d')[:3], **kwargs) + return datetime.datetime( + *time.strptime(value, "%Y-%m-%d")[:3], **kwargs + ) except ValueError: return None @@ -578,12 +636,12 @@ class ComplexDateTimeField(StringField): .. 
versionadded:: 0.5 """ - def __init__(self, separator=',', **kwargs): + def __init__(self, separator=",", **kwargs): """ :param separator: Allows to customize the separator used for storage (default ``,``) """ self.separator = separator - self.format = separator.join(['%Y', '%m', '%d', '%H', '%M', '%S', '%f']) + self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"]) super(ComplexDateTimeField, self).__init__(**kwargs) def _convert_from_datetime(self, val): @@ -630,8 +688,7 @@ class ComplexDateTimeField(StringField): def validate(self, value): value = self.to_python(value) if not isinstance(value, datetime.datetime): - self.error('Only datetime objects may used in a ' - 'ComplexDateTimeField') + self.error("Only datetime objects may used in a ComplexDateTimeField") def to_python(self, value): original_value = value @@ -645,7 +702,9 @@ class ComplexDateTimeField(StringField): return self._convert_from_datetime(value) def prepare_query_value(self, op, value): - return super(ComplexDateTimeField, self).prepare_query_value(op, self._convert_from_datetime(value)) + return super(ComplexDateTimeField, self).prepare_query_value( + op, self._convert_from_datetime(value) + ) class EmbeddedDocumentField(BaseField): @@ -656,11 +715,13 @@ class EmbeddedDocumentField(BaseField): def __init__(self, document_type, **kwargs): # XXX ValidationError raised outside of the "validate" method. if not ( - isinstance(document_type, six.string_types) or - issubclass(document_type, EmbeddedDocument) + isinstance(document_type, six.string_types) + or issubclass(document_type, EmbeddedDocument) ): - self.error('Invalid embedded document class provided to an ' - 'EmbeddedDocumentField') + self.error( + "Invalid embedded document class provided to an " + "EmbeddedDocumentField" + ) self.document_type_obj = document_type super(EmbeddedDocumentField, self).__init__(**kwargs) @@ -676,15 +737,19 @@ class EmbeddedDocumentField(BaseField): if not issubclass(resolved_document_type, EmbeddedDocument): # Due to the late resolution of the document_type # There is a chance that it won't be an EmbeddedDocument (#1661) - self.error('Invalid embedded document class provided to an ' - 'EmbeddedDocumentField') + self.error( + "Invalid embedded document class provided to an " + "EmbeddedDocumentField" + ) self.document_type_obj = resolved_document_type return self.document_type_obj def to_python(self, value): if not isinstance(value, self.document_type): - return self.document_type._from_son(value, _auto_dereference=self._auto_dereference) + return self.document_type._from_son( + value, _auto_dereference=self._auto_dereference + ) return value def to_mongo(self, value, use_db_field=True, fields=None): @@ -698,8 +763,10 @@ class EmbeddedDocumentField(BaseField): """ # Using isinstance also works for subclasses of self.document if not isinstance(value, self.document_type): - self.error('Invalid embedded document instance provided to an ' - 'EmbeddedDocumentField') + self.error( + "Invalid embedded document instance provided to an " + "EmbeddedDocumentField" + ) self.document_type.validate(value, clean) def lookup_member(self, member_name): @@ -714,8 +781,10 @@ class EmbeddedDocumentField(BaseField): try: value = self.document_type._from_son(value) except ValueError: - raise InvalidQueryError("Querying the embedded document '%s' failed, due to an invalid query value" % - (self.document_type._class_name,)) + raise InvalidQueryError( + "Querying the embedded document '%s' failed, due to an invalid query value" + % 
(self.document_type._class_name,) + ) super(EmbeddedDocumentField, self).prepare_query_value(op, value) return self.to_mongo(value) @@ -732,11 +801,13 @@ class GenericEmbeddedDocumentField(BaseField): """ def prepare_query_value(self, op, value): - return super(GenericEmbeddedDocumentField, self).prepare_query_value(op, self.to_mongo(value)) + return super(GenericEmbeddedDocumentField, self).prepare_query_value( + op, self.to_mongo(value) + ) def to_python(self, value): if isinstance(value, dict): - doc_cls = get_document(value['_cls']) + doc_cls = get_document(value["_cls"]) value = doc_cls._from_son(value) return value @@ -744,12 +815,14 @@ class GenericEmbeddedDocumentField(BaseField): def validate(self, value, clean=True): if self.choices and isinstance(value, SON): for choice in self.choices: - if value['_cls'] == choice._class_name: + if value["_cls"] == choice._class_name: return True if not isinstance(value, EmbeddedDocument): - self.error('Invalid embedded document instance provided to an ' - 'GenericEmbeddedDocumentField') + self.error( + "Invalid embedded document instance provided to an " + "GenericEmbeddedDocumentField" + ) value.validate(clean=clean) @@ -766,8 +839,8 @@ class GenericEmbeddedDocumentField(BaseField): if document is None: return None data = document.to_mongo(use_db_field, fields) - if '_cls' not in data: - data['_cls'] = document._class_name + if "_cls" not in data: + data["_cls"] = document._class_name return data @@ -784,21 +857,21 @@ class DynamicField(BaseField): if isinstance(value, six.string_types): return value - if hasattr(value, 'to_mongo'): + if hasattr(value, "to_mongo"): cls = value.__class__ val = value.to_mongo(use_db_field, fields) # If we its a document thats not inherited add _cls if isinstance(value, Document): - val = {'_ref': value.to_dbref(), '_cls': cls.__name__} + val = {"_ref": value.to_dbref(), "_cls": cls.__name__} if isinstance(value, EmbeddedDocument): - val['_cls'] = cls.__name__ + val["_cls"] = cls.__name__ return val if not isinstance(value, (dict, list, tuple)): return value is_list = False - if not hasattr(value, 'items'): + if not hasattr(value, "items"): is_list = True value = {k: v for k, v in enumerate(value)} @@ -812,10 +885,10 @@ class DynamicField(BaseField): return value def to_python(self, value): - if isinstance(value, dict) and '_cls' in value: - doc_cls = get_document(value['_cls']) - if '_ref' in value: - value = doc_cls._get_db().dereference(value['_ref']) + if isinstance(value, dict) and "_cls" in value: + doc_cls = get_document(value["_cls"]) + if "_ref" in value: + value = doc_cls._get_db().dereference(value["_ref"]) return doc_cls._from_son(value) return super(DynamicField, self).to_python(value) @@ -829,7 +902,7 @@ class DynamicField(BaseField): return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) def validate(self, value, clean=True): - if hasattr(value, 'validate'): + if hasattr(value, "validate"): value.validate(clean=clean) @@ -845,7 +918,7 @@ class ListField(ComplexBaseField): def __init__(self, field=None, **kwargs): self.field = field - kwargs.setdefault('default', lambda: []) + kwargs.setdefault("default", lambda: []) super(ListField, self).__init__(**kwargs) def __get__(self, instance, owner): @@ -853,16 +926,19 @@ class ListField(ComplexBaseField): # Document class being used rather than a document object return self value = instance._data.get(self.name) - LazyReferenceField = _import_class('LazyReferenceField') - GenericLazyReferenceField = 
_import_class('GenericLazyReferenceField') - if isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) and value: + LazyReferenceField = _import_class("LazyReferenceField") + GenericLazyReferenceField = _import_class("GenericLazyReferenceField") + if ( + isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) + and value + ): instance._data[self.name] = [self.field.build_lazyref(x) for x in value] return super(ListField, self).__get__(instance, owner) def validate(self, value): """Make sure that a list of valid fields is being used.""" if not isinstance(value, (list, tuple, BaseQuerySet)): - self.error('Only lists and tuples may be used in a list field') + self.error("Only lists and tuples may be used in a list field") super(ListField, self).validate(value) def prepare_query_value(self, op, value): @@ -871,10 +947,10 @@ class ListField(ComplexBaseField): # If the value is iterable and it's not a string nor a # BaseDocument, call prepare_query_value for each of its items. if ( - op in ('set', 'unset', None) and - hasattr(value, '__iter__') and - not isinstance(value, six.string_types) and - not isinstance(value, BaseDocument) + op in ("set", "unset", None) + and hasattr(value, "__iter__") + and not isinstance(value, six.string_types) + and not isinstance(value, BaseDocument) ): return [self.field.prepare_query_value(op, v) for v in value] @@ -925,17 +1001,18 @@ class SortedListField(ListField): _order_reverse = False def __init__(self, field, **kwargs): - if 'ordering' in kwargs.keys(): - self._ordering = kwargs.pop('ordering') - if 'reverse' in kwargs.keys(): - self._order_reverse = kwargs.pop('reverse') + if "ordering" in kwargs.keys(): + self._ordering = kwargs.pop("ordering") + if "reverse" in kwargs.keys(): + self._order_reverse = kwargs.pop("reverse") super(SortedListField, self).__init__(field, **kwargs) def to_mongo(self, value, use_db_field=True, fields=None): value = super(SortedListField, self).to_mongo(value, use_db_field, fields) if self._ordering is not None: - return sorted(value, key=itemgetter(self._ordering), - reverse=self._order_reverse) + return sorted( + value, key=itemgetter(self._ordering), reverse=self._order_reverse + ) return sorted(value, reverse=self._order_reverse) @@ -944,7 +1021,9 @@ def key_not_string(d): dictionary is not a string. """ for k, v in d.items(): - if not isinstance(k, six.string_types) or (isinstance(v, dict) and key_not_string(v)): + if not isinstance(k, six.string_types) or ( + isinstance(v, dict) and key_not_string(v) + ): return True @@ -953,7 +1032,9 @@ def key_has_dot_or_dollar(d): dictionary contains a dot or a dollar sign. """ for k, v in d.items(): - if ('.' in k or k.startswith('$')) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): + if ("." 
in k or k.startswith("$")) or ( + isinstance(v, dict) and key_has_dot_or_dollar(v) + ): return True @@ -972,39 +1053,48 @@ class DictField(ComplexBaseField): self.field = field self._auto_dereference = False - kwargs.setdefault('default', lambda: {}) + kwargs.setdefault("default", lambda: {}) super(DictField, self).__init__(*args, **kwargs) def validate(self, value): """Make sure that a list of valid fields is being used.""" if not isinstance(value, dict): - self.error('Only dictionaries may be used in a DictField') + self.error("Only dictionaries may be used in a DictField") if key_not_string(value): - msg = ('Invalid dictionary key - documents must ' - 'have only string keys') + msg = "Invalid dictionary key - documents must have only string keys" self.error(msg) if key_has_dot_or_dollar(value): - self.error('Invalid dictionary key name - keys may not contain "."' - ' or startswith "$" characters') + self.error( + 'Invalid dictionary key name - keys may not contain "."' + ' or startswith "$" characters' + ) super(DictField, self).validate(value) def lookup_member(self, member_name): return DictField(db_field=member_name) def prepare_query_value(self, op, value): - match_operators = ['contains', 'icontains', 'startswith', - 'istartswith', 'endswith', 'iendswith', - 'exact', 'iexact'] + match_operators = [ + "contains", + "icontains", + "startswith", + "istartswith", + "endswith", + "iendswith", + "exact", + "iexact", + ] if op in match_operators and isinstance(value, six.string_types): return StringField().prepare_query_value(op, value) - if hasattr(self.field, 'field'): # Used for instance when using DictField(ListField(IntField())) - if op in ('set', 'unset') and isinstance(value, dict): + if hasattr( + self.field, "field" + ): # Used for instance when using DictField(ListField(IntField())) + if op in ("set", "unset") and isinstance(value, dict): return { - k: self.field.prepare_query_value(op, v) - for k, v in value.items() + k: self.field.prepare_query_value(op, v) for k, v in value.items() } return self.field.prepare_query_value(op, value) @@ -1022,8 +1112,7 @@ class MapField(DictField): def __init__(self, field=None, *args, **kwargs): # XXX ValidationError raised outside of the "validate" method. if not isinstance(field, BaseField): - self.error('Argument to MapField constructor must be a valid ' - 'field') + self.error("Argument to MapField constructor must be a valid field") super(MapField, self).__init__(field=field, *args, **kwargs) @@ -1069,8 +1158,9 @@ class ReferenceField(BaseField): .. versionchanged:: 0.5 added `reverse_delete_rule` """ - def __init__(self, document_type, dbref=False, - reverse_delete_rule=DO_NOTHING, **kwargs): + def __init__( + self, document_type, dbref=False, reverse_delete_rule=DO_NOTHING, **kwargs + ): """Initialises the Reference Field. :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef` @@ -1083,12 +1173,13 @@ class ReferenceField(BaseField): :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. """ # XXX ValidationError raised outside of the "validate" method. 
- if ( - not isinstance(document_type, six.string_types) and - not issubclass(document_type, Document) + if not isinstance(document_type, six.string_types) and not issubclass( + document_type, Document ): - self.error('Argument to ReferenceField constructor must be a ' - 'document class or a string') + self.error( + "Argument to ReferenceField constructor must be a " + "document class or a string" + ) self.dbref = dbref self.document_type_obj = document_type @@ -1115,14 +1206,14 @@ class ReferenceField(BaseField): auto_dereference = instance._fields[self.name]._auto_dereference # Dereference DBRefs if auto_dereference and isinstance(value, DBRef): - if hasattr(value, 'cls'): + if hasattr(value, "cls"): # Dereference using the class type specified in the reference cls = get_document(value.cls) else: cls = self.document_type dereferenced = cls._get_db().dereference(value) if dereferenced is None: - raise DoesNotExist('Trying to dereference unknown document %s' % value) + raise DoesNotExist("Trying to dereference unknown document %s" % value) else: instance._data[self.name] = cls._from_son(dereferenced) @@ -1140,8 +1231,10 @@ class ReferenceField(BaseField): # XXX ValidationError raised outside of the "validate" method. if id_ is None: - self.error('You can only reference documents once they have' - ' been saved to the database') + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) # Use the attributes from the document instance, so that they # override the attributes of this field's document type @@ -1150,11 +1243,11 @@ class ReferenceField(BaseField): id_ = document cls = self.document_type - id_field_name = cls._meta['id_field'] + id_field_name = cls._meta["id_field"] id_field = cls._fields[id_field_name] id_ = id_field.to_mongo(id_) - if self.document_type._meta.get('abstract'): + if self.document_type._meta.get("abstract"): collection = cls._get_collection_name() return DBRef(collection, id_, cls=cls._class_name) elif self.dbref: @@ -1165,8 +1258,9 @@ class ReferenceField(BaseField): def to_python(self, value): """Convert a MongoDB-compatible type to a Python type.""" - if (not self.dbref and - not isinstance(value, (DBRef, Document, EmbeddedDocument))): + if not self.dbref and not isinstance( + value, (DBRef, Document, EmbeddedDocument) + ): collection = self.document_type._get_collection_name() value = DBRef(collection, self.document_type.id.to_python(value)) return value @@ -1179,11 +1273,15 @@ class ReferenceField(BaseField): def validate(self, value): if not isinstance(value, (self.document_type, LazyReference, DBRef, ObjectId)): - self.error('A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents') + self.error( + "A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents" + ) if isinstance(value, Document) and value.id is None: - self.error('You can only reference documents once they have been ' - 'saved to the database') + self.error( + "You can only reference documents once they have been " + "saved to the database" + ) def lookup_member(self, member_name): return self.document_type._fields.get(member_name) @@ -1206,12 +1304,13 @@ class CachedReferenceField(BaseField): fields = [] # XXX ValidationError raised outside of the "validate" method. 
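[Editor's note, not part of the patch: the ReferenceField hunks above keep the dbref/reverse_delete_rule behaviour. A minimal usage sketch with hypothetical Author/Post models and a local mongod assumed:]

    from mongoengine import CASCADE, Document, ReferenceField, StringField, connect

    connect("example_db")                      # assumes a local mongod

    class Author(Document):                    # hypothetical models
        name = StringField()

    class Post(Document):
        # With dbref=False (the default) only the ObjectId is stored, per the
        # to_mongo() branch above; CASCADE registers a delete rule so deleting
        # an Author also deletes their Posts.
        author = ReferenceField(Author, reverse_delete_rule=CASCADE)

    ada = Author(name="Ada").save()
    Post(author=ada).save()
    ada.delete()
    print(Post.objects.count())                # 0 - cascaded delete
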
- if ( - not isinstance(document_type, six.string_types) and - not issubclass(document_type, Document) + if not isinstance(document_type, six.string_types) and not issubclass( + document_type, Document ): - self.error('Argument to CachedReferenceField constructor must be a' - ' document class or a string') + self.error( + "Argument to CachedReferenceField constructor must be a" + " document class or a string" + ) self.auto_sync = auto_sync self.document_type_obj = document_type @@ -1221,15 +1320,14 @@ class CachedReferenceField(BaseField): def start_listener(self): from mongoengine import signals - signals.post_save.connect(self.on_document_pre_save, - sender=self.document_type) + signals.post_save.connect(self.on_document_pre_save, sender=self.document_type) def on_document_pre_save(self, sender, document, created, **kwargs): if created: return None update_kwargs = { - 'set__%s__%s' % (self.name, key): val + "set__%s__%s" % (self.name, key): val for key, val in document._delta()[0].items() if key in self.fields } @@ -1237,15 +1335,15 @@ class CachedReferenceField(BaseField): filter_kwargs = {} filter_kwargs[self.name] = document - self.owner_document.objects( - **filter_kwargs).update(**update_kwargs) + self.owner_document.objects(**filter_kwargs).update(**update_kwargs) def to_python(self, value): if isinstance(value, dict): collection = self.document_type._get_collection_name() - value = DBRef( - collection, self.document_type.id.to_python(value['_id'])) - return self.document_type._from_son(self.document_type._get_db().dereference(value)) + value = DBRef(collection, self.document_type.id.to_python(value["_id"])) + return self.document_type._from_son( + self.document_type._get_db().dereference(value) + ) return value @@ -1271,14 +1369,14 @@ class CachedReferenceField(BaseField): if auto_dereference and isinstance(value, DBRef): dereferenced = self.document_type._get_db().dereference(value) if dereferenced is None: - raise DoesNotExist('Trying to dereference unknown document %s' % value) + raise DoesNotExist("Trying to dereference unknown document %s" % value) else: instance._data[self.name] = self.document_type._from_son(dereferenced) return super(CachedReferenceField, self).__get__(instance, owner) def to_mongo(self, document, use_db_field=True, fields=None): - id_field_name = self.document_type._meta['id_field'] + id_field_name = self.document_type._meta["id_field"] id_field = self.document_type._fields[id_field_name] # XXX ValidationError raised outside of the "validate" method. @@ -1286,14 +1384,14 @@ class CachedReferenceField(BaseField): # We need the id from the saved object to create the DBRef id_ = document.pk if id_ is None: - self.error('You can only reference documents once they have' - ' been saved to the database') + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) else: - self.error('Only accept a document object') + self.error("Only accept a document object") - value = SON(( - ('_id', id_field.to_mongo(id_)), - )) + value = SON((("_id", id_field.to_mongo(id_)),)) if fields: new_fields = [f for f in self.fields if f in fields] @@ -1310,9 +1408,11 @@ class CachedReferenceField(BaseField): # XXX ValidationError raised outside of the "validate" method. 
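[Editor's note, not part of the patch: CachedReferenceField, reformatted above, embeds a partial copy ({'_id': ..., <cached fields>...}) of the referenced document and keeps it in sync through the signal listener shown above. A sketch with hypothetical models:]

    from mongoengine import CachedReferenceField, Document, StringField

    class Person(Document):                    # hypothetical models
        name = StringField()

    class Report(Document):
        # Stores {'_id': <person id>, 'name': ...}; on_document_pre_save()
        # above pushes later name changes into existing Report documents
        # (auto_sync defaults to True).
        person = CachedReferenceField(Person, fields=["name"])

    p = Person(name="Wilson").save()
    Report(person=p).save()

    # The cached copy can be filtered on without dereferencing:
    print(Report.objects(person__name="Wilson").count())   # 1
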
if isinstance(value, Document): if value.pk is None: - self.error('You can only reference documents once they have' - ' been saved to the database') - value_dict = {'_id': value.pk} + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) + value_dict = {"_id": value.pk} for field in self.fields: value_dict.update({field: value[field]}) @@ -1322,11 +1422,13 @@ class CachedReferenceField(BaseField): def validate(self, value): if not isinstance(value, self.document_type): - self.error('A CachedReferenceField only accepts documents') + self.error("A CachedReferenceField only accepts documents") if isinstance(value, Document) and value.id is None: - self.error('You can only reference documents once they have been ' - 'saved to the database') + self.error( + "You can only reference documents once they have been " + "saved to the database" + ) def lookup_member(self, member_name): return self.document_type._fields.get(member_name) @@ -1336,7 +1438,7 @@ class CachedReferenceField(BaseField): Sync all cached fields on demand. Caution: this operation may be slower. """ - update_key = 'set__%s' % self.name + update_key = "set__%s" % self.name for doc in self.document_type.objects: filter_kwargs = {} @@ -1345,8 +1447,7 @@ class CachedReferenceField(BaseField): update_kwargs = {} update_kwargs[update_key] = doc - self.owner_document.objects( - **filter_kwargs).update(**update_kwargs) + self.owner_document.objects(**filter_kwargs).update(**update_kwargs) class GenericReferenceField(BaseField): @@ -1370,7 +1471,7 @@ class GenericReferenceField(BaseField): """ def __init__(self, *args, **kwargs): - choices = kwargs.pop('choices', None) + choices = kwargs.pop("choices", None) super(GenericReferenceField, self).__init__(*args, **kwargs) self.choices = [] # Keep the choices as a list of allowed Document class names @@ -1383,14 +1484,16 @@ class GenericReferenceField(BaseField): else: # XXX ValidationError raised outside of the "validate" # method. 
- self.error('Invalid choices provided: must be a list of' - 'Document subclasses and/or six.string_typess') + self.error( + "Invalid choices provided: must be a list of" + "Document subclasses and/or six.string_typess" + ) def _validate_choices(self, value): if isinstance(value, dict): # If the field has not been dereferenced, it is still a dict # of class and DBRef - value = value.get('_cls') + value = value.get("_cls") elif isinstance(value, Document): value = value._class_name super(GenericReferenceField, self)._validate_choices(value) @@ -1405,7 +1508,7 @@ class GenericReferenceField(BaseField): if auto_dereference and isinstance(value, (dict, SON)): dereferenced = self.dereference(value) if dereferenced is None: - raise DoesNotExist('Trying to dereference unknown document %s' % value) + raise DoesNotExist("Trying to dereference unknown document %s" % value) else: instance._data[self.name] = dereferenced @@ -1413,20 +1516,22 @@ class GenericReferenceField(BaseField): def validate(self, value): if not isinstance(value, (Document, DBRef, dict, SON)): - self.error('GenericReferences can only contain documents') + self.error("GenericReferences can only contain documents") if isinstance(value, (dict, SON)): - if '_ref' not in value or '_cls' not in value: - self.error('GenericReferences can only contain documents') + if "_ref" not in value or "_cls" not in value: + self.error("GenericReferences can only contain documents") # We need the id from the saved object to create the DBRef elif isinstance(value, Document) and value.id is None: - self.error('You can only reference documents once they have been' - ' saved to the database') + self.error( + "You can only reference documents once they have been" + " saved to the database" + ) def dereference(self, value): - doc_cls = get_document(value['_cls']) - reference = value['_ref'] + doc_cls = get_document(value["_cls"]) + reference = value["_ref"] doc = doc_cls._get_db().dereference(reference) if doc is not None: doc = doc_cls._from_son(doc) @@ -1439,7 +1544,7 @@ class GenericReferenceField(BaseField): if isinstance(document, (dict, SON, ObjectId, DBRef)): return document - id_field_name = document.__class__._meta['id_field'] + id_field_name = document.__class__._meta["id_field"] id_field = document.__class__._fields[id_field_name] if isinstance(document, Document): @@ -1447,18 +1552,17 @@ class GenericReferenceField(BaseField): id_ = document.id if id_ is None: # XXX ValidationError raised outside of the "validate" method. 
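[Editor's note, not part of the patch: GenericReferenceField, handled above, stores both the class name and a DBRef ({'_cls': ..., '_ref': ...}), and choices can restrict the referenced types. A sketch with hypothetical models:]

    from mongoengine import Document, GenericReferenceField, StringField

    class Book(Document):                      # hypothetical models
        title = StringField()

    class Film(Document):
        title = StringField()

    class Bookmark(Document):
        # Serialized as SON((('_cls', ...), ('_ref', DBRef(...)))) by the
        # to_mongo() above; only Book or Film instances are accepted here.
        item = GenericReferenceField(choices=[Book, Film])

    dune = Book(title="Dune").save()
    Bookmark(item=dune).save()
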
- self.error('You can only reference documents once they have' - ' been saved to the database') + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) else: id_ = document id_ = id_field.to_mongo(id_) collection = document._get_collection_name() ref = DBRef(collection, id_) - return SON(( - ('_cls', document._class_name), - ('_ref', ref) - )) + return SON((("_cls", document._class_name), ("_ref", ref))) def prepare_query_value(self, op, value): if value is None: @@ -1485,18 +1589,18 @@ class BinaryField(BaseField): def validate(self, value): if not isinstance(value, (six.binary_type, Binary)): - self.error('BinaryField only accepts instances of ' - '(%s, %s, Binary)' % ( - six.binary_type.__name__, Binary.__name__)) + self.error( + "BinaryField only accepts instances of " + "(%s, %s, Binary)" % (six.binary_type.__name__, Binary.__name__) + ) if self.max_bytes is not None and len(value) > self.max_bytes: - self.error('Binary value is too long') + self.error("Binary value is too long") def prepare_query_value(self, op, value): if value is None: return value - return super(BinaryField, self).prepare_query_value( - op, self.to_mongo(value)) + return super(BinaryField, self).prepare_query_value(op, self.to_mongo(value)) class GridFSError(Exception): @@ -1513,10 +1617,14 @@ class GridFSProxy(object): _fs = None - def __init__(self, grid_id=None, key=None, - instance=None, - db_alias=DEFAULT_CONNECTION_NAME, - collection_name='fs'): + def __init__( + self, + grid_id=None, + key=None, + instance=None, + db_alias=DEFAULT_CONNECTION_NAME, + collection_name="fs", + ): self.grid_id = grid_id # Store GridFS id for file self.key = key self.instance = instance @@ -1526,8 +1634,16 @@ class GridFSProxy(object): self.gridout = None def __getattr__(self, name): - attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias', - 'collection_name', 'newfile', 'gridout') + attrs = ( + "_fs", + "grid_id", + "key", + "instance", + "db_alias", + "collection_name", + "newfile", + "gridout", + ) if name in attrs: return self.__getattribute__(name) obj = self.get() @@ -1545,7 +1661,7 @@ class GridFSProxy(object): def __getstate__(self): self_dict = self.__dict__ - self_dict['_fs'] = None + self_dict["_fs"] = None return self_dict def __copy__(self): @@ -1557,18 +1673,20 @@ class GridFSProxy(object): return self.__copy__() def __repr__(self): - return '<%s: %s>' % (self.__class__.__name__, self.grid_id) + return "<%s: %s>" % (self.__class__.__name__, self.grid_id) def __str__(self): gridout = self.get() - filename = getattr(gridout, 'filename') if gridout else '' - return '<%s: %s (%s)>' % (self.__class__.__name__, filename, self.grid_id) + filename = getattr(gridout, "filename") if gridout else "" + return "<%s: %s (%s)>" % (self.__class__.__name__, filename, self.grid_id) def __eq__(self, other): if isinstance(other, GridFSProxy): - return ((self.grid_id == other.grid_id) and - (self.collection_name == other.collection_name) and - (self.db_alias == other.db_alias)) + return ( + (self.grid_id == other.grid_id) + and (self.collection_name == other.collection_name) + and (self.db_alias == other.db_alias) + ) else: return False @@ -1578,8 +1696,7 @@ class GridFSProxy(object): @property def fs(self): if not self._fs: - self._fs = gridfs.GridFS( - get_db(self.db_alias), self.collection_name) + self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name) return self._fs def get(self, grid_id=None): @@ -1604,16 +1721,20 @@ class GridFSProxy(object): def put(self, file_obj, 
**kwargs): if self.grid_id: - raise GridFSError('This document already has a file. Either delete ' - 'it or call replace to overwrite it') + raise GridFSError( + "This document already has a file. Either delete " + "it or call replace to overwrite it" + ) self.grid_id = self.fs.put(file_obj, **kwargs) self._mark_as_changed() def write(self, string): if self.grid_id: if not self.newfile: - raise GridFSError('This document already has a file. Either ' - 'delete it or call replace to overwrite it') + raise GridFSError( + "This document already has a file. Either " + "delete it or call replace to overwrite it" + ) else: self.new_file() self.newfile.write(string) @@ -1632,7 +1753,7 @@ class GridFSProxy(object): try: return gridout.read(size) except Exception: - return '' + return "" def delete(self): # Delete file from GridFS, FileField still remains @@ -1662,10 +1783,12 @@ class FileField(BaseField): .. versionchanged:: 0.5 added optional size param for read .. versionchanged:: 0.6 added db_alias for multidb support """ + proxy_class = GridFSProxy - def __init__(self, db_alias=DEFAULT_CONNECTION_NAME, collection_name='fs', - **kwargs): + def __init__( + self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs + ): super(FileField, self).__init__(**kwargs) self.collection_name = collection_name self.db_alias = db_alias @@ -1688,9 +1811,8 @@ class FileField(BaseField): def __set__(self, instance, value): key = self.name if ( - (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or - isinstance(value, (six.binary_type, six.string_types)) - ): + hasattr(value, "read") and not isinstance(value, GridFSProxy) + ) or isinstance(value, (six.binary_type, six.string_types)): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it @@ -1701,8 +1823,7 @@ class FileField(BaseField): pass # Create a new proxy object as we don't already have one - instance._data[key] = self.get_proxy_obj( - key=key, instance=instance) + instance._data[key] = self.get_proxy_obj(key=key, instance=instance) instance._data[key].put(value) else: instance._data[key] = value @@ -1715,9 +1836,12 @@ class FileField(BaseField): if collection_name is None: collection_name = self.collection_name - return self.proxy_class(key=key, instance=instance, - db_alias=db_alias, - collection_name=collection_name) + return self.proxy_class( + key=key, + instance=instance, + db_alias=db_alias, + collection_name=collection_name, + ) def to_mongo(self, value): # Store the GridFS file id in MongoDB @@ -1727,16 +1851,16 @@ class FileField(BaseField): def to_python(self, value): if value is not None: - return self.proxy_class(value, - collection_name=self.collection_name, - db_alias=self.db_alias) + return self.proxy_class( + value, collection_name=self.collection_name, db_alias=self.db_alias + ) def validate(self, value): if value.grid_id is not None: if not isinstance(value, self.proxy_class): - self.error('FileField only accepts GridFSProxy values') + self.error("FileField only accepts GridFSProxy values") if not isinstance(value.grid_id, ObjectId): - self.error('Invalid GridFSProxy value') + self.error("Invalid GridFSProxy value") class ImageGridFsProxy(GridFSProxy): @@ -1753,52 +1877,51 @@ class ImageGridFsProxy(GridFSProxy): """ field = self.instance._fields[self.key] # Handle nested fields - if hasattr(field, 'field') and isinstance(field.field, FileField): + if hasattr(field, "field") and isinstance(field.field, FileField): field = field.field try: img = 
Image.open(file_obj) img_format = img.format except Exception as e: - raise ValidationError('Invalid image: %s' % e) + raise ValidationError("Invalid image: %s" % e) # Progressive JPEG # TODO: fixme, at least unused, at worst bad implementation - progressive = img.info.get('progressive') or False + progressive = img.info.get("progressive") or False - if (kwargs.get('progressive') and - isinstance(kwargs.get('progressive'), bool) and - img_format == 'JPEG'): + if ( + kwargs.get("progressive") + and isinstance(kwargs.get("progressive"), bool) + and img_format == "JPEG" + ): progressive = True else: progressive = False - if (field.size and (img.size[0] > field.size['width'] or - img.size[1] > field.size['height'])): + if field.size and ( + img.size[0] > field.size["width"] or img.size[1] > field.size["height"] + ): size = field.size - if size['force']: - img = ImageOps.fit(img, - (size['width'], - size['height']), - Image.ANTIALIAS) + if size["force"]: + img = ImageOps.fit( + img, (size["width"], size["height"]), Image.ANTIALIAS + ) else: - img.thumbnail((size['width'], - size['height']), - Image.ANTIALIAS) + img.thumbnail((size["width"], size["height"]), Image.ANTIALIAS) thumbnail = None if field.thumbnail_size: size = field.thumbnail_size - if size['force']: + if size["force"]: thumbnail = ImageOps.fit( - img, (size['width'], size['height']), Image.ANTIALIAS) + img, (size["width"], size["height"]), Image.ANTIALIAS + ) else: thumbnail = img.copy() - thumbnail.thumbnail((size['width'], - size['height']), - Image.ANTIALIAS) + thumbnail.thumbnail((size["width"], size["height"]), Image.ANTIALIAS) if thumbnail: thumb_id = self._put_thumbnail(thumbnail, img_format, progressive) @@ -1811,12 +1934,9 @@ class ImageGridFsProxy(GridFSProxy): img.save(io, img_format, progressive=progressive) io.seek(0) - return super(ImageGridFsProxy, self).put(io, - width=w, - height=h, - format=img_format, - thumbnail_id=thumb_id, - **kwargs) + return super(ImageGridFsProxy, self).put( + io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs + ) def delete(self, *args, **kwargs): # deletes thumbnail @@ -1833,10 +1953,7 @@ class ImageGridFsProxy(GridFSProxy): thumbnail.save(io, format, progressive=progressive) io.seek(0) - return self.fs.put(io, width=w, - height=h, - format=format, - **kwargs) + return self.fs.put(io, width=w, height=h, format=format, **kwargs) @property def size(self): @@ -1888,32 +2005,30 @@ class ImageField(FileField): .. 
versionadded:: 0.6 """ + proxy_class = ImageGridFsProxy - def __init__(self, size=None, thumbnail_size=None, - collection_name='images', **kwargs): + def __init__( + self, size=None, thumbnail_size=None, collection_name="images", **kwargs + ): if not Image: - raise ImproperlyConfigured('PIL library was not found') + raise ImproperlyConfigured("PIL library was not found") - params_size = ('width', 'height', 'force') - extra_args = { - 'size': size, - 'thumbnail_size': thumbnail_size - } + params_size = ("width", "height", "force") + extra_args = {"size": size, "thumbnail_size": thumbnail_size} for att_name, att in extra_args.items(): value = None if isinstance(att, (tuple, list)): if six.PY3: - value = dict(itertools.zip_longest(params_size, att, - fillvalue=None)) + value = dict( + itertools.zip_longest(params_size, att, fillvalue=None) + ) else: value = dict(map(None, params_size, att)) setattr(self, att_name, value) - super(ImageField, self).__init__( - collection_name=collection_name, - **kwargs) + super(ImageField, self).__init__(collection_name=collection_name, **kwargs) class SequenceField(BaseField): @@ -1947,15 +2062,24 @@ class SequenceField(BaseField): """ _auto_gen = True - COLLECTION_NAME = 'mongoengine.counters' + COLLECTION_NAME = "mongoengine.counters" VALUE_DECORATOR = int - def __init__(self, collection_name=None, db_alias=None, sequence_name=None, - value_decorator=None, *args, **kwargs): + def __init__( + self, + collection_name=None, + db_alias=None, + sequence_name=None, + value_decorator=None, + *args, + **kwargs + ): self.collection_name = collection_name or self.COLLECTION_NAME self.db_alias = db_alias or DEFAULT_CONNECTION_NAME self.sequence_name = sequence_name - self.value_decorator = value_decorator if callable(value_decorator) else self.VALUE_DECORATOR + self.value_decorator = ( + value_decorator if callable(value_decorator) else self.VALUE_DECORATOR + ) super(SequenceField, self).__init__(*args, **kwargs) def generate(self): @@ -1963,15 +2087,16 @@ class SequenceField(BaseField): Generate and Increment the counter """ sequence_name = self.get_sequence_name() - sequence_id = '%s.%s' % (sequence_name, self.name) + sequence_id = "%s.%s" % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_one_and_update( - filter={'_id': sequence_id}, - update={'$inc': {'next': 1}}, + filter={"_id": sequence_id}, + update={"$inc": {"next": 1}}, return_document=ReturnDocument.AFTER, - upsert=True) - return self.value_decorator(counter['next']) + upsert=True, + ) + return self.value_decorator(counter["next"]) def set_next_value(self, value): """Helper method to set the next sequence value""" @@ -1982,8 +2107,9 @@ class SequenceField(BaseField): filter={"_id": sequence_id}, update={"$set": {"next": value}}, return_document=ReturnDocument.AFTER, - upsert=True) - return self.value_decorator(counter['next']) + upsert=True, + ) + return self.value_decorator(counter["next"]) def get_next_value(self): """Helper method to get the next value for previewing. @@ -1992,12 +2118,12 @@ class SequenceField(BaseField): as it is only fixed on set. 
""" sequence_name = self.get_sequence_name() - sequence_id = '%s.%s' % (sequence_name, self.name) + sequence_id = "%s.%s" % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - data = collection.find_one({'_id': sequence_id}) + data = collection.find_one({"_id": sequence_id}) if data: - return self.value_decorator(data['next'] + 1) + return self.value_decorator(data["next"] + 1) return self.value_decorator(1) @@ -2005,11 +2131,14 @@ class SequenceField(BaseField): if self.sequence_name: return self.sequence_name owner = self.owner_document - if issubclass(owner, Document) and not owner._meta.get('abstract'): + if issubclass(owner, Document) and not owner._meta.get("abstract"): return owner._get_collection_name() else: - return ''.join('_%s' % c if c.isupper() else c - for c in owner._class_name).strip('_').lower() + return ( + "".join("_%s" % c if c.isupper() else c for c in owner._class_name) + .strip("_") + .lower() + ) def __get__(self, instance, owner): value = super(SequenceField, self).__get__(instance, owner) @@ -2046,6 +2175,7 @@ class UUIDField(BaseField): .. versionadded:: 0.6 """ + _binary = None def __init__(self, binary=True, **kwargs): @@ -2090,7 +2220,7 @@ class UUIDField(BaseField): try: uuid.UUID(value) except (ValueError, TypeError, AttributeError) as exc: - self.error('Could not convert to UUID: %s' % exc) + self.error("Could not convert to UUID: %s" % exc) class GeoPointField(BaseField): @@ -2109,16 +2239,14 @@ class GeoPointField(BaseField): def validate(self, value): """Make sure that a geo-value is of type (x, y)""" if not isinstance(value, (list, tuple)): - self.error('GeoPointField can only accept tuples or lists ' - 'of (x, y)') + self.error("GeoPointField can only accept tuples or lists of (x, y)") if not len(value) == 2: - self.error('Value (%s) must be a two-dimensional point' % - repr(value)) - elif (not isinstance(value[0], (float, int)) or - not isinstance(value[1], (float, int))): - self.error( - 'Both values (%s) in point must be float or int' % repr(value)) + self.error("Value (%s) must be a two-dimensional point" % repr(value)) + elif not isinstance(value[0], (float, int)) or not isinstance( + value[1], (float, int) + ): + self.error("Both values (%s) in point must be float or int" % repr(value)) class PointField(GeoJsonBaseField): @@ -2138,7 +2266,8 @@ class PointField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = 'Point' + + _type = "Point" class LineStringField(GeoJsonBaseField): @@ -2157,7 +2286,8 @@ class LineStringField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = 'LineString' + + _type = "LineString" class PolygonField(GeoJsonBaseField): @@ -2179,7 +2309,8 @@ class PolygonField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = 'Polygon' + + _type = "Polygon" class MultiPointField(GeoJsonBaseField): @@ -2199,7 +2330,8 @@ class MultiPointField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = 'MultiPoint' + + _type = "MultiPoint" class MultiLineStringField(GeoJsonBaseField): @@ -2219,7 +2351,8 @@ class MultiLineStringField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = 'MultiLineString' + + _type = "MultiLineString" class MultiPolygonField(GeoJsonBaseField): @@ -2246,7 +2379,8 @@ class MultiPolygonField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = 'MultiPolygon' + + _type = "MultiPolygon" class LazyReferenceField(BaseField): @@ -2260,8 +2394,14 @@ class LazyReferenceField(BaseField): .. 
versionadded:: 0.15 """ - def __init__(self, document_type, passthrough=False, dbref=False, - reverse_delete_rule=DO_NOTHING, **kwargs): + def __init__( + self, + document_type, + passthrough=False, + dbref=False, + reverse_delete_rule=DO_NOTHING, + **kwargs + ): """Initialises the Reference Field. :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef` @@ -2274,12 +2414,13 @@ class LazyReferenceField(BaseField): document. Note this only work getting field (not setting or deleting). """ # XXX ValidationError raised outside of the "validate" method. - if ( - not isinstance(document_type, six.string_types) and - not issubclass(document_type, Document) + if not isinstance(document_type, six.string_types) and not issubclass( + document_type, Document ): - self.error('Argument to LazyReferenceField constructor must be a ' - 'document class or a string') + self.error( + "Argument to LazyReferenceField constructor must be a " + "document class or a string" + ) self.dbref = dbref self.passthrough = passthrough @@ -2299,15 +2440,23 @@ class LazyReferenceField(BaseField): def build_lazyref(self, value): if isinstance(value, LazyReference): if value.passthrough != self.passthrough: - value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough) + value = LazyReference( + value.document_type, value.pk, passthrough=self.passthrough + ) elif value is not None: if isinstance(value, self.document_type): - value = LazyReference(self.document_type, value.pk, passthrough=self.passthrough) + value = LazyReference( + self.document_type, value.pk, passthrough=self.passthrough + ) elif isinstance(value, DBRef): - value = LazyReference(self.document_type, value.id, passthrough=self.passthrough) + value = LazyReference( + self.document_type, value.id, passthrough=self.passthrough + ) else: # value is the primary key of the referenced document - value = LazyReference(self.document_type, value, passthrough=self.passthrough) + value = LazyReference( + self.document_type, value, passthrough=self.passthrough + ) return value def __get__(self, instance, owner): @@ -2332,7 +2481,7 @@ class LazyReferenceField(BaseField): else: # value is the primary key of the referenced document pk = value - id_field_name = self.document_type._meta['id_field'] + id_field_name = self.document_type._meta["id_field"] id_field = self.document_type._fields[id_field_name] pk = id_field.to_mongo(pk) if self.dbref: @@ -2343,7 +2492,7 @@ class LazyReferenceField(BaseField): def validate(self, value): if isinstance(value, LazyReference): if value.collection != self.document_type._get_collection_name(): - self.error('Reference must be on a `%s` document.' % self.document_type) + self.error("Reference must be on a `%s` document." % self.document_type) pk = value.pk elif isinstance(value, self.document_type): pk = value.pk @@ -2355,7 +2504,7 @@ class LazyReferenceField(BaseField): pk = value.id else: # value is the primary key of the referenced document - id_field_name = self.document_type._meta['id_field'] + id_field_name = self.document_type._meta["id_field"] id_field = getattr(self.document_type, id_field_name) pk = value try: @@ -2364,11 +2513,15 @@ class LazyReferenceField(BaseField): self.error( "value should be `{0}` document, LazyReference or DBRef on `{0}` " "or `{0}`'s primary key (i.e. 
`{1}`)".format( - self.document_type.__name__, type(id_field).__name__)) + self.document_type.__name__, type(id_field).__name__ + ) + ) if pk is None: - self.error('You can only reference documents once they have been ' - 'saved to the database') + self.error( + "You can only reference documents once they have been " + "saved to the database" + ) def prepare_query_value(self, op, value): if value is None: @@ -2399,7 +2552,7 @@ class GenericLazyReferenceField(GenericReferenceField): """ def __init__(self, *args, **kwargs): - self.passthrough = kwargs.pop('passthrough', False) + self.passthrough = kwargs.pop("passthrough", False) super(GenericLazyReferenceField, self).__init__(*args, **kwargs) def _validate_choices(self, value): @@ -2410,12 +2563,20 @@ class GenericLazyReferenceField(GenericReferenceField): def build_lazyref(self, value): if isinstance(value, LazyReference): if value.passthrough != self.passthrough: - value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough) + value = LazyReference( + value.document_type, value.pk, passthrough=self.passthrough + ) elif value is not None: if isinstance(value, (dict, SON)): - value = LazyReference(get_document(value['_cls']), value['_ref'].id, passthrough=self.passthrough) + value = LazyReference( + get_document(value["_cls"]), + value["_ref"].id, + passthrough=self.passthrough, + ) elif isinstance(value, Document): - value = LazyReference(type(value), value.pk, passthrough=self.passthrough) + value = LazyReference( + type(value), value.pk, passthrough=self.passthrough + ) return value def __get__(self, instance, owner): @@ -2430,8 +2591,10 @@ class GenericLazyReferenceField(GenericReferenceField): def validate(self, value): if isinstance(value, LazyReference) and value.pk is None: - self.error('You can only reference documents once they have been' - ' saved to the database') + self.error( + "You can only reference documents once they have been" + " saved to the database" + ) return super(GenericLazyReferenceField, self).validate(value) def to_mongo(self, document): @@ -2439,9 +2602,16 @@ class GenericLazyReferenceField(GenericReferenceField): return None if isinstance(document, LazyReference): - return SON(( - ('_cls', document.document_type._class_name), - ('_ref', DBRef(document.document_type._get_collection_name(), document.pk)) - )) + return SON( + ( + ("_cls", document.document_type._class_name), + ( + "_ref", + DBRef( + document.document_type._get_collection_name(), document.pk + ), + ), + ) + ) else: return super(GenericLazyReferenceField, self).to_mongo(document) diff --git a/mongoengine/mongodb_support.py b/mongoengine/mongodb_support.py index b20ebc1e..5d437fef 100644 --- a/mongoengine/mongodb_support.py +++ b/mongoengine/mongodb_support.py @@ -15,5 +15,5 @@ def get_mongodb_version(): :return: tuple(int, int) """ - version_list = get_connection().server_info()['versionArray'][:2] # e.g: (3, 2) + version_list = get_connection().server_info()["versionArray"][:2] # e.g: (3, 2) return tuple(version_list) diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index f66c038e..80c0661b 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -27,6 +27,6 @@ def list_collection_names(db, include_system_collections=False): collections = db.collection_names() if not include_system_collections: - collections = [c for c in collections if not c.startswith('system.')] + collections = [c for c in collections if not c.startswith("system.")] return collections diff --git 
a/mongoengine/queryset/__init__.py b/mongoengine/queryset/__init__.py index 5219c39e..f041d07b 100644 --- a/mongoengine/queryset/__init__.py +++ b/mongoengine/queryset/__init__.py @@ -7,11 +7,22 @@ from mongoengine.queryset.visitor import * # Expose just the public subset of all imported objects and constants. __all__ = ( - 'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager', - 'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL', - + "QuerySet", + "QuerySetNoCache", + "Q", + "queryset_manager", + "QuerySetManager", + "QueryFieldList", + "DO_NOTHING", + "NULLIFY", + "CASCADE", + "DENY", + "PULL", # Errors that might be related to a queryset, mostly here for backward # compatibility - 'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned', - 'NotUniqueError', 'OperationError', + "DoesNotExist", + "InvalidQueryError", + "MultipleObjectsReturned", + "NotUniqueError", + "OperationError", ) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 85616c85..78e85399 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -20,14 +20,18 @@ from mongoengine.base import get_document from mongoengine.common import _import_class from mongoengine.connection import get_db from mongoengine.context_managers import set_write_concern, switch_db -from mongoengine.errors import (InvalidQueryError, LookUpError, - NotUniqueError, OperationError) +from mongoengine.errors import ( + InvalidQueryError, + LookUpError, + NotUniqueError, + OperationError, +) from mongoengine.queryset import transform from mongoengine.queryset.field_list import QueryFieldList from mongoengine.queryset.visitor import Q, QNode -__all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') +__all__ = ("BaseQuerySet", "DO_NOTHING", "NULLIFY", "CASCADE", "DENY", "PULL") # Delete rules DO_NOTHING = 0 @@ -41,6 +45,7 @@ class BaseQuerySet(object): """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. """ + __dereference = False _auto_dereference = True @@ -66,13 +71,12 @@ class BaseQuerySet(object): # If inheritance is allowed, only return instances and instances of # subclasses of the class being used - if document._meta.get('allow_inheritance') is True: + if document._meta.get("allow_inheritance") is True: if len(self._document._subclasses) == 1: - self._initial_query = {'_cls': self._document._subclasses[0]} + self._initial_query = {"_cls": self._document._subclasses[0]} else: - self._initial_query = { - '_cls': {'$in': self._document._subclasses}} - self._loaded_fields = QueryFieldList(always_include=['_cls']) + self._initial_query = {"_cls": {"$in": self._document._subclasses}} + self._loaded_fields = QueryFieldList(always_include=["_cls"]) self._cursor_obj = None self._limit = None @@ -83,8 +87,7 @@ class BaseQuerySet(object): self._max_time_ms = None self._comment = None - def __call__(self, q_obj=None, class_check=True, read_preference=None, - **query): + def __call__(self, q_obj=None, class_check=True, read_preference=None, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. @@ -102,8 +105,10 @@ class BaseQuerySet(object): if q_obj: # make sure proper query object is passed if not isinstance(q_obj, QNode): - msg = ('Not a query object: %s. ' - 'Did you intend to use key=value?' % q_obj) + msg = ( + "Not a query object: %s. " + "Did you intend to use key=value?" 
% q_obj + ) raise InvalidQueryError(msg) query &= q_obj @@ -130,10 +135,10 @@ class BaseQuerySet(object): obj_dict = self.__dict__.copy() # don't picke collection, instead pickle collection params - obj_dict.pop('_collection_obj') + obj_dict.pop("_collection_obj") # don't pickle cursor - obj_dict['_cursor_obj'] = None + obj_dict["_cursor_obj"] = None return obj_dict @@ -144,7 +149,7 @@ class BaseQuerySet(object): See https://github.com/MongoEngine/mongoengine/issues/442 """ - obj_dict['_collection_obj'] = obj_dict['_document']._get_collection() + obj_dict["_collection_obj"] = obj_dict["_document"]._get_collection() # update attributes self.__dict__.update(obj_dict) @@ -182,7 +187,7 @@ class BaseQuerySet(object): queryset._document._from_son( queryset._cursor[key], _auto_dereference=self._auto_dereference, - only_fields=self.only_fields + only_fields=self.only_fields, ) ) @@ -192,10 +197,10 @@ class BaseQuerySet(object): return queryset._document._from_son( queryset._cursor[key], _auto_dereference=self._auto_dereference, - only_fields=self.only_fields + only_fields=self.only_fields, ) - raise TypeError('Provide a slice or an integer index') + raise TypeError("Provide a slice or an integer index") def __iter__(self): raise NotImplementedError @@ -235,14 +240,13 @@ class BaseQuerySet(object): """ queryset = self.clone() if queryset._search_text: - raise OperationError( - 'It is not possible to use search_text two times.') + raise OperationError("It is not possible to use search_text two times.") - query_kwargs = SON({'$search': text}) + query_kwargs = SON({"$search": text}) if language: - query_kwargs['$language'] = language + query_kwargs["$language"] = language - queryset._query_obj &= Q(__raw__={'$text': query_kwargs}) + queryset._query_obj &= Q(__raw__={"$text": query_kwargs}) queryset._mongo_query = None queryset._cursor_obj = None queryset._search_text = text @@ -265,8 +269,7 @@ class BaseQuerySet(object): try: result = six.next(queryset) except StopIteration: - msg = ('%s matching query does not exist.' - % queryset._document._class_name) + msg = "%s matching query does not exist." % queryset._document._class_name raise queryset._document.DoesNotExist(msg) try: six.next(queryset) @@ -276,7 +279,7 @@ class BaseQuerySet(object): # If we were able to retrieve the 2nd doc, rewind the cursor and # raise the MultipleObjectsReturned exception. queryset.rewind() - message = u'%d items returned, instead of 1' % queryset.count() + message = u"%d items returned, instead of 1" % queryset.count() raise queryset._document.MultipleObjectsReturned(message) def create(self, **kwargs): @@ -295,8 +298,9 @@ class BaseQuerySet(object): result = None return result - def insert(self, doc_or_docs, load_bulk=True, - write_concern=None, signal_kwargs=None): + def insert( + self, doc_or_docs, load_bulk=True, write_concern=None, signal_kwargs=None + ): """bulk insert documents :param doc_or_docs: a document or list of documents to be inserted @@ -319,7 +323,7 @@ class BaseQuerySet(object): .. 
versionchanged:: 0.10.7 Add signal_kwargs argument """ - Document = _import_class('Document') + Document = _import_class("Document") if write_concern is None: write_concern = {} @@ -332,16 +336,16 @@ class BaseQuerySet(object): for doc in docs: if not isinstance(doc, self._document): - msg = ("Some documents inserted aren't instances of %s" - % str(self._document)) + msg = "Some documents inserted aren't instances of %s" % str( + self._document + ) raise OperationError(msg) if doc.pk and not doc._created: - msg = 'Some documents have ObjectIds, use doc.update() instead' + msg = "Some documents have ObjectIds, use doc.update() instead" raise OperationError(msg) signal_kwargs = signal_kwargs or {} - signals.pre_bulk_insert.send(self._document, - documents=docs, **signal_kwargs) + signals.pre_bulk_insert.send(self._document, documents=docs, **signal_kwargs) raw = [doc.to_mongo() for doc in docs] @@ -353,21 +357,25 @@ class BaseQuerySet(object): try: inserted_result = insert_func(raw) - ids = [inserted_result.inserted_id] if return_one else inserted_result.inserted_ids + ids = ( + [inserted_result.inserted_id] + if return_one + else inserted_result.inserted_ids + ) except pymongo.errors.DuplicateKeyError as err: - message = 'Could not save document (%s)' + message = "Could not save document (%s)" raise NotUniqueError(message % six.text_type(err)) except pymongo.errors.BulkWriteError as err: # inserting documents that already have an _id field will # give huge performance debt or raise - message = u'Document must not have _id value before bulk write (%s)' + message = u"Document must not have _id value before bulk write (%s)" raise NotUniqueError(message % six.text_type(err)) except pymongo.errors.OperationFailure as err: - message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', six.text_type(err)): + message = "Could not save document (%s)" + if re.match("^E1100[01] duplicate key", six.text_type(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u'Tried to save duplicate unique keys (%s)' + message = u"Tried to save duplicate unique keys (%s)" raise NotUniqueError(message % six.text_type(err)) raise OperationError(message % six.text_type(err)) @@ -377,13 +385,15 @@ class BaseQuerySet(object): if not load_bulk: signals.post_bulk_insert.send( - self._document, documents=docs, loaded=False, **signal_kwargs) + self._document, documents=docs, loaded=False, **signal_kwargs + ) return ids[0] if return_one else ids documents = self.in_bulk(ids) results = [documents.get(obj_id) for obj_id in ids] signals.post_bulk_insert.send( - self._document, documents=results, loaded=True, **signal_kwargs) + self._document, documents=results, loaded=True, **signal_kwargs + ) return results[0] if return_one else results def count(self, with_limit_and_skip=False): @@ -399,8 +409,7 @@ class BaseQuerySet(object): self._cursor_obj = None return count - def delete(self, write_concern=None, _from_doc_delete=False, - cascade_refs=None): + def delete(self, write_concern=None, _from_doc_delete=False, cascade_refs=None): """Delete the documents matched by the query. 
:param write_concern: Extra keyword arguments are passed down which @@ -423,12 +432,13 @@ class BaseQuerySet(object): # Handle deletes where skips or limits have been applied or # there is an untriggered delete signal has_delete_signal = signals.signals_available and ( - signals.pre_delete.has_receivers_for(doc) or - signals.post_delete.has_receivers_for(doc) + signals.pre_delete.has_receivers_for(doc) + or signals.post_delete.has_receivers_for(doc) ) - call_document_delete = (queryset._skip or queryset._limit or - has_delete_signal) and not _from_doc_delete + call_document_delete = ( + queryset._skip or queryset._limit or has_delete_signal + ) and not _from_doc_delete if call_document_delete: cnt = 0 @@ -437,28 +447,28 @@ class BaseQuerySet(object): cnt += 1 return cnt - delete_rules = doc._meta.get('delete_rules') or {} + delete_rules = doc._meta.get("delete_rules") or {} delete_rules = list(delete_rules.items()) # Check for DENY rules before actually deleting/nullifying any other # references for rule_entry, rule in delete_rules: document_cls, field_name = rule_entry - if document_cls._meta.get('abstract'): + if document_cls._meta.get("abstract"): continue if rule == DENY: - refs = document_cls.objects(**{field_name + '__in': self}) + refs = document_cls.objects(**{field_name + "__in": self}) if refs.limit(1).count() > 0: raise OperationError( - 'Could not delete document (%s.%s refers to it)' + "Could not delete document (%s.%s refers to it)" % (document_cls.__name__, field_name) ) # Check all the other rules for rule_entry, rule in delete_rules: document_cls, field_name = rule_entry - if document_cls._meta.get('abstract'): + if document_cls._meta.get("abstract"): continue if rule == CASCADE: @@ -467,19 +477,19 @@ class BaseQuerySet(object): if doc._collection == document_cls._collection: for ref in queryset: cascade_refs.add(ref.id) - refs = document_cls.objects(**{field_name + '__in': self, - 'pk__nin': cascade_refs}) + refs = document_cls.objects( + **{field_name + "__in": self, "pk__nin": cascade_refs} + ) if refs.count() > 0: - refs.delete(write_concern=write_concern, - cascade_refs=cascade_refs) + refs.delete(write_concern=write_concern, cascade_refs=cascade_refs) elif rule == NULLIFY: - document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, - **{'unset__%s' % field_name: 1}) + document_cls.objects(**{field_name + "__in": self}).update( + write_concern=write_concern, **{"unset__%s" % field_name: 1} + ) elif rule == PULL: - document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, - **{'pull_all__%s' % field_name: self}) + document_cls.objects(**{field_name + "__in": self}).update( + write_concern=write_concern, **{"pull_all__%s" % field_name: self} + ) with set_write_concern(queryset._collection, write_concern) as collection: result = collection.delete_many(queryset._query) @@ -490,8 +500,9 @@ class BaseQuerySet(object): if result.acknowledged: return result.deleted_count - def update(self, upsert=False, multi=True, write_concern=None, - full_result=False, **update): + def update( + self, upsert=False, multi=True, write_concern=None, full_result=False, **update + ): """Perform an atomic update on the fields matched by the query. :param upsert: insert if document doesn't exist (default ``False``) @@ -511,7 +522,7 @@ class BaseQuerySet(object): .. 
versionadded:: 0.2 """ if not update and not upsert: - raise OperationError('No update parameters, would remove data') + raise OperationError("No update parameters, would remove data") if write_concern is None: write_concern = {} @@ -522,11 +533,11 @@ class BaseQuerySet(object): # If doing an atomic upsert on an inheritable class # then ensure we add _cls to the update operation - if upsert and '_cls' in query: - if '$set' in update: - update['$set']['_cls'] = queryset._document._class_name + if upsert and "_cls" in query: + if "$set" in update: + update["$set"]["_cls"] = queryset._document._class_name else: - update['$set'] = {'_cls': queryset._document._class_name} + update["$set"] = {"_cls": queryset._document._class_name} try: with set_write_concern(queryset._collection, write_concern) as collection: update_func = collection.update_one @@ -536,14 +547,14 @@ class BaseQuerySet(object): if full_result: return result elif result.raw_result: - return result.raw_result['n'] + return result.raw_result["n"] except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) + raise NotUniqueError(u"Update failed (%s)" % six.text_type(err)) except pymongo.errors.OperationFailure as err: - if six.text_type(err) == u'multi not coded yet': - message = u'update() method requires MongoDB 1.1.3+' + if six.text_type(err) == u"multi not coded yet": + message = u"update() method requires MongoDB 1.1.3+" raise OperationError(message) - raise OperationError(u'Update failed (%s)' % six.text_type(err)) + raise OperationError(u"Update failed (%s)" % six.text_type(err)) def upsert_one(self, write_concern=None, **update): """Overwrite or add the first document matched by the query. @@ -561,11 +572,15 @@ class BaseQuerySet(object): .. versionadded:: 0.10.2 """ - atomic_update = self.update(multi=False, upsert=True, - write_concern=write_concern, - full_result=True, **update) + atomic_update = self.update( + multi=False, + upsert=True, + write_concern=write_concern, + full_result=True, + **update + ) - if atomic_update.raw_result['updatedExisting']: + if atomic_update.raw_result["updatedExisting"]: document = self.get() else: document = self._document.objects.with_id(atomic_update.upserted_id) @@ -594,9 +609,12 @@ class BaseQuerySet(object): multi=False, write_concern=write_concern, full_result=full_result, - **update) + **update + ) - def modify(self, upsert=False, full_response=False, remove=False, new=False, **update): + def modify( + self, upsert=False, full_response=False, remove=False, new=False, **update + ): """Update and return the updated document. Returns either the document before or after modification based on `new` @@ -621,11 +639,10 @@ class BaseQuerySet(object): """ if remove and new: - raise OperationError('Conflicting parameters: remove and new') + raise OperationError("Conflicting parameters: remove and new") if not update and not upsert and not remove: - raise OperationError( - 'No update parameters, must either update or remove') + raise OperationError("No update parameters, must either update or remove") queryset = self.clone() query = queryset._query @@ -635,27 +652,35 @@ class BaseQuerySet(object): try: if full_response: - msg = 'With PyMongo 3+, it is not possible anymore to get the full response.' + msg = "With PyMongo 3+, it is not possible anymore to get the full response." 
warnings.warn(msg, DeprecationWarning) if remove: result = queryset._collection.find_one_and_delete( - query, sort=sort, **self._cursor_args) + query, sort=sort, **self._cursor_args + ) else: if new: return_doc = ReturnDocument.AFTER else: return_doc = ReturnDocument.BEFORE result = queryset._collection.find_one_and_update( - query, update, upsert=upsert, sort=sort, return_document=return_doc, - **self._cursor_args) + query, + update, + upsert=upsert, + sort=sort, + return_document=return_doc, + **self._cursor_args + ) except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u'Update failed (%s)' % err) + raise NotUniqueError(u"Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - raise OperationError(u'Update failed (%s)' % err) + raise OperationError(u"Update failed (%s)" % err) if full_response: - if result['value'] is not None: - result['value'] = self._document._from_son(result['value'], only_fields=self.only_fields) + if result["value"] is not None: + result["value"] = self._document._from_son( + result["value"], only_fields=self.only_fields + ) else: if result is not None: result = self._document._from_son(result, only_fields=self.only_fields) @@ -673,7 +698,7 @@ class BaseQuerySet(object): """ queryset = self.clone() if not queryset._query_obj.empty: - msg = 'Cannot use a filter whilst using `with_id`' + msg = "Cannot use a filter whilst using `with_id`" raise InvalidQueryError(msg) return queryset.filter(pk=object_id).first() @@ -688,21 +713,22 @@ class BaseQuerySet(object): """ doc_map = {} - docs = self._collection.find({'_id': {'$in': object_ids}}, - **self._cursor_args) + docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args) if self._scalar: for doc in docs: - doc_map[doc['_id']] = self._get_scalar( - self._document._from_son(doc, only_fields=self.only_fields)) + doc_map[doc["_id"]] = self._get_scalar( + self._document._from_son(doc, only_fields=self.only_fields) + ) elif self._as_pymongo: for doc in docs: - doc_map[doc['_id']] = doc + doc_map[doc["_id"]] = doc else: for doc in docs: - doc_map[doc['_id']] = self._document._from_son( + doc_map[doc["_id"]] = self._document._from_son( doc, only_fields=self.only_fields, - _auto_dereference=self._auto_dereference) + _auto_dereference=self._auto_dereference, + ) return doc_map @@ -717,8 +743,8 @@ class BaseQuerySet(object): Do NOT return any inherited documents. 
""" - if self._document._meta.get('allow_inheritance') is True: - self._initial_query = {'_cls': self._document._class_name} + if self._document._meta.get("allow_inheritance") is True: + self._initial_query = {"_cls": self._document._class_name} return self @@ -747,15 +773,35 @@ class BaseQuerySet(object): """ if not isinstance(new_qs, BaseQuerySet): raise OperationError( - '%s is not a subclass of BaseQuerySet' % new_qs.__name__) + "%s is not a subclass of BaseQuerySet" % new_qs.__name__ + ) - copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', - '_where_clause', '_loaded_fields', '_ordering', - '_snapshot', '_timeout', '_class_check', '_slave_okay', - '_read_preference', '_iter', '_scalar', '_as_pymongo', - '_limit', '_skip', '_hint', '_auto_dereference', - '_search_text', 'only_fields', '_max_time_ms', - '_comment', '_batch_size') + copy_props = ( + "_mongo_query", + "_initial_query", + "_none", + "_query_obj", + "_where_clause", + "_loaded_fields", + "_ordering", + "_snapshot", + "_timeout", + "_class_check", + "_slave_okay", + "_read_preference", + "_iter", + "_scalar", + "_as_pymongo", + "_limit", + "_skip", + "_hint", + "_auto_dereference", + "_search_text", + "only_fields", + "_max_time_ms", + "_comment", + "_batch_size", + ) for prop in copy_props: val = getattr(self, prop) @@ -868,37 +914,43 @@ class BaseQuerySet(object): except LookUpError: pass - distinct = self._dereference(queryset._cursor.distinct(field), 1, - name=field, instance=self._document) + distinct = self._dereference( + queryset._cursor.distinct(field), 1, name=field, instance=self._document + ) - doc_field = self._document._fields.get(field.split('.', 1)[0]) + doc_field = self._document._fields.get(field.split(".", 1)[0]) instance = None # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') - ListField = _import_class('ListField') - GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField') + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + ListField = _import_class("ListField") + GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") if isinstance(doc_field, ListField): - doc_field = getattr(doc_field, 'field', doc_field) + doc_field = getattr(doc_field, "field", doc_field) if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): - instance = getattr(doc_field, 'document_type', None) + instance = getattr(doc_field, "document_type", None) # handle distinct on subdocuments - if '.' in field: - for field_part in field.split('.')[1:]: + if "." in field: + for field_part in field.split(".")[1:]: # if looping on embedded document, get the document type instance - if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): + if instance and isinstance( + doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): doc_field = instance # now get the subdocument doc_field = getattr(doc_field, field_part, doc_field) # We may need to cast to the correct type eg. 
ListField(EmbeddedDocumentField) if isinstance(doc_field, ListField): - doc_field = getattr(doc_field, 'field', doc_field) - if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): - instance = getattr(doc_field, 'document_type', None) + doc_field = getattr(doc_field, "field", doc_field) + if isinstance( + doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): + instance = getattr(doc_field, "document_type", None) - if instance and isinstance(doc_field, (EmbeddedDocumentField, - GenericEmbeddedDocumentField)): + if instance and isinstance( + doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): distinct = [instance(**doc) for doc in distinct] return distinct @@ -970,14 +1022,14 @@ class BaseQuerySet(object): """ # Check for an operator and transform to mongo-style if there is - operators = ['slice'] + operators = ["slice"] cleaned_fields = [] for key, value in kwargs.items(): - parts = key.split('__') + parts = key.split("__") if parts[0] in operators: op = parts.pop(0) - value = {'$' + op: value} - key = '.'.join(parts) + value = {"$" + op: value} + key = ".".join(parts) cleaned_fields.append((key, value)) # Sort fields by their values, explicitly excluded fields first, then @@ -998,7 +1050,8 @@ class BaseQuerySet(object): fields = [field for field, value in group] fields = queryset._fields_to_dbfields(fields) queryset._loaded_fields += QueryFieldList( - fields, value=value, _only_called=_only_called) + fields, value=value, _only_called=_only_called + ) return queryset @@ -1012,7 +1065,8 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._loaded_fields = QueryFieldList( - always_include=queryset._loaded_fields.always_include) + always_include=queryset._loaded_fields.always_include + ) return queryset def order_by(self, *keys): @@ -1053,7 +1107,7 @@ class BaseQuerySet(object): See https://docs.mongodb.com/manual/reference/method/cursor.comment/#cursor.comment for details. """ - return self._chainable_method('comment', text) + return self._chainable_method("comment", text) def explain(self, format=False): """Return an explain plan record for the @@ -1066,8 +1120,10 @@ class BaseQuerySet(object): # TODO remove this option completely - it's useless. If somebody # wants to pretty-print the output, they easily can. if format: - msg = ('"format" param of BaseQuerySet.explain has been ' - 'deprecated and will be removed in future versions.') + msg = ( + '"format" param of BaseQuerySet.explain has been ' + "deprecated and will be removed in future versions." + ) warnings.warn(msg, DeprecationWarning) plan = pprint.pformat(plan) @@ -1082,7 +1138,7 @@ class BaseQuerySet(object): ..versionchanged:: 0.5 - made chainable .. deprecated:: Ignored with PyMongo 3+ """ - msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' + msg = "snapshot is deprecated as it has no impact when using PyMongo 3+." warnings.warn(msg, DeprecationWarning) queryset = self.clone() queryset._snapshot = enabled @@ -1107,7 +1163,7 @@ class BaseQuerySet(object): .. deprecated:: Ignored with PyMongo 3+ """ - msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' + msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+." warnings.warn(msg, DeprecationWarning) queryset = self.clone() queryset._slave_okay = enabled @@ -1119,10 +1175,12 @@ class BaseQuerySet(object): :param read_preference: override ReplicaSetConnection-level preference. 
""" - validate_read_preference('read_preference', read_preference) + validate_read_preference("read_preference", read_preference) queryset = self.clone() queryset._read_preference = read_preference - queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference + queryset._cursor_obj = ( + None + ) # we need to re-create the cursor object whenever we apply read_preference return queryset def scalar(self, *fields): @@ -1168,7 +1226,7 @@ class BaseQuerySet(object): :param ms: the number of milliseconds before killing the query on the server """ - return self._chainable_method('max_time_ms', ms) + return self._chainable_method("max_time_ms", ms) # JSON Helpers @@ -1179,7 +1237,10 @@ class BaseQuerySet(object): def from_json(self, json_data): """Converts json data to unsaved objects""" son_data = json_util.loads(json_data) - return [self._document._from_son(data, only_fields=self.only_fields) for data in son_data] + return [ + self._document._from_son(data, only_fields=self.only_fields) + for data in son_data + ] def aggregate(self, *pipeline, **kwargs): """ @@ -1192,32 +1253,34 @@ class BaseQuerySet(object): initial_pipeline = [] if self._query: - initial_pipeline.append({'$match': self._query}) + initial_pipeline.append({"$match": self._query}) if self._ordering: - initial_pipeline.append({'$sort': dict(self._ordering)}) + initial_pipeline.append({"$sort": dict(self._ordering)}) if self._limit is not None: # As per MongoDB Documentation (https://docs.mongodb.com/manual/reference/operator/aggregation/limit/), # keeping limit stage right after sort stage is more efficient. But this leads to wrong set of documents # for a skip stage that might succeed these. So we need to maintain more documents in memory in such a # case (https://stackoverflow.com/a/24161461). - initial_pipeline.append({'$limit': self._limit + (self._skip or 0)}) + initial_pipeline.append({"$limit": self._limit + (self._skip or 0)}) if self._skip is not None: - initial_pipeline.append({'$skip': self._skip}) + initial_pipeline.append({"$skip": self._skip}) pipeline = initial_pipeline + list(pipeline) if self._read_preference is not None: - return self._collection.with_options(read_preference=self._read_preference) \ - .aggregate(pipeline, cursor={}, **kwargs) + return self._collection.with_options( + read_preference=self._read_preference + ).aggregate(pipeline, cursor={}, **kwargs) return self._collection.aggregate(pipeline, cursor={}, **kwargs) # JS functionality - def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, - scope=None): + def map_reduce( + self, map_f, reduce_f, output, finalize_f=None, limit=None, scope=None + ): """Perform a map/reduce query using the current query spec and ordering. 
While ``map_reduce`` respects ``QuerySet`` chaining, it must be the last call made, as it does not return a maleable @@ -1257,10 +1320,10 @@ class BaseQuerySet(object): """ queryset = self.clone() - MapReduceDocument = _import_class('MapReduceDocument') + MapReduceDocument = _import_class("MapReduceDocument") - if not hasattr(self._collection, 'map_reduce'): - raise NotImplementedError('Requires MongoDB >= 1.7.1') + if not hasattr(self._collection, "map_reduce"): + raise NotImplementedError("Requires MongoDB >= 1.7.1") map_f_scope = {} if isinstance(map_f, Code): @@ -1275,7 +1338,7 @@ class BaseQuerySet(object): reduce_f_code = queryset._sub_js_fields(reduce_f) reduce_f = Code(reduce_f_code, reduce_f_scope) - mr_args = {'query': queryset._query} + mr_args = {"query": queryset._query} if finalize_f: finalize_f_scope = {} @@ -1284,39 +1347,39 @@ class BaseQuerySet(object): finalize_f = six.text_type(finalize_f) finalize_f_code = queryset._sub_js_fields(finalize_f) finalize_f = Code(finalize_f_code, finalize_f_scope) - mr_args['finalize'] = finalize_f + mr_args["finalize"] = finalize_f if scope: - mr_args['scope'] = scope + mr_args["scope"] = scope if limit: - mr_args['limit'] = limit + mr_args["limit"] = limit - if output == 'inline' and not queryset._ordering: - map_reduce_function = 'inline_map_reduce' + if output == "inline" and not queryset._ordering: + map_reduce_function = "inline_map_reduce" else: - map_reduce_function = 'map_reduce' + map_reduce_function = "map_reduce" if isinstance(output, six.string_types): - mr_args['out'] = output + mr_args["out"] = output elif isinstance(output, dict): ordered_output = [] - for part in ('replace', 'merge', 'reduce'): + for part in ("replace", "merge", "reduce"): value = output.get(part) if value: ordered_output.append((part, value)) break else: - raise OperationError('actionData not specified for output') + raise OperationError("actionData not specified for output") - db_alias = output.get('db_alias') - remaing_args = ['db', 'sharded', 'nonAtomic'] + db_alias = output.get("db_alias") + remaing_args = ["db", "sharded", "nonAtomic"] if db_alias: - ordered_output.append(('db', get_db(db_alias).name)) + ordered_output.append(("db", get_db(db_alias).name)) del remaing_args[0] for part in remaing_args: @@ -1324,20 +1387,22 @@ class BaseQuerySet(object): if value: ordered_output.append((part, value)) - mr_args['out'] = SON(ordered_output) + mr_args["out"] = SON(ordered_output) results = getattr(queryset._collection, map_reduce_function)( - map_f, reduce_f, **mr_args) + map_f, reduce_f, **mr_args + ) - if map_reduce_function == 'map_reduce': + if map_reduce_function == "map_reduce": results = results.find() if queryset._ordering: results = results.sort(queryset._ordering) for doc in results: - yield MapReduceDocument(queryset._document, queryset._collection, - doc['_id'], doc['value']) + yield MapReduceDocument( + queryset._document, queryset._collection, doc["_id"], doc["value"] + ) def exec_js(self, code, *fields, **options): """Execute a Javascript function on the server. 
A list of fields may be @@ -1368,16 +1433,13 @@ class BaseQuerySet(object): fields = [queryset._document._translate_field_name(f) for f in fields] collection = queryset._document._get_collection_name() - scope = { - 'collection': collection, - 'options': options or {}, - } + scope = {"collection": collection, "options": options or {}} query = queryset._query if queryset._where_clause: - query['$where'] = queryset._where_clause + query["$where"] = queryset._where_clause - scope['query'] = query + scope["query"] = query code = Code(code, scope=scope) db = queryset._document._get_db() @@ -1407,22 +1469,22 @@ class BaseQuerySet(object): """ db_field = self._fields_to_dbfields([field]).pop() pipeline = [ - {'$match': self._query}, - {'$group': {'_id': 'sum', 'total': {'$sum': '$' + db_field}}} + {"$match": self._query}, + {"$group": {"_id": "sum", "total": {"$sum": "$" + db_field}}}, ] # if we're performing a sum over a list field, we sum up all the # elements in the list, hence we need to $unwind the arrays first - ListField = _import_class('ListField') - field_parts = field.split('.') + ListField = _import_class("ListField") + field_parts = field.split(".") field_instances = self._document._lookup_field(field_parts) if isinstance(field_instances[-1], ListField): - pipeline.insert(1, {'$unwind': '$' + field}) + pipeline.insert(1, {"$unwind": "$" + field}) result = tuple(self._document._get_collection().aggregate(pipeline)) if result: - return result[0]['total'] + return result[0]["total"] return 0 def average(self, field): @@ -1433,22 +1495,22 @@ class BaseQuerySet(object): """ db_field = self._fields_to_dbfields([field]).pop() pipeline = [ - {'$match': self._query}, - {'$group': {'_id': 'avg', 'total': {'$avg': '$' + db_field}}} + {"$match": self._query}, + {"$group": {"_id": "avg", "total": {"$avg": "$" + db_field}}}, ] # if we're performing an average over a list field, we average out # all the elements in the list, hence we need to $unwind the arrays # first - ListField = _import_class('ListField') - field_parts = field.split('.') + ListField = _import_class("ListField") + field_parts = field.split(".") field_instances = self._document._lookup_field(field_parts) if isinstance(field_instances[-1], ListField): - pipeline.insert(1, {'$unwind': '$' + field}) + pipeline.insert(1, {"$unwind": "$" + field}) result = tuple(self._document._get_collection().aggregate(pipeline)) if result: - return result[0]['total'] + return result[0]["total"] return 0 def item_frequencies(self, field, normalize=False, map_reduce=True): @@ -1474,8 +1536,7 @@ class BaseQuerySet(object): document lookups """ if map_reduce: - return self._item_frequencies_map_reduce(field, - normalize=normalize) + return self._item_frequencies_map_reduce(field, normalize=normalize) return self._item_frequencies_exec_js(field, normalize=normalize) # Iterator helpers @@ -1492,15 +1553,17 @@ class BaseQuerySet(object): return raw_doc doc = self._document._from_son( - raw_doc, _auto_dereference=self._auto_dereference, - only_fields=self.only_fields) + raw_doc, + _auto_dereference=self._auto_dereference, + only_fields=self.only_fields, + ) if self._scalar: return self._get_scalar(doc) return doc - next = __next__ # For Python2 support + next = __next__ # For Python2 support def rewind(self): """Rewind the cursor to its unevaluated state. 
@@ -1521,15 +1584,13 @@ class BaseQuerySet(object): @property def _cursor_args(self): - fields_name = 'projection' + fields_name = "projection" # snapshot is not handled at all by PyMongo 3+ # TODO: evaluate similar possibilities using modifiers if self._snapshot: - msg = 'The snapshot option is not anymore available with PyMongo 3+' + msg = "The snapshot option is not anymore available with PyMongo 3+" warnings.warn(msg, DeprecationWarning) - cursor_args = { - 'no_cursor_timeout': not self._timeout - } + cursor_args = {"no_cursor_timeout": not self._timeout} if self._loaded_fields: cursor_args[fields_name] = self._loaded_fields.as_dict() @@ -1538,7 +1599,7 @@ class BaseQuerySet(object): if fields_name not in cursor_args: cursor_args[fields_name] = {} - cursor_args[fields_name]['_text_score'] = {'$meta': 'textScore'} + cursor_args[fields_name]["_text_score"] = {"$meta": "textScore"} return cursor_args @@ -1555,12 +1616,11 @@ class BaseQuerySet(object): # level, not a cursor level. Thus, we need to get a cloned collection # object using `with_options` first. if self._read_preference is not None: - self._cursor_obj = self._collection\ - .with_options(read_preference=self._read_preference)\ - .find(self._query, **self._cursor_args) + self._cursor_obj = self._collection.with_options( + read_preference=self._read_preference + ).find(self._query, **self._cursor_args) else: - self._cursor_obj = self._collection.find(self._query, - **self._cursor_args) + self._cursor_obj = self._collection.find(self._query, **self._cursor_args) # Apply "where" clauses to cursor if self._where_clause: where_clause = self._sub_js_fields(self._where_clause) @@ -1576,9 +1636,9 @@ class BaseQuerySet(object): if self._ordering: # explicit ordering self._cursor_obj.sort(self._ordering) - elif self._ordering is None and self._document._meta['ordering']: + elif self._ordering is None and self._document._meta["ordering"]: # default ordering - order = self._get_order_by(self._document._meta['ordering']) + order = self._get_order_by(self._document._meta["ordering"]) self._cursor_obj.sort(order) if self._limit is not None: @@ -1607,8 +1667,10 @@ class BaseQuerySet(object): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) if self._class_check and self._initial_query: - if '_cls' in self._mongo_query: - self._mongo_query = {'$and': [self._initial_query, self._mongo_query]} + if "_cls" in self._mongo_query: + self._mongo_query = { + "$and": [self._initial_query, self._mongo_query] + } else: self._mongo_query.update(self._initial_query) return self._mongo_query @@ -1616,7 +1678,7 @@ class BaseQuerySet(object): @property def _dereference(self): if not self.__dereference: - self.__dereference = _import_class('DeReference')() + self.__dereference = _import_class("DeReference")() return self.__dereference def no_dereference(self): @@ -1649,7 +1711,9 @@ class BaseQuerySet(object): emit(null, 1); } } - """ % {'field': field} + """ % { + "field": field + } reduce_func = """ function(key, values) { var total = 0; @@ -1660,7 +1724,7 @@ class BaseQuerySet(object): return total; } """ - values = self.map_reduce(map_func, reduce_func, 'inline') + values = self.map_reduce(map_func, reduce_func, "inline") frequencies = {} for f in values: key = f.key @@ -1671,8 +1735,7 @@ class BaseQuerySet(object): if normalize: count = sum(frequencies.values()) - frequencies = {k: float(v) / count - for k, v in frequencies.items()} + frequencies = {k: float(v) / count for k, v in frequencies.items()} return 
frequencies @@ -1742,15 +1805,14 @@ class BaseQuerySet(object): def _fields_to_dbfields(self, fields): """Translate fields' paths to their db equivalents.""" subclasses = [] - if self._document._meta['allow_inheritance']: - subclasses = [get_document(x) - for x in self._document._subclasses][1:] + if self._document._meta["allow_inheritance"]: + subclasses = [get_document(x) for x in self._document._subclasses][1:] db_field_paths = [] for field in fields: - field_parts = field.split('.') + field_parts = field.split(".") try: - field = '.'.join( + field = ".".join( f if isinstance(f, six.string_types) else f.db_field for f in self._document._lookup_field(field_parts) ) @@ -1762,7 +1824,7 @@ class BaseQuerySet(object): # through its subclasses and see if it exists on any of them. for subdoc in subclasses: try: - subfield = '.'.join( + subfield = ".".join( f if isinstance(f, six.string_types) else f.db_field for f in subdoc._lookup_field(field_parts) ) @@ -1790,18 +1852,18 @@ class BaseQuerySet(object): if not key: continue - if key == '$text_score': - key_list.append(('_text_score', {'$meta': 'textScore'})) + if key == "$text_score": + key_list.append(("_text_score", {"$meta": "textScore"})) continue direction = pymongo.ASCENDING - if key[0] == '-': + if key[0] == "-": direction = pymongo.DESCENDING - if key[0] in ('-', '+'): + if key[0] in ("-", "+"): key = key[1:] - key = key.replace('__', '.') + key = key.replace("__", ".") try: key = self._document._translate_field_name(key) except Exception: @@ -1813,9 +1875,8 @@ class BaseQuerySet(object): return key_list def _get_scalar(self, doc): - def lookup(obj, name): - chunks = name.split('__') + chunks = name.split("__") for chunk in chunks: obj = getattr(obj, chunk) return obj @@ -1835,21 +1896,20 @@ class BaseQuerySet(object): def field_sub(match): # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') + field_name = match.group(1).split(".") fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript return u'["%s"]' % fields[-1].db_field def field_path_sub(match): # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') + field_name = match.group(1).split(".") fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript - return '.'.join([f.db_field for f in fields]) + return ".".join([f.db_field for f in fields]) - code = re.sub(r'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) - code = re.sub(r'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, - code) + code = re.sub(r"\[\s*~([A-z_][A-z_0-9.]+?)\s*\]", field_sub, code) + code = re.sub(r"\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}", field_path_sub, code) return code def _chainable_method(self, method_name, val): @@ -1866,22 +1926,26 @@ class BaseQuerySet(object): getattr(cursor, method_name)(val) # Cache the value on the queryset._{method_name} - setattr(queryset, '_' + method_name, val) + setattr(queryset, "_" + method_name, val) return queryset # Deprecated def ensure_index(self, **kwargs): """Deprecated use :func:`Document.ensure_index`""" - msg = ('Doc.objects()._ensure_index() is deprecated. ' - 'Use Doc.ensure_index() instead.') + msg = ( + "Doc.objects()._ensure_index() is deprecated. " + "Use Doc.ensure_index() instead." 
+ ) warnings.warn(msg, DeprecationWarning) self._document.__class__.ensure_index(**kwargs) return self def _ensure_indexes(self): """Deprecated use :func:`~Document.ensure_indexes`""" - msg = ('Doc.objects()._ensure_indexes() is deprecated. ' - 'Use Doc.ensure_indexes() instead.') + msg = ( + "Doc.objects()._ensure_indexes() is deprecated. " + "Use Doc.ensure_indexes() instead." + ) warnings.warn(msg, DeprecationWarning) self._document.__class__.ensure_indexes() diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index dba724af..5c3ff222 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -1,12 +1,15 @@ -__all__ = ('QueryFieldList',) +__all__ = ("QueryFieldList",) class QueryFieldList(object): """Object that handles combinations of .only() and .exclude() calls""" + ONLY = 1 EXCLUDE = 0 - def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): + def __init__( + self, fields=None, value=ONLY, always_include=None, _only_called=False + ): """The QueryFieldList builder :param fields: A list of fields used in `.only()` or `.exclude()` @@ -49,7 +52,7 @@ class QueryFieldList(object): self.fields = f.fields - self.fields self._clean_slice() - if '_id' in f.fields: + if "_id" in f.fields: self._id = f.value if self.always_include: @@ -59,7 +62,7 @@ class QueryFieldList(object): else: self.fields -= self.always_include - if getattr(f, '_only_called', False): + if getattr(f, "_only_called", False): self._only_called = True return self @@ -73,7 +76,7 @@ class QueryFieldList(object): if self.slice: field_list.update(self.slice) if self._id is not None: - field_list['_id'] = self._id + field_list["_id"] = self._id return field_list def reset(self): diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index f93dbb43..5067ffbf 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -1,7 +1,7 @@ from functools import partial from mongoengine.queryset.queryset import QuerySet -__all__ = ('queryset_manager', 'QuerySetManager') +__all__ = ("queryset_manager", "QuerySetManager") class QuerySetManager(object): @@ -33,7 +33,7 @@ class QuerySetManager(object): return self # owner is the document that contains the QuerySetManager - queryset_class = owner._meta.get('queryset_class', self.default) + queryset_class = owner._meta.get("queryset_class", self.default) queryset = queryset_class(owner, owner._get_collection()) if self.get_queryset: arg_count = self.get_queryset.__code__.co_argcount diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index c7c593b1..4ba62d46 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,11 +1,24 @@ import six from mongoengine.errors import OperationError -from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, - NULLIFY, PULL) +from mongoengine.queryset.base import ( + BaseQuerySet, + CASCADE, + DENY, + DO_NOTHING, + NULLIFY, + PULL, +) -__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', - 'DENY', 'PULL') +__all__ = ( + "QuerySet", + "QuerySetNoCache", + "DO_NOTHING", + "NULLIFY", + "CASCADE", + "DENY", + "PULL", +) # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 @@ -57,12 +70,12 @@ class QuerySet(BaseQuerySet): def __repr__(self): """Provide a string representation of the QuerySet""" if self._iter: - return '.. queryset mid-iteration ..' + return ".. 
queryset mid-iteration .." self._populate_cache() - data = self._result_cache[:REPR_OUTPUT_SIZE + 1] + data = self._result_cache[: REPR_OUTPUT_SIZE + 1] if len(data) > REPR_OUTPUT_SIZE: - data[-1] = '...(remaining elements truncated)...' + data[-1] = "...(remaining elements truncated)..." return repr(data) def _iter_results(self): @@ -143,10 +156,9 @@ class QuerySet(BaseQuerySet): .. versionadded:: 0.8.3 Convert to non caching queryset """ if self._result_cache is not None: - raise OperationError('QuerySet already cached') + raise OperationError("QuerySet already cached") - return self._clone_into(QuerySetNoCache(self._document, - self._collection)) + return self._clone_into(QuerySetNoCache(self._document, self._collection)) class QuerySetNoCache(BaseQuerySet): @@ -165,7 +177,7 @@ class QuerySetNoCache(BaseQuerySet): .. versionchanged:: 0.6.13 Now doesnt modify the cursor """ if self._iter: - return '.. queryset mid-iteration ..' + return ".. queryset mid-iteration .." data = [] for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): @@ -175,7 +187,7 @@ class QuerySetNoCache(BaseQuerySet): break if len(data) > REPR_OUTPUT_SIZE: - data[-1] = '...(remaining elements truncated)...' + data[-1] = "...(remaining elements truncated)..." self.rewind() return repr(data) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 128a4e44..0b73e99b 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -10,21 +10,54 @@ from mongoengine.base import UPDATE_OPERATORS from mongoengine.common import _import_class from mongoengine.errors import InvalidQueryError -__all__ = ('query', 'update') +__all__ = ("query", "update") -COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', - 'all', 'size', 'exists', 'not', 'elemMatch', 'type') -GEO_OPERATORS = ('within_distance', 'within_spherical_distance', - 'within_box', 'within_polygon', 'near', 'near_sphere', - 'max_distance', 'min_distance', 'geo_within', 'geo_within_box', - 'geo_within_polygon', 'geo_within_center', - 'geo_within_sphere', 'geo_intersects') -STRING_OPERATORS = ('contains', 'icontains', 'startswith', - 'istartswith', 'endswith', 'iendswith', - 'exact', 'iexact') -CUSTOM_OPERATORS = ('match',) -MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + - STRING_OPERATORS + CUSTOM_OPERATORS) +COMPARISON_OPERATORS = ( + "ne", + "gt", + "gte", + "lt", + "lte", + "in", + "nin", + "mod", + "all", + "size", + "exists", + "not", + "elemMatch", + "type", +) +GEO_OPERATORS = ( + "within_distance", + "within_spherical_distance", + "within_box", + "within_polygon", + "near", + "near_sphere", + "max_distance", + "min_distance", + "geo_within", + "geo_within_box", + "geo_within_polygon", + "geo_within_center", + "geo_within_sphere", + "geo_intersects", +) +STRING_OPERATORS = ( + "contains", + "icontains", + "startswith", + "istartswith", + "endswith", + "iendswith", + "exact", + "iexact", +) +CUSTOM_OPERATORS = ("match",) +MATCH_OPERATORS = ( + COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS +) # TODO make this less complex @@ -33,11 +66,11 @@ def query(_doc_cls=None, **kwargs): mongo_query = {} merge_query = defaultdict(list) for key, value in sorted(kwargs.items()): - if key == '__raw__': + if key == "__raw__": mongo_query.update(value) continue - parts = key.rsplit('__') + parts = key.rsplit("__") indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] parts = [part for part in parts if not part.isdigit()] # Check for an operator and transform to 
mongo-style if there is @@ -46,11 +79,11 @@ def query(_doc_cls=None, **kwargs): op = parts.pop() # Allow to escape operator-like field name by __ - if len(parts) > 1 and parts[-1] == '': + if len(parts) > 1 and parts[-1] == "": parts.pop() negate = False - if len(parts) > 1 and parts[-1] == 'not': + if len(parts) > 1 and parts[-1] == "not": parts.pop() negate = True @@ -62,8 +95,8 @@ def query(_doc_cls=None, **kwargs): raise InvalidQueryError(e) parts = [] - CachedReferenceField = _import_class('CachedReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') + CachedReferenceField = _import_class("CachedReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") cleaned_fields = [] for field in fields: @@ -73,7 +106,7 @@ def query(_doc_cls=None, **kwargs): append_field = False # is last and CachedReferenceField elif isinstance(field, CachedReferenceField) and fields[-1] == field: - parts.append('%s._id' % field.db_field) + parts.append("%s._id" % field.db_field) else: parts.append(field.db_field) @@ -83,15 +116,15 @@ def query(_doc_cls=None, **kwargs): # Convert value to proper value field = cleaned_fields[-1] - singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] + singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"] singular_ops += STRING_OPERATORS if op in singular_ops: value = field.prepare_query_value(op, value) if isinstance(field, CachedReferenceField) and value: - value = value['_id'] + value = value["_id"] - elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): + elif op in ("in", "nin", "all", "near") and not isinstance(value, dict): # Raise an error if the in/nin/all/near param is not iterable. value = _prepare_query_for_iterable(field, op, value) @@ -101,39 +134,40 @@ def query(_doc_cls=None, **kwargs): # * If the value is an ObjectId, the key should be "field_name._ref.$id". 
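        # For illustration only (hypothetical Doc with ref = GenericReferenceField();
        # oid is an ObjectId) -- a sketch of how the key rewriting below is expected
        # to surface in the generated filter, not part of the patch itself:
        #   query(Doc, ref=DBRef("other", oid))  -> {"ref._ref": DBRef("other", oid)}
        #   query(Doc, ref=oid)                  -> {"ref._ref.$id": oid}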
if isinstance(field, GenericReferenceField): if isinstance(value, DBRef): - parts[-1] += '._ref' + parts[-1] += "._ref" elif isinstance(value, ObjectId): - parts[-1] += '._ref.$id' + parts[-1] += "._ref.$id" # if op and op not in COMPARISON_OPERATORS: if op: if op in GEO_OPERATORS: value = _geo_operator(field, op, value) - elif op in ('match', 'elemMatch'): - ListField = _import_class('ListField') - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') + elif op in ("match", "elemMatch"): + ListField = _import_class("ListField") + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") if ( - isinstance(value, dict) and - isinstance(field, ListField) and - isinstance(field.field, EmbeddedDocumentField) + isinstance(value, dict) + and isinstance(field, ListField) + and isinstance(field.field, EmbeddedDocumentField) ): value = query(field.field.document_type, **value) else: value = field.prepare_query_value(op, value) - value = {'$elemMatch': value} + value = {"$elemMatch": value} elif op in CUSTOM_OPERATORS: - NotImplementedError('Custom method "%s" has not ' - 'been implemented' % op) + NotImplementedError( + 'Custom method "%s" has not ' "been implemented" % op + ) elif op not in STRING_OPERATORS: - value = {'$' + op: value} + value = {"$" + op: value} if negate: - value = {'$not': value} + value = {"$not": value} for i, part in indices: parts.insert(i, part) - key = '.'.join(parts) + key = ".".join(parts) if op is None or key not in mongo_query: mongo_query[key] = value @@ -142,30 +176,35 @@ def query(_doc_cls=None, **kwargs): mongo_query[key].update(value) # $max/minDistance needs to come last - convert to SON value_dict = mongo_query[key] - if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ - ('$near' in value_dict or '$nearSphere' in value_dict): + if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and ( + "$near" in value_dict or "$nearSphere" in value_dict + ): value_son = SON() for k, v in iteritems(value_dict): - if k == '$maxDistance' or k == '$minDistance': + if k == "$maxDistance" or k == "$minDistance": continue value_son[k] = v # Required for MongoDB >= 2.6, may fail when combining # PyMongo 3+ and MongoDB < 2.6 near_embedded = False - for near_op in ('$near', '$nearSphere'): + for near_op in ("$near", "$nearSphere"): if isinstance(value_dict.get(near_op), dict): value_son[near_op] = SON(value_son[near_op]) - if '$maxDistance' in value_dict: - value_son[near_op]['$maxDistance'] = value_dict['$maxDistance'] - if '$minDistance' in value_dict: - value_son[near_op]['$minDistance'] = value_dict['$minDistance'] + if "$maxDistance" in value_dict: + value_son[near_op]["$maxDistance"] = value_dict[ + "$maxDistance" + ] + if "$minDistance" in value_dict: + value_son[near_op]["$minDistance"] = value_dict[ + "$minDistance" + ] near_embedded = True if not near_embedded: - if '$maxDistance' in value_dict: - value_son['$maxDistance'] = value_dict['$maxDistance'] - if '$minDistance' in value_dict: - value_son['$minDistance'] = value_dict['$minDistance'] + if "$maxDistance" in value_dict: + value_son["$maxDistance"] = value_dict["$maxDistance"] + if "$minDistance" in value_dict: + value_son["$minDistance"] = value_dict["$minDistance"] mongo_query[key] = value_son else: # Store for manually merging later @@ -177,10 +216,10 @@ def query(_doc_cls=None, **kwargs): del mongo_query[k] if isinstance(v, list): value = [{k: val} for val in v] - if '$and' in mongo_query.keys(): - mongo_query['$and'].extend(value) + if "$and" in mongo_query.keys(): + 
mongo_query["$and"].extend(value) else: - mongo_query['$and'] = value + mongo_query["$and"] = value return mongo_query @@ -192,15 +231,15 @@ def update(_doc_cls=None, **update): mongo_update = {} for key, value in update.items(): - if key == '__raw__': + if key == "__raw__": mongo_update.update(value) continue - parts = key.split('__') + parts = key.split("__") # if there is no operator, default to 'set' if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: - parts.insert(0, 'set') + parts.insert(0, "set") # Check for an operator and transform to mongo-style if there is op = None @@ -208,13 +247,13 @@ def update(_doc_cls=None, **update): op = parts.pop(0) # Convert Pythonic names to Mongo equivalents operator_map = { - 'push_all': 'pushAll', - 'pull_all': 'pullAll', - 'dec': 'inc', - 'add_to_set': 'addToSet', - 'set_on_insert': 'setOnInsert' + "push_all": "pushAll", + "pull_all": "pullAll", + "dec": "inc", + "add_to_set": "addToSet", + "set_on_insert": "setOnInsert", } - if op == 'dec': + if op == "dec": # Support decrement by flipping a positive value's sign # and using 'inc' value = -value @@ -227,7 +266,7 @@ def update(_doc_cls=None, **update): match = parts.pop() # Allow to escape operator-like field name by __ - if len(parts) > 1 and parts[-1] == '': + if len(parts) > 1 and parts[-1] == "": parts.pop() if _doc_cls: @@ -244,8 +283,8 @@ def update(_doc_cls=None, **update): append_field = True if isinstance(field, six.string_types): # Convert the S operator to $ - if field == 'S': - field = '$' + if field == "S": + field = "$" parts.append(field) append_field = False else: @@ -253,7 +292,7 @@ def update(_doc_cls=None, **update): if append_field: appended_sub_field = False cleaned_fields.append(field) - if hasattr(field, 'field'): + if hasattr(field, "field"): cleaned_fields.append(field.field) appended_sub_field = True @@ -263,52 +302,53 @@ def update(_doc_cls=None, **update): else: field = cleaned_fields[-1] - GeoJsonBaseField = _import_class('GeoJsonBaseField') + GeoJsonBaseField = _import_class("GeoJsonBaseField") if isinstance(field, GeoJsonBaseField): value = field.to_mongo(value) - if op == 'pull': + if op == "pull": if field.required or value is not None: - if match in ('in', 'nin') and not isinstance(value, dict): + if match in ("in", "nin") and not isinstance(value, dict): value = _prepare_query_for_iterable(field, op, value) else: value = field.prepare_query_value(op, value) - elif op == 'push' and isinstance(value, (list, tuple, set)): + elif op == "push" and isinstance(value, (list, tuple, set)): value = [field.prepare_query_value(op, v) for v in value] - elif op in (None, 'set', 'push'): + elif op in (None, "set", "push"): if field.required or value is not None: value = field.prepare_query_value(op, value) - elif op in ('pushAll', 'pullAll'): + elif op in ("pushAll", "pullAll"): value = [field.prepare_query_value(op, v) for v in value] - elif op in ('addToSet', 'setOnInsert'): + elif op in ("addToSet", "setOnInsert"): if isinstance(value, (list, tuple, set)): value = [field.prepare_query_value(op, v) for v in value] elif field.required or value is not None: value = field.prepare_query_value(op, value) - elif op == 'unset': + elif op == "unset": value = 1 - elif op == 'inc': + elif op == "inc": value = field.prepare_query_value(op, value) if match: - match = '$' + match + match = "$" + match value = {match: value} - key = '.'.join(parts) + key = ".".join(parts) - if 'pull' in op and '.' in key: + if "pull" in op and "." 
in key: # Dot operators don't work on pull operations # unless they point to a list field # Otherwise it uses nested dict syntax - if op == 'pullAll': - raise InvalidQueryError('pullAll operations only support ' - 'a single field depth') + if op == "pullAll": + raise InvalidQueryError( + "pullAll operations only support a single field depth" + ) # Look for the last list field and use dot notation until there field_classes = [c.__class__ for c in cleaned_fields] field_classes.reverse() - ListField = _import_class('ListField') - EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') + ListField = _import_class("ListField") + EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") if ListField in field_classes or EmbeddedDocumentListField in field_classes: # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField # Then process as normal @@ -317,37 +357,36 @@ def update(_doc_cls=None, **update): else: _check_field = EmbeddedDocumentListField - last_listField = len( - cleaned_fields) - field_classes.index(_check_field) - key = '.'.join(parts[:last_listField]) + last_listField = len(cleaned_fields) - field_classes.index(_check_field) + key = ".".join(parts[:last_listField]) parts = parts[last_listField:] parts.insert(0, key) parts.reverse() for key in parts: value = {key: value} - elif op == 'addToSet' and isinstance(value, list): - value = {key: {'$each': value}} - elif op in ('push', 'pushAll'): + elif op == "addToSet" and isinstance(value, list): + value = {key: {"$each": value}} + elif op in ("push", "pushAll"): if parts[-1].isdigit(): - key = '.'.join(parts[0:-1]) + key = ".".join(parts[0:-1]) position = int(parts[-1]) # $position expects an iterable. If pushing a single value, # wrap it in a list. 
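                # For illustration only (hypothetical Doc with
                # tags = ListField(StringField()); not part of the patch itself):
                # an update expressed as
                #   update(_doc_cls=Doc, push__tags__0="new-tag")
                # is expected to come out as
                #   {"$push": {"tags": {"$each": ["new-tag"], "$position": 0}}}
                # i.e. the trailing numeric part becomes the insertion index.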
if not isinstance(value, (set, tuple, list)): value = [value] - value = {key: {'$each': value, '$position': position}} + value = {key: {"$each": value, "$position": position}} else: - if op == 'pushAll': - op = 'push' # convert to non-deprecated keyword + if op == "pushAll": + op = "push" # convert to non-deprecated keyword if not isinstance(value, (set, tuple, list)): value = [value] - value = {key: {'$each': value}} + value = {key: {"$each": value}} else: value = {key: value} else: value = {key: value} - key = '$' + op + key = "$" + op if key not in mongo_update: mongo_update[key] = value elif key in mongo_update and isinstance(mongo_update[key], dict): @@ -358,45 +397,45 @@ def update(_doc_cls=None, **update): def _geo_operator(field, op, value): """Helper to return the query for a given geo query.""" - if op == 'max_distance': - value = {'$maxDistance': value} - elif op == 'min_distance': - value = {'$minDistance': value} + if op == "max_distance": + value = {"$maxDistance": value} + elif op == "min_distance": + value = {"$minDistance": value} elif field._geo_index == pymongo.GEO2D: - if op == 'within_distance': - value = {'$within': {'$center': value}} - elif op == 'within_spherical_distance': - value = {'$within': {'$centerSphere': value}} - elif op == 'within_polygon': - value = {'$within': {'$polygon': value}} - elif op == 'near': - value = {'$near': value} - elif op == 'near_sphere': - value = {'$nearSphere': value} - elif op == 'within_box': - value = {'$within': {'$box': value}} - else: - raise NotImplementedError('Geo method "%s" has not been ' - 'implemented for a GeoPointField' % op) - else: - if op == 'geo_within': - value = {'$geoWithin': _infer_geometry(value)} - elif op == 'geo_within_box': - value = {'$geoWithin': {'$box': value}} - elif op == 'geo_within_polygon': - value = {'$geoWithin': {'$polygon': value}} - elif op == 'geo_within_center': - value = {'$geoWithin': {'$center': value}} - elif op == 'geo_within_sphere': - value = {'$geoWithin': {'$centerSphere': value}} - elif op == 'geo_intersects': - value = {'$geoIntersects': _infer_geometry(value)} - elif op == 'near': - value = {'$near': _infer_geometry(value)} + if op == "within_distance": + value = {"$within": {"$center": value}} + elif op == "within_spherical_distance": + value = {"$within": {"$centerSphere": value}} + elif op == "within_polygon": + value = {"$within": {"$polygon": value}} + elif op == "near": + value = {"$near": value} + elif op == "near_sphere": + value = {"$nearSphere": value} + elif op == "within_box": + value = {"$within": {"$box": value}} else: raise NotImplementedError( - 'Geo method "%s" has not been implemented for a %s ' - % (op, field._name) + 'Geo method "%s" has not been ' "implemented for a GeoPointField" % op + ) + else: + if op == "geo_within": + value = {"$geoWithin": _infer_geometry(value)} + elif op == "geo_within_box": + value = {"$geoWithin": {"$box": value}} + elif op == "geo_within_polygon": + value = {"$geoWithin": {"$polygon": value}} + elif op == "geo_within_center": + value = {"$geoWithin": {"$center": value}} + elif op == "geo_within_sphere": + value = {"$geoWithin": {"$centerSphere": value}} + elif op == "geo_intersects": + value = {"$geoIntersects": _infer_geometry(value)} + elif op == "near": + value = {"$near": _infer_geometry(value)} + else: + raise NotImplementedError( + 'Geo method "%s" has not been implemented for a %s ' % (op, field._name) ) return value @@ -406,51 +445,58 @@ def _infer_geometry(value): given value. 
""" if isinstance(value, dict): - if '$geometry' in value: + if "$geometry" in value: return value - elif 'coordinates' in value and 'type' in value: - return {'$geometry': value} - raise InvalidQueryError('Invalid $geometry dictionary should have ' - 'type and coordinates keys') + elif "coordinates" in value and "type" in value: + return {"$geometry": value} + raise InvalidQueryError( + "Invalid $geometry dictionary should have type and coordinates keys" + ) elif isinstance(value, (list, set)): # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? try: value[0][0][0] - return {'$geometry': {'type': 'Polygon', 'coordinates': value}} + return {"$geometry": {"type": "Polygon", "coordinates": value}} except (TypeError, IndexError): pass try: value[0][0] - return {'$geometry': {'type': 'LineString', 'coordinates': value}} + return {"$geometry": {"type": "LineString", "coordinates": value}} except (TypeError, IndexError): pass try: value[0] - return {'$geometry': {'type': 'Point', 'coordinates': value}} + return {"$geometry": {"type": "Point", "coordinates": value}} except (TypeError, IndexError): pass - raise InvalidQueryError('Invalid $geometry data. Can be either a ' - 'dictionary or (nested) lists of coordinate(s)') + raise InvalidQueryError( + "Invalid $geometry data. Can be either a " + "dictionary or (nested) lists of coordinate(s)" + ) def _prepare_query_for_iterable(field, op, value): # We need a special check for BaseDocument, because - although it's iterable - using # it as such in the context of this method is most definitely a mistake. - BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if isinstance(value, BaseDocument): - raise TypeError("When using the `in`, `nin`, `all`, or " - "`near`-operators you can\'t use a " - "`Document`, you must wrap your object " - "in a list (object -> [object]).") + raise TypeError( + "When using the `in`, `nin`, `all`, or " + "`near`-operators you can't use a " + "`Document`, you must wrap your object " + "in a list (object -> [object])." + ) - if not hasattr(value, '__iter__'): - raise TypeError("The `in`, `nin`, `all`, or " - "`near`-operators must be applied to an " - "iterable (e.g. a list).") + if not hasattr(value, "__iter__"): + raise TypeError( + "The `in`, `nin`, `all`, or " + "`near`-operators must be applied to an " + "iterable (e.g. a list)." + ) return [field.prepare_query_value(op, v) for v in value] diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 9d97094b..0fe139fd 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -3,7 +3,7 @@ import copy from mongoengine.errors import InvalidQueryError from mongoengine.queryset import transform -__all__ = ('Q', 'QNode') +__all__ = ("Q", "QNode") class QNodeVisitor(object): @@ -69,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor): self.document = document def visit_combination(self, combination): - operator = '$and' + operator = "$and" if combination.operation == combination.OR: - operator = '$or' + operator = "$or" return {operator: combination.children} def visit_query(self, query): @@ -96,7 +96,7 @@ class QNode(object): """Combine this node with another node into a QCombination object. 
""" - if getattr(other, 'empty', True): + if getattr(other, "empty", True): return self if self.empty: @@ -132,8 +132,8 @@ class QCombination(QNode): self.children.append(node) def __repr__(self): - op = ' & ' if self.operation is self.AND else ' | ' - return '(%s)' % op.join([repr(node) for node in self.children]) + op = " & " if self.operation is self.AND else " | " + return "(%s)" % op.join([repr(node) for node in self.children]) def accept(self, visitor): for i in range(len(self.children)): @@ -156,7 +156,7 @@ class Q(QNode): self.query = query def __repr__(self): - return 'Q(**%s)' % repr(self.query) + return "Q(**%s)" % repr(self.query) def accept(self, visitor): return visitor.visit_query(self) diff --git a/mongoengine/signals.py b/mongoengine/signals.py index a892dec0..0db63604 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -1,5 +1,12 @@ -__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', - 'post_save', 'pre_delete', 'post_delete') +__all__ = ( + "pre_init", + "post_init", + "pre_save", + "pre_save_post_validation", + "post_save", + "pre_delete", + "post_delete", +) signals_available = False try: @@ -7,6 +14,7 @@ try: signals_available = True except ImportError: + class Namespace(object): def signal(self, name, doc=None): return _FakeSignal(name, doc) @@ -23,13 +31,16 @@ except ImportError: self.__doc__ = doc def _fail(self, *args, **kwargs): - raise RuntimeError('signalling support is unavailable ' - 'because the blinker library is ' - 'not installed.') + raise RuntimeError( + "signalling support is unavailable " + "because the blinker library is " + "not installed." + ) send = lambda *a, **kw: None # noqa - connect = disconnect = has_receivers_for = receivers_for = \ - temporarily_connected_to = _fail + connect = ( + disconnect + ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail del _fail @@ -37,12 +48,12 @@ except ImportError: # not put signals in here. Create your own namespace instead. 
_signals = Namespace() -pre_init = _signals.signal('pre_init') -post_init = _signals.signal('post_init') -pre_save = _signals.signal('pre_save') -pre_save_post_validation = _signals.signal('pre_save_post_validation') -post_save = _signals.signal('post_save') -pre_delete = _signals.signal('pre_delete') -post_delete = _signals.signal('post_delete') -pre_bulk_insert = _signals.signal('pre_bulk_insert') -post_bulk_insert = _signals.signal('post_bulk_insert') +pre_init = _signals.signal("pre_init") +post_init = _signals.signal("post_init") +pre_save = _signals.signal("pre_save") +pre_save_post_validation = _signals.signal("pre_save_post_validation") +post_save = _signals.signal("post_save") +pre_delete = _signals.signal("pre_delete") +post_delete = _signals.signal("post_delete") +pre_bulk_insert = _signals.signal("pre_bulk_insert") +post_bulk_insert = _signals.signal("post_bulk_insert") diff --git a/requirements.txt b/requirements.txt index 9bb319a5..62ad8766 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +black nose pymongo>=3.4 six==1.10.0 diff --git a/setup.cfg b/setup.cfg index 84086601..4bded428 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,7 +5,7 @@ detailed-errors=1 cover-package=mongoengine [flake8] -ignore=E501,F401,F403,F405,I201,I202,W504, W605 +ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503 exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests max-complexity=47 application-import-names=mongoengine,tests diff --git a/setup.py b/setup.py index f1f5dea7..c73a93ff 100644 --- a/setup.py +++ b/setup.py @@ -8,13 +8,10 @@ try: except ImportError: pass -DESCRIPTION = ( - 'MongoEngine is a Python Object-Document ' - 'Mapper for working with MongoDB.' -) +DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." try: - with open('README.rst') as fin: + with open("README.rst") as fin: LONG_DESCRIPTION = fin.read() except Exception: LONG_DESCRIPTION = None @@ -24,23 +21,23 @@ def get_version(version_tuple): """Return the version tuple as a string, e.g. for (0, 10, 7), return '0.10.7'. 
""" - return '.'.join(map(str, version_tuple)) + return ".".join(map(str, version_tuple)) # Dirty hack to get version number from monogengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read -init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') -version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] +init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py") +version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] -VERSION = get_version(eval(version_line.split('=')[-1])) +VERSION = get_version(eval(version_line.split("=")[-1])) CLASSIFIERS = [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", @@ -48,39 +45,40 @@ CLASSIFIERS = [ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", - 'Topic :: Database', - 'Topic :: Software Development :: Libraries :: Python Modules', + "Topic :: Database", + "Topic :: Software Development :: Libraries :: Python Modules", ] extra_opts = { - 'packages': find_packages(exclude=['tests', 'tests.*']), - 'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0'] + "packages": find_packages(exclude=["tests", "tests.*"]), + "tests_require": ["nose", "coverage==4.2", "blinker", "Pillow>=2.0.0"], } if sys.version_info[0] == 3: - extra_opts['use_2to3'] = True - if 'test' in sys.argv or 'nosetests' in sys.argv: - extra_opts['packages'] = find_packages() - extra_opts['package_data'] = { - 'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']} + extra_opts["use_2to3"] = True + if "test" in sys.argv or "nosetests" in sys.argv: + extra_opts["packages"] = find_packages() + extra_opts["package_data"] = { + "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] + } else: - extra_opts['tests_require'] += ['python-dateutil'] + extra_opts["tests_require"] += ["python-dateutil"] setup( - name='mongoengine', + name="mongoengine", version=VERSION, - author='Harry Marr', - author_email='harry.marr@gmail.com', + author="Harry Marr", + author_email="harry.marr@gmail.com", maintainer="Stefan Wojcik", maintainer_email="wojcikstefan@gmail.com", - url='http://mongoengine.org/', - download_url='https://github.com/MongoEngine/mongoengine/tarball/master', - license='MIT', + url="http://mongoengine.org/", + download_url="https://github.com/MongoEngine/mongoengine/tarball/master", + license="MIT", include_package_data=True, description=DESCRIPTION, long_description=LONG_DESCRIPTION, - platforms=['any'], + platforms=["any"], classifiers=CLASSIFIERS, - install_requires=['pymongo>=3.4', 'six'], - test_suite='nose.collector', + install_requires=["pymongo>=3.4", "six"], + test_suite="nose.collector", **extra_opts ) diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 3aebe4ba..a755e7a3 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -9,34 +9,32 @@ import warnings from mongoengine import * -__all__ = 
('AllWarnings', ) +__all__ = ("AllWarnings",) class AllWarnings(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") self.warning_list = [] self.showwarning_default = warnings.showwarning warnings.showwarning = self.append_to_warning_list def append_to_warning_list(self, message, category, *args): - self.warning_list.append({"message": message, - "category": category}) + self.warning_list.append({"message": message, "category": category}) def tearDown(self): # restore default handling of warnings warnings.showwarning = self.showwarning_default def test_document_collection_syntax_warning(self): - class NonAbstractBase(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class InheritedDocumentFailTest(NonAbstractBase): - meta = {'collection': 'fail'} + meta = {"collection": "fail"} warning = self.warning_list[0] self.assertEqual(SyntaxWarning, warning["category"]) - self.assertEqual('non_abstract_base', - InheritedDocumentFailTest._get_collection_name()) + self.assertEqual( + "non_abstract_base", InheritedDocumentFailTest._get_collection_name() + ) diff --git a/tests/document/__init__.py b/tests/document/__init__.py index dc35c969..f2230c48 100644 --- a/tests/document/__init__.py +++ b/tests/document/__init__.py @@ -9,5 +9,5 @@ from .instance import * from .json_serialisation import * from .validation import * -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index 4fc648b7..87f1215b 100644 --- a/tests/document/class_methods.py +++ b/tests/document/class_methods.py @@ -7,13 +7,12 @@ from mongoengine.pymongo_support import list_collection_names from mongoengine.queryset import NULLIFY, PULL from mongoengine.connection import get_db -__all__ = ("ClassMethodsTest", ) +__all__ = ("ClassMethodsTest",) class ClassMethodsTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") self.db = get_db() class Person(Document): @@ -33,11 +32,13 @@ class ClassMethodsTest(unittest.TestCase): def test_definition(self): """Ensure that document may be defined using fields. """ - self.assertEqual(['_cls', 'age', 'id', 'name'], - sorted(self.Person._fields.keys())) - self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"], - sorted([x.__class__.__name__ for x in - self.Person._fields.values()])) + self.assertEqual( + ["_cls", "age", "id", "name"], sorted(self.Person._fields.keys()) + ) + self.assertEqual( + ["IntField", "ObjectIdField", "StringField", "StringField"], + sorted([x.__class__.__name__ for x in self.Person._fields.values()]), + ) def test_get_db(self): """Ensure that get_db returns the expected db. @@ -49,21 +50,21 @@ class ClassMethodsTest(unittest.TestCase): """Ensure that get_collection_name returns the expected collection name. """ - collection_name = 'person' + collection_name = "person" self.assertEqual(collection_name, self.Person._get_collection_name()) def test_get_collection(self): """Ensure that get_collection returns the expected collection. """ - collection_name = 'person' + collection_name = "person" collection = self.Person._get_collection() self.assertEqual(self.db[collection_name], collection) def test_drop_collection(self): """Ensure that the collection may be dropped from the database. 
""" - collection_name = 'person' - self.Person(name='Test').save() + collection_name = "person" + self.Person(name="Test").save() self.assertIn(collection_name, list_collection_names(self.db)) self.Person.drop_collection() @@ -73,14 +74,16 @@ class ClassMethodsTest(unittest.TestCase): """Ensure that register delete rule adds a delete rule to the document meta. """ + class Job(Document): employee = ReferenceField(self.Person) - self.assertEqual(self.Person._meta.get('delete_rules'), None) + self.assertEqual(self.Person._meta.get("delete_rules"), None) - self.Person.register_delete_rule(Job, 'employee', NULLIFY) - self.assertEqual(self.Person._meta['delete_rules'], - {(Job, 'employee'): NULLIFY}) + self.Person.register_delete_rule(Job, "employee", NULLIFY) + self.assertEqual( + self.Person._meta["delete_rules"], {(Job, "employee"): NULLIFY} + ) def test_compare_indexes(self): """ Ensure that the indexes are properly created and that @@ -93,23 +96,27 @@ class ClassMethodsTest(unittest.TestCase): description = StringField() tags = StringField() - meta = { - 'indexes': [('author', 'title')] - } + meta = {"indexes": [("author", "title")]} BlogPost.drop_collection() BlogPost.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) + self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) - BlogPost.ensure_index(['author', 'description']) - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]}) + BlogPost.ensure_index(["author", "description"]) + self.assertEqual( + BlogPost.compare_indexes(), + {"missing": [], "extra": [[("author", 1), ("description", 1)]]}, + ) - BlogPost._get_collection().drop_index('author_1_description_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) + BlogPost._get_collection().drop_index("author_1_description_1") + self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) - BlogPost._get_collection().drop_index('author_1_title_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []}) + BlogPost._get_collection().drop_index("author_1_title_1") + self.assertEqual( + BlogPost.compare_indexes(), + {"missing": [[("author", 1), ("title", 1)]], "extra": []}, + ) def test_compare_indexes_inheritance(self): """ Ensure that the indexes are properly created and that @@ -122,32 +129,34 @@ class ClassMethodsTest(unittest.TestCase): title = StringField() description = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class BlogPostWithTags(BlogPost): tags = StringField() tag_list = ListField(StringField()) - meta = { - 'indexes': [('author', 'tags')] - } + meta = {"indexes": [("author", "tags")]} BlogPost.drop_collection() BlogPost.ensure_indexes() BlogPostWithTags.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) + self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) - BlogPostWithTags.ensure_index(['author', 'tag_list']) - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]}) + BlogPostWithTags.ensure_index(["author", "tag_list"]) + self.assertEqual( + BlogPost.compare_indexes(), + {"missing": [], "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]]}, + ) - BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 
'extra': []}) + BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1") + self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) - BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []}) + BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1") + self.assertEqual( + BlogPost.compare_indexes(), + {"missing": [[("_cls", 1), ("author", 1), ("tags", 1)]], "extra": []}, + ) def test_compare_indexes_multiple_subclasses(self): """ Ensure that compare_indexes behaves correctly if called from a @@ -159,32 +168,30 @@ class ClassMethodsTest(unittest.TestCase): title = StringField() description = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class BlogPostWithTags(BlogPost): tags = StringField() tag_list = ListField(StringField()) - meta = { - 'indexes': [('author', 'tags')] - } + meta = {"indexes": [("author", "tags")]} class BlogPostWithCustomField(BlogPost): custom = DictField() - meta = { - 'indexes': [('author', 'custom')] - } + meta = {"indexes": [("author", "custom")]} BlogPost.ensure_indexes() BlogPostWithTags.ensure_indexes() BlogPostWithCustomField.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) - self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []}) - self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []}) + self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) + self.assertEqual( + BlogPostWithTags.compare_indexes(), {"missing": [], "extra": []} + ) + self.assertEqual( + BlogPostWithCustomField.compare_indexes(), {"missing": [], "extra": []} + ) def test_compare_indexes_for_text_indexes(self): """ Ensure that compare_indexes behaves correctly for text indexes """ @@ -192,17 +199,20 @@ class ClassMethodsTest(unittest.TestCase): class Doc(Document): a = StringField() b = StringField() - meta = {'indexes': [ - {'fields': ['$a', "$b"], - 'default_language': 'english', - 'weights': {'a': 10, 'b': 2} - } - ]} + meta = { + "indexes": [ + { + "fields": ["$a", "$b"], + "default_language": "english", + "weights": {"a": 10, "b": 2}, + } + ] + } Doc.drop_collection() Doc.ensure_indexes() actual = Doc.compare_indexes() - expected = {'missing': [], 'extra': []} + expected = {"missing": [], "extra": []} self.assertEqual(actual, expected) def test_list_indexes_inheritance(self): @@ -215,23 +225,17 @@ class ClassMethodsTest(unittest.TestCase): title = StringField() description = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class BlogPostWithTags(BlogPost): tags = StringField() - meta = { - 'indexes': [('author', 'tags')] - } + meta = {"indexes": [("author", "tags")]} class BlogPostWithTagsAndExtraText(BlogPostWithTags): extra_text = StringField() - meta = { - 'indexes': [('author', 'tags', 'extra_text')] - } + meta = {"indexes": [("author", "tags", "extra_text")]} BlogPost.drop_collection() @@ -239,17 +243,21 @@ class ClassMethodsTest(unittest.TestCase): BlogPostWithTags.ensure_indexes() BlogPostWithTagsAndExtraText.ensure_indexes() - self.assertEqual(BlogPost.list_indexes(), - BlogPostWithTags.list_indexes()) - self.assertEqual(BlogPost.list_indexes(), - BlogPostWithTagsAndExtraText.list_indexes()) - self.assertEqual(BlogPost.list_indexes(), - [[('_cls', 1), ('author', 1), ('tags', 1)], - 
[('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)], - [(u'_id', 1)], [('_cls', 1)]]) + self.assertEqual(BlogPost.list_indexes(), BlogPostWithTags.list_indexes()) + self.assertEqual( + BlogPost.list_indexes(), BlogPostWithTagsAndExtraText.list_indexes() + ) + self.assertEqual( + BlogPost.list_indexes(), + [ + [("_cls", 1), ("author", 1), ("tags", 1)], + [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)], + [(u"_id", 1)], + [("_cls", 1)], + ], + ) def test_register_delete_rule_inherited(self): - class Vaccine(Document): name = StringField(required=True) @@ -257,15 +265,17 @@ class ClassMethodsTest(unittest.TestCase): class Animal(Document): family = StringField(required=True) - vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL)) + vaccine_made = ListField( + ReferenceField("Vaccine", reverse_delete_rule=PULL) + ) meta = {"allow_inheritance": True, "indexes": ["family"]} class Cat(Animal): name = StringField(required=True) - self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL) - self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL) + self.assertEqual(Vaccine._meta["delete_rules"][(Animal, "vaccine_made")], PULL) + self.assertEqual(Vaccine._meta["delete_rules"][(Cat, "vaccine_made")], PULL) def test_collection_naming(self): """Ensure that a collection with a specified name may be used. @@ -273,74 +283,73 @@ class ClassMethodsTest(unittest.TestCase): class DefaultNamingTest(Document): pass - self.assertEqual('default_naming_test', - DefaultNamingTest._get_collection_name()) + + self.assertEqual( + "default_naming_test", DefaultNamingTest._get_collection_name() + ) class CustomNamingTest(Document): - meta = {'collection': 'pimp_my_collection'} + meta = {"collection": "pimp_my_collection"} - self.assertEqual('pimp_my_collection', - CustomNamingTest._get_collection_name()) + self.assertEqual("pimp_my_collection", CustomNamingTest._get_collection_name()) class DynamicNamingTest(Document): - meta = {'collection': lambda c: "DYNAMO"} - self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name()) + meta = {"collection": lambda c: "DYNAMO"} + + self.assertEqual("DYNAMO", DynamicNamingTest._get_collection_name()) # Use Abstract class to handle backwards compatibility class BaseDocument(Document): - meta = { - 'abstract': True, - 'collection': lambda c: c.__name__.lower() - } + meta = {"abstract": True, "collection": lambda c: c.__name__.lower()} class OldNamingConvention(BaseDocument): pass - self.assertEqual('oldnamingconvention', - OldNamingConvention._get_collection_name()) + + self.assertEqual( + "oldnamingconvention", OldNamingConvention._get_collection_name() + ) class InheritedAbstractNamingTest(BaseDocument): - meta = {'collection': 'wibble'} - self.assertEqual('wibble', - InheritedAbstractNamingTest._get_collection_name()) + meta = {"collection": "wibble"} + + self.assertEqual("wibble", InheritedAbstractNamingTest._get_collection_name()) # Mixin tests class BaseMixin(object): - meta = { - 'collection': lambda c: c.__name__.lower() - } + meta = {"collection": lambda c: c.__name__.lower()} class OldMixinNamingConvention(Document, BaseMixin): pass - self.assertEqual('oldmixinnamingconvention', - OldMixinNamingConvention._get_collection_name()) + + self.assertEqual( + "oldmixinnamingconvention", OldMixinNamingConvention._get_collection_name() + ) class BaseMixin(object): - meta = { - 'collection': lambda c: c.__name__.lower() - } + meta = {"collection": lambda c: c.__name__.lower()} class 
BaseDocument(Document, BaseMixin): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class MyDocument(BaseDocument): pass - self.assertEqual('basedocument', MyDocument._get_collection_name()) + self.assertEqual("basedocument", MyDocument._get_collection_name()) def test_custom_collection_name_operations(self): """Ensure that a collection with a specified name is used as expected. """ - collection_name = 'personCollTest' + collection_name = "personCollTest" class Person(Document): name = StringField() - meta = {'collection': collection_name} + meta = {"collection": collection_name} Person(name="Test User").save() self.assertIn(collection_name, list_collection_names(self.db)) user_obj = self.db[collection_name].find_one() - self.assertEqual(user_obj['name'], "Test User") + self.assertEqual(user_obj["name"], "Test User") user_obj = Person.objects[0] self.assertEqual(user_obj.name, "Test User") @@ -354,7 +363,7 @@ class ClassMethodsTest(unittest.TestCase): class Person(Document): name = StringField(primary_key=True) - meta = {'collection': 'app'} + meta = {"collection": "app"} Person(name="Test User").save() @@ -364,5 +373,5 @@ class ClassMethodsTest(unittest.TestCase): Person.drop_collection() -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/delta.py b/tests/document/delta.py index 504c1707..8f1575e6 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -8,7 +8,6 @@ from tests.utils import MongoDBTestCase class DeltaTest(MongoDBTestCase): - def setUp(self): super(DeltaTest, self).setUp() @@ -31,7 +30,6 @@ class DeltaTest(MongoDBTestCase): self.delta(DynamicDocument) def delta(self, DocClass): - class Doc(DocClass): string_field = StringField() int_field = IntField() @@ -46,37 +44,37 @@ class DeltaTest(MongoDBTestCase): self.assertEqual(doc._get_changed_fields(), []) self.assertEqual(doc._delta(), ({}, {})) - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['string_field']) - self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) + doc.string_field = "hello" + self.assertEqual(doc._get_changed_fields(), ["string_field"]) + self.assertEqual(doc._delta(), ({"string_field": "hello"}, {})) doc._changed_fields = [] doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['int_field']) - self.assertEqual(doc._delta(), ({'int_field': 1}, {})) + self.assertEqual(doc._get_changed_fields(), ["int_field"]) + self.assertEqual(doc._delta(), ({"int_field": 1}, {})) doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} + dict_value = {"hello": "world", "ping": "pong"} doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) + self.assertEqual(doc._get_changed_fields(), ["dict_field"]) + self.assertEqual(doc._delta(), ({"dict_field": dict_value}, {})) doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] + list_value = ["1", 2, {"hello": "world"}] doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) + self.assertEqual(doc._get_changed_fields(), ["list_field"]) + self.assertEqual(doc._delta(), ({"list_field": list_value}, {})) # Test unsetting doc._changed_fields = [] doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) + 
self.assertEqual(doc._get_changed_fields(), ["dict_field"]) + self.assertEqual(doc._delta(), ({}, {"dict_field": 1})) doc._changed_fields = [] doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({}, {'list_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["list_field"]) + self.assertEqual(doc._delta(), ({}, {"list_field": 1})) def test_delta_recursive(self): self.delta_recursive(Document, EmbeddedDocument) @@ -85,7 +83,6 @@ class DeltaTest(MongoDBTestCase): self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) def delta_recursive(self, DocClass, EmbeddedClass): - class Embedded(EmbeddedClass): id = StringField() string_field = StringField() @@ -110,165 +107,207 @@ class DeltaTest(MongoDBTestCase): embedded_1 = Embedded() embedded_1.id = "010101" - embedded_1.string_field = 'hello' + embedded_1.string_field = "hello" embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 - self.assertEqual(doc._get_changed_fields(), ['embedded_field']) + self.assertEqual(doc._get_changed_fields(), ["embedded_field"]) embedded_delta = { - 'id': "010101", - 'string_field': 'hello', - 'int_field': 1, - 'dict_field': {'hello': 'world'}, - 'list_field': ['1', 2, {'hello': 'world'}] + "id": "010101", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ["1", 2, {"hello": "world"}], } self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - self.assertEqual(doc._delta(), - ({'embedded_field': embedded_delta}, {})) + self.assertEqual(doc._delta(), ({"embedded_field": embedded_delta}, {})) doc.save() doc = doc.reload(10) doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.dict_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) - self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["embedded_field.dict_field"]) + self.assertEqual(doc.embedded_field._delta(), ({}, {"dict_field": 1})) + self.assertEqual(doc._delta(), ({}, {"embedded_field.dict_field": 1})) doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.dict_field, {}) doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) - self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field"]) + self.assertEqual(doc.embedded_field._delta(), ({}, {"list_field": 1})) + self.assertEqual(doc._delta(), ({}, {"embedded_field.list_field": 1})) doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.list_field, []) embedded_2 = Embedded() - embedded_2.string_field = 'hello' + embedded_2.string_field = "hello" embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] + embedded_2.dict_field = {"hello": "world"} + embedded_2.list_field = ["1", 2, {"hello": "world"}] - doc.embedded_field.list_field = ['1', 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field']) + doc.embedded_field.list_field = ["1", 2, embedded_2] + self.assertEqual(doc._get_changed_fields(), 
["embedded_field.list_field"]) - self.assertEqual(doc.embedded_field._delta(), ({ - 'list_field': ['1', 2, { - '_cls': 'Embedded', - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) + self.assertEqual( + doc.embedded_field._delta(), + ( + { + "list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "dict_field": {"hello": "world"}, + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ), + ) - self.assertEqual(doc._delta(), ({ - 'embedded_field.list_field': ['1', 2, { - '_cls': 'Embedded', - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) + self.assertEqual( + doc._delta(), + ( + { + "embedded_field.list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "dict_field": {"hello": "world"}, + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ), + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[0], '1') + self.assertEqual(doc.embedded_field.list_field[0], "1") self.assertEqual(doc.embedded_field.list_field[1], 2) for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], - embedded_2[k]) + self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) - doc.embedded_field.list_field[2].string_field = 'world' - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field.2.string_field']) - self.assertEqual(doc.embedded_field._delta(), - ({'list_field.2.string_field': 'world'}, {})) - self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.string_field': 'world'}, {})) + doc.embedded_field.list_field[2].string_field = "world" + self.assertEqual( + doc._get_changed_fields(), ["embedded_field.list_field.2.string_field"] + ) + self.assertEqual( + doc.embedded_field._delta(), ({"list_field.2.string_field": "world"}, {}) + ) + self.assertEqual( + doc._delta(), ({"embedded_field.list_field.2.string_field": "world"}, {}) + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'world') + self.assertEqual(doc.embedded_field.list_field[2].string_field, "world") # Test multiple assignments - doc.embedded_field.list_field[2].string_field = 'hello world' + doc.embedded_field.list_field[2].string_field = "hello world" doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field.2']) - self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': { - '_cls': 'Embedded', - 'string_field': 'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}} - }, {})) - self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': { - '_cls': 'Embedded', - 'string_field': 'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}} - }, {})) + self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field.2"]) + self.assertEqual( + doc.embedded_field._delta(), + ( + { + "list_field.2": { + "_cls": "Embedded", + "string_field": "hello world", + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + "dict_field": {"hello": "world"}, + } + }, + {}, + ), + ) + self.assertEqual( + doc._delta(), + ( + { + "embedded_field.list_field.2": { + "_cls": "Embedded", + 
"string_field": "hello world", + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + "dict_field": {"hello": "world"}, + } + }, + {}, + ), + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'hello world') + self.assertEqual(doc.embedded_field.list_field[2].string_field, "hello world") # Test list native methods doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.list_field': - [2, {'hello': 'world'}]}, {})) + self.assertEqual( + doc._delta(), + ({"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]}, {}), + ) doc.save() doc = doc.reload(10) doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.list_field': - [2, {'hello': 'world'}, 1]}, {})) + self.assertEqual( + doc._delta(), + ( + {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]}, + {}, + ), + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, - [2, {'hello': 'world'}, 1]) + self.assertEqual( + doc.embedded_field.list_field[2].list_field, [2, {"hello": "world"}, 1] + ) doc.embedded_field.list_field[2].list_field.sort(key=str) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, - [1, 2, {'hello': 'world'}]) + self.assertEqual( + doc.embedded_field.list_field[2].list_field, [1, 2, {"hello": "world"}] + ) - del doc.embedded_field.list_field[2].list_field[2]['hello'] - self.assertEqual(doc._delta(), - ({}, {'embedded_field.list_field.2.list_field.2.hello': 1})) + del doc.embedded_field.list_field[2].list_field[2]["hello"] + self.assertEqual( + doc._delta(), ({}, {"embedded_field.list_field.2.list_field.2.hello": 1}) + ) doc.save() doc = doc.reload(10) del doc.embedded_field.list_field[2].list_field - self.assertEqual(doc._delta(), - ({}, {'embedded_field.list_field.2.list_field': 1})) + self.assertEqual( + doc._delta(), ({}, {"embedded_field.list_field.2.list_field": 1}) + ) doc.save() doc = doc.reload(10) - doc.dict_field['Embedded'] = embedded_1 + doc.dict_field["Embedded"] = embedded_1 doc.save() doc = doc.reload(10) - doc.dict_field['Embedded'].string_field = 'Hello World' - self.assertEqual(doc._get_changed_fields(), - ['dict_field.Embedded.string_field']) - self.assertEqual(doc._delta(), - ({'dict_field.Embedded.string_field': 'Hello World'}, {})) + doc.dict_field["Embedded"].string_field = "Hello World" + self.assertEqual( + doc._get_changed_fields(), ["dict_field.Embedded.string_field"] + ) + self.assertEqual( + doc._delta(), ({"dict_field.Embedded.string_field": "Hello World"}, {}) + ) def test_circular_reference_deltas(self): self.circular_reference_deltas(Document, Document) @@ -277,14 +316,13 @@ class DeltaTest(MongoDBTestCase): self.circular_reference_deltas(DynamicDocument, DynamicDocument) def circular_reference_deltas(self, DocClass1, DocClass2): - class Person(DocClass1): name = StringField() - owns = ListField(ReferenceField('Organization')) + owns = ListField(ReferenceField("Organization")) class Organization(DocClass2): name = StringField() - owner = ReferenceField('Person') + owner = ReferenceField("Person") Person.drop_collection() Organization.drop_collection() @@ -310,16 +348,15 @@ class DeltaTest(MongoDBTestCase): self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): - class Person(DocClass1): name = StringField() - 
owns = ListField(ReferenceField('Organization', dbref=dbref)) - employer = ReferenceField('Organization', dbref=dbref) + owns = ListField(ReferenceField("Organization", dbref=dbref)) + employer = ReferenceField("Organization", dbref=dbref) class Organization(DocClass2): name = StringField() - owner = ReferenceField('Person', dbref=dbref) - employees = ListField(ReferenceField('Person', dbref=dbref)) + owner = ReferenceField("Person", dbref=dbref) + employees = ListField(ReferenceField("Person", dbref=dbref)) Person.drop_collection() Organization.drop_collection() @@ -353,12 +390,11 @@ class DeltaTest(MongoDBTestCase): self.delta_db_field(DynamicDocument) def delta_db_field(self, DocClass): - class Doc(DocClass): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') + string_field = StringField(db_field="db_string_field") + int_field = IntField(db_field="db_int_field") + dict_field = DictField(db_field="db_dict_field") + list_field = ListField(db_field="db_list_field") Doc.drop_collection() doc = Doc() @@ -368,53 +404,53 @@ class DeltaTest(MongoDBTestCase): self.assertEqual(doc._get_changed_fields(), []) self.assertEqual(doc._delta(), ({}, {})) - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['db_string_field']) - self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {})) + doc.string_field = "hello" + self.assertEqual(doc._get_changed_fields(), ["db_string_field"]) + self.assertEqual(doc._delta(), ({"db_string_field": "hello"}, {})) doc._changed_fields = [] doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['db_int_field']) - self.assertEqual(doc._delta(), ({'db_int_field': 1}, {})) + self.assertEqual(doc._get_changed_fields(), ["db_int_field"]) + self.assertEqual(doc._delta(), ({"db_int_field": 1}, {})) doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} + dict_value = {"hello": "world", "ping": "pong"} doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) - self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {})) + self.assertEqual(doc._get_changed_fields(), ["db_dict_field"]) + self.assertEqual(doc._delta(), ({"db_dict_field": dict_value}, {})) doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] + list_value = ["1", 2, {"hello": "world"}] doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['db_list_field']) - self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {})) + self.assertEqual(doc._get_changed_fields(), ["db_list_field"]) + self.assertEqual(doc._delta(), ({"db_list_field": list_value}, {})) # Test unsetting doc._changed_fields = [] doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) - self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["db_dict_field"]) + self.assertEqual(doc._delta(), ({}, {"db_dict_field": 1})) doc._changed_fields = [] doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['db_list_field']) - self.assertEqual(doc._delta(), ({}, {'db_list_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["db_list_field"]) + self.assertEqual(doc._delta(), ({}, {"db_list_field": 1})) # Test it saves that data doc = Doc() doc.save() - doc.string_field = 'hello' + doc.string_field = "hello" doc.int_field = 1 - doc.dict_field = {'hello': 'world'} - 
doc.list_field = ['1', 2, {'hello': 'world'}] + doc.dict_field = {"hello": "world"} + doc.list_field = ["1", 2, {"hello": "world"}] doc.save() doc = doc.reload(10) - self.assertEqual(doc.string_field, 'hello') + self.assertEqual(doc.string_field, "hello") self.assertEqual(doc.int_field, 1) - self.assertEqual(doc.dict_field, {'hello': 'world'}) - self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}]) + self.assertEqual(doc.dict_field, {"hello": "world"}) + self.assertEqual(doc.list_field, ["1", 2, {"hello": "world"}]) def test_delta_recursive_db_field(self): self.delta_recursive_db_field(Document, EmbeddedDocument) @@ -423,20 +459,20 @@ class DeltaTest(MongoDBTestCase): self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) def delta_recursive_db_field(self, DocClass, EmbeddedClass): - class Embedded(EmbeddedClass): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') + string_field = StringField(db_field="db_string_field") + int_field = IntField(db_field="db_int_field") + dict_field = DictField(db_field="db_dict_field") + list_field = ListField(db_field="db_list_field") class Doc(DocClass): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') - embedded_field = EmbeddedDocumentField(Embedded, - db_field='db_embedded_field') + string_field = StringField(db_field="db_string_field") + int_field = IntField(db_field="db_int_field") + dict_field = DictField(db_field="db_dict_field") + list_field = ListField(db_field="db_list_field") + embedded_field = EmbeddedDocumentField( + Embedded, db_field="db_embedded_field" + ) Doc.drop_collection() doc = Doc() @@ -447,171 +483,228 @@ class DeltaTest(MongoDBTestCase): self.assertEqual(doc._delta(), ({}, {})) embedded_1 = Embedded() - embedded_1.string_field = 'hello' + embedded_1.string_field = "hello" embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field']) + self.assertEqual(doc._get_changed_fields(), ["db_embedded_field"]) embedded_delta = { - 'db_string_field': 'hello', - 'db_int_field': 1, - 'db_dict_field': {'hello': 'world'}, - 'db_list_field': ['1', 2, {'hello': 'world'}] + "db_string_field": "hello", + "db_int_field": 1, + "db_dict_field": {"hello": "world"}, + "db_list_field": ["1", 2, {"hello": "world"}], } self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - self.assertEqual(doc._delta(), - ({'db_embedded_field': embedded_delta}, {})) + self.assertEqual(doc._delta(), ({"db_embedded_field": embedded_delta}, {})) doc.save() doc = doc.reload(10) doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_dict_field']) - self.assertEqual(doc.embedded_field._delta(), - ({}, {'db_dict_field': 1})) - self.assertEqual(doc._delta(), - ({}, {'db_embedded_field.db_dict_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_dict_field"]) + self.assertEqual(doc.embedded_field._delta(), ({}, {"db_dict_field": 1})) + self.assertEqual(doc._delta(), ({}, 
{"db_embedded_field.db_dict_field": 1})) doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.dict_field, {}) doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field']) - self.assertEqual(doc.embedded_field._delta(), - ({}, {'db_list_field': 1})) - self.assertEqual(doc._delta(), - ({}, {'db_embedded_field.db_list_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_list_field"]) + self.assertEqual(doc.embedded_field._delta(), ({}, {"db_list_field": 1})) + self.assertEqual(doc._delta(), ({}, {"db_embedded_field.db_list_field": 1})) doc.save() doc = doc.reload(10) self.assertEqual(doc.embedded_field.list_field, []) embedded_2 = Embedded() - embedded_2.string_field = 'hello' + embedded_2.string_field = "hello" embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] + embedded_2.dict_field = {"hello": "world"} + embedded_2.list_field = ["1", 2, {"hello": "world"}] - doc.embedded_field.list_field = ['1', 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'db_list_field': ['1', 2, { - '_cls': 'Embedded', - 'db_string_field': 'hello', - 'db_dict_field': {'hello': 'world'}, - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) + doc.embedded_field.list_field = ["1", 2, embedded_2] + self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_list_field"]) + self.assertEqual( + doc.embedded_field._delta(), + ( + { + "db_list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "db_string_field": "hello", + "db_dict_field": {"hello": "world"}, + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ), + ) - self.assertEqual(doc._delta(), ({ - 'db_embedded_field.db_list_field': ['1', 2, { - '_cls': 'Embedded', - 'db_string_field': 'hello', - 'db_dict_field': {'hello': 'world'}, - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) + self.assertEqual( + doc._delta(), + ( + { + "db_embedded_field.db_list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "db_string_field": "hello", + "db_dict_field": {"hello": "world"}, + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ), + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[0], '1') + self.assertEqual(doc.embedded_field.list_field[0], "1") self.assertEqual(doc.embedded_field.list_field[1], 2) for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], - embedded_2[k]) + self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) - doc.embedded_field.list_field[2].string_field = 'world' - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field.2.db_string_field']) - self.assertEqual(doc.embedded_field._delta(), - ({'db_list_field.2.db_string_field': 'world'}, {})) - self.assertEqual(doc._delta(), - ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, - {})) + doc.embedded_field.list_field[2].string_field = "world" + self.assertEqual( + doc._get_changed_fields(), + ["db_embedded_field.db_list_field.2.db_string_field"], + ) + self.assertEqual( + doc.embedded_field._delta(), + ({"db_list_field.2.db_string_field": "world"}, {}), + ) + self.assertEqual( + doc._delta(), + ({"db_embedded_field.db_list_field.2.db_string_field": 
"world"}, {}), + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'world') + self.assertEqual(doc.embedded_field.list_field[2].string_field, "world") # Test multiple assignments - doc.embedded_field.list_field[2].string_field = 'hello world' + doc.embedded_field.list_field[2].string_field = "hello world" doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field.2']) - self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': { - '_cls': 'Embedded', - 'db_string_field': 'hello world', - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - 'db_dict_field': {'hello': 'world'}}}, {})) - self.assertEqual(doc._delta(), ({ - 'db_embedded_field.db_list_field.2': { - '_cls': 'Embedded', - 'db_string_field': 'hello world', - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - 'db_dict_field': {'hello': 'world'}} - }, {})) + self.assertEqual( + doc._get_changed_fields(), ["db_embedded_field.db_list_field.2"] + ) + self.assertEqual( + doc.embedded_field._delta(), + ( + { + "db_list_field.2": { + "_cls": "Embedded", + "db_string_field": "hello world", + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + "db_dict_field": {"hello": "world"}, + } + }, + {}, + ), + ) + self.assertEqual( + doc._delta(), + ( + { + "db_embedded_field.db_list_field.2": { + "_cls": "Embedded", + "db_string_field": "hello world", + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + "db_dict_field": {"hello": "world"}, + } + }, + {}, + ), + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'hello world') + self.assertEqual(doc.embedded_field.list_field[2].string_field, "hello world") # Test list native methods doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual(doc._delta(), - ({'db_embedded_field.db_list_field.2.db_list_field': - [2, {'hello': 'world'}]}, {})) + self.assertEqual( + doc._delta(), + ( + { + "db_embedded_field.db_list_field.2.db_list_field": [ + 2, + {"hello": "world"}, + ] + }, + {}, + ), + ) doc.save() doc = doc.reload(10) doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual(doc._delta(), - ({'db_embedded_field.db_list_field.2.db_list_field': - [2, {'hello': 'world'}, 1]}, {})) + self.assertEqual( + doc._delta(), + ( + { + "db_embedded_field.db_list_field.2.db_list_field": [ + 2, + {"hello": "world"}, + 1, + ] + }, + {}, + ), + ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, - [2, {'hello': 'world'}, 1]) + self.assertEqual( + doc.embedded_field.list_field[2].list_field, [2, {"hello": "world"}, 1] + ) doc.embedded_field.list_field[2].list_field.sort(key=str) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, - [1, 2, {'hello': 'world'}]) + self.assertEqual( + doc.embedded_field.list_field[2].list_field, [1, 2, {"hello": "world"}] + ) - del doc.embedded_field.list_field[2].list_field[2]['hello'] - self.assertEqual(doc._delta(), - ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1})) + del doc.embedded_field.list_field[2].list_field[2]["hello"] + self.assertEqual( + doc._delta(), + ({}, {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1}), + ) doc.save() doc = doc.reload(10) del doc.embedded_field.list_field[2].list_field - self.assertEqual(doc._delta(), ({}, - 
{'db_embedded_field.db_list_field.2.db_list_field': 1})) + self.assertEqual( + doc._delta(), ({}, {"db_embedded_field.db_list_field.2.db_list_field": 1}) + ) def test_delta_for_dynamic_documents(self): class Person(DynamicDocument): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} Person.drop_collection() p = Person(name="James", age=34) - self.assertEqual(p._delta(), ( - SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) + self.assertEqual( + p._delta(), (SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), {}) + ) p.doc = 123 del p.doc - self.assertEqual(p._delta(), ( - SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) + self.assertEqual( + p._delta(), (SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), {}) + ) p = Person() p.name = "Dean" @@ -620,20 +713,19 @@ class DeltaTest(MongoDBTestCase): p.age = 24 self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ['age']) - self.assertEqual(p._delta(), ({'age': 24}, {})) + self.assertEqual(p._get_changed_fields(), ["age"]) + self.assertEqual(p._delta(), ({"age": 24}, {})) p = Person.objects(age=22).get() p.age = 24 self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ['age']) - self.assertEqual(p._delta(), ({'age': 24}, {})) + self.assertEqual(p._get_changed_fields(), ["age"]) + self.assertEqual(p._delta(), ({"age": 24}, {})) p.save() self.assertEqual(1, Person.objects(age=24).count()) def test_dynamic_delta(self): - class Doc(DynamicDocument): pass @@ -645,41 +737,43 @@ class DeltaTest(MongoDBTestCase): self.assertEqual(doc._get_changed_fields(), []) self.assertEqual(doc._delta(), ({}, {})) - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['string_field']) - self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) + doc.string_field = "hello" + self.assertEqual(doc._get_changed_fields(), ["string_field"]) + self.assertEqual(doc._delta(), ({"string_field": "hello"}, {})) doc._changed_fields = [] doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['int_field']) - self.assertEqual(doc._delta(), ({'int_field': 1}, {})) + self.assertEqual(doc._get_changed_fields(), ["int_field"]) + self.assertEqual(doc._delta(), ({"int_field": 1}, {})) doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} + dict_value = {"hello": "world", "ping": "pong"} doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) + self.assertEqual(doc._get_changed_fields(), ["dict_field"]) + self.assertEqual(doc._delta(), ({"dict_field": dict_value}, {})) doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] + list_value = ["1", 2, {"hello": "world"}] doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) + self.assertEqual(doc._get_changed_fields(), ["list_field"]) + self.assertEqual(doc._delta(), ({"list_field": list_value}, {})) # Test unsetting doc._changed_fields = [] doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["dict_field"]) + self.assertEqual(doc._delta(), ({}, {"dict_field": 1})) doc._changed_fields = [] doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({}, 
{'list_field': 1})) + self.assertEqual(doc._get_changed_fields(), ["list_field"]) + self.assertEqual(doc._delta(), ({}, {"list_field": 1})) def test_delta_with_dbref_true(self): - person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) - employee.name = 'test' + person, organization, employee = self.circular_reference_deltas_2( + Document, Document, True + ) + employee.name = "test" self.assertEqual(organization._get_changed_fields(), []) @@ -690,11 +784,13 @@ class DeltaTest(MongoDBTestCase): organization.employees.append(person) updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertIn('employees', updates) + self.assertIn("employees", updates) def test_delta_with_dbref_false(self): - person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) - employee.name = 'test' + person, organization, employee = self.circular_reference_deltas_2( + Document, Document, False + ) + employee.name = "test" self.assertEqual(organization._get_changed_fields(), []) @@ -705,7 +801,7 @@ class DeltaTest(MongoDBTestCase): organization.employees.append(person) updates, removals = organization._delta() self.assertEqual({}, removals) - self.assertIn('employees', updates) + self.assertIn("employees", updates) def test_nested_nested_fields_mark_as_changed(self): class EmbeddedDoc(EmbeddedDocument): @@ -717,11 +813,13 @@ class DeltaTest(MongoDBTestCase): MyDoc.drop_collection() - mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save() + mydoc = MyDoc( + name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}} + ).save() mydoc = MyDoc.objects.first() - subdoc = mydoc.subs['a']['b'] - subdoc.name = 'bar' + subdoc = mydoc.subs["a"]["b"] + subdoc.name = "bar" self.assertEqual(["name"], subdoc._get_changed_fields()) self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) @@ -741,11 +839,11 @@ class DeltaTest(MongoDBTestCase): MyDoc().save() mydoc = MyDoc.objects.first() - mydoc.subs['a'] = EmbeddedDoc() + mydoc.subs["a"] = EmbeddedDoc() self.assertEqual(["subs.a"], mydoc._get_changed_fields()) - subdoc = mydoc.subs['a'] - subdoc.name = 'bar' + subdoc = mydoc.subs["a"] + subdoc.name = "bar" self.assertEqual(["name"], subdoc._get_changed_fields()) self.assertEqual(["subs.a"], mydoc._get_changed_fields()) @@ -763,16 +861,16 @@ class DeltaTest(MongoDBTestCase): MyDoc.drop_collection() - MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save() + MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save() mydoc = MyDoc.objects.first() - subdoc = mydoc.subs['a'] - subdoc.name = 'bar' + subdoc = mydoc.subs["a"] + subdoc.name = "bar" self.assertEqual(["name"], subdoc._get_changed_fields()) self.assertEqual(["subs.a.name"], mydoc._get_changed_fields()) - mydoc.subs['a'] = EmbeddedDoc() + mydoc.subs["a"] = EmbeddedDoc() self.assertEqual(["subs.a"], mydoc._get_changed_fields()) mydoc.save() @@ -787,39 +885,39 @@ class DeltaTest(MongoDBTestCase): class User(Document): name = StringField() - org = ReferenceField('Organization', required=True) + org = ReferenceField("Organization", required=True) Organization.drop_collection() User.drop_collection() - org1 = Organization(name='Org 1') + org1 = Organization(name="Org 1") org1.save() - org2 = Organization(name='Org 2') + org2 = Organization(name="Org 2") org2.save() - user = User(name='Fred', org=org1) + user = User(name="Fred", org=org1) user.save() org1.reload() org2.reload() user.reload() - self.assertEqual(org1.name, 'Org 1') - self.assertEqual(org2.name, 'Org 
2') - self.assertEqual(user.name, 'Fred') + self.assertEqual(org1.name, "Org 1") + self.assertEqual(org2.name, "Org 2") + self.assertEqual(user.name, "Fred") - user.name = 'Harold' + user.name = "Harold" user.org = org2 - org2.name = 'New Org 2' - self.assertEqual(org2.name, 'New Org 2') + org2.name = "New Org 2" + self.assertEqual(org2.name, "New Org 2") user.save() org2.save() - self.assertEqual(org2.name, 'New Org 2') + self.assertEqual(org2.name, "New Org 2") org2.reload() - self.assertEqual(org2.name, 'New Org 2') + self.assertEqual(org2.name, "New Org 2") def test_delta_for_nested_map_fields(self): class UInfoDocument(Document): @@ -855,10 +953,10 @@ class DeltaTest(MongoDBTestCase): self.assertEqual(True, "users.007.roles.666" in delta[0]) self.assertEqual(True, "users.007.rolist" in delta[0]) self.assertEqual(True, "users.007.info" in delta[0]) - self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"]) - self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) + self.assertEqual("superadmin", delta[0]["users.007.roles.666"]["type"]) + self.assertEqual("oops", delta[0]["users.007.rolist"][0]["type"]) self.assertEqual(uinfo.id, delta[0]["users.007.info"]) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index 44548d27..414d3352 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -3,17 +3,16 @@ import unittest from mongoengine import * from tests.utils import MongoDBTestCase -__all__ = ("TestDynamicDocument", ) +__all__ = ("TestDynamicDocument",) class TestDynamicDocument(MongoDBTestCase): - def setUp(self): super(TestDynamicDocument, self).setUp() class Person(DynamicDocument): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} Person.drop_collection() @@ -26,8 +25,7 @@ class TestDynamicDocument(MongoDBTestCase): p.name = "James" p.age = 34 - self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", - "age": 34}) + self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", "age": 34}) self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"]) p.save() self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"]) @@ -35,7 +33,7 @@ class TestDynamicDocument(MongoDBTestCase): self.assertEqual(self.Person.objects.first().age, 34) # Confirm no changes to self.Person - self.assertFalse(hasattr(self.Person, 'age')) + self.assertFalse(hasattr(self.Person, "age")) def test_change_scope_of_variable(self): """Test changing the scope of a dynamic field has no adverse effects""" @@ -45,11 +43,11 @@ class TestDynamicDocument(MongoDBTestCase): p.save() p = self.Person.objects.get() - p.misc = {'hello': 'world'} + p.misc = {"hello": "world"} p.save() p = self.Person.objects.get() - self.assertEqual(p.misc, {'hello': 'world'}) + self.assertEqual(p.misc, {"hello": "world"}) def test_delete_dynamic_field(self): """Test deleting a dynamic field works""" @@ -60,23 +58,23 @@ class TestDynamicDocument(MongoDBTestCase): p.save() p = self.Person.objects.get() - p.misc = {'hello': 'world'} + p.misc = {"hello": "world"} p.save() p = self.Person.objects.get() - self.assertEqual(p.misc, {'hello': 'world'}) + self.assertEqual(p.misc, {"hello": "world"}) collection = self.db[self.Person._get_collection_name()] obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) + self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "misc", "name"]) del p.misc p.save() p = 
self.Person.objects.get() - self.assertFalse(hasattr(p, 'misc')) + self.assertFalse(hasattr(p, "misc")) obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) + self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "name"]) def test_reload_after_unsetting(self): p = self.Person() @@ -91,77 +89,52 @@ class TestDynamicDocument(MongoDBTestCase): p.update(age=1) self.assertEqual(len(p._data), 3) - self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name']) + self.assertEqual(sorted(p._data.keys()), ["_cls", "id", "name"]) p.reload() self.assertEqual(len(p._data), 4) - self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) + self.assertEqual(sorted(p._data.keys()), ["_cls", "age", "id", "name"]) def test_fields_without_underscore(self): """Ensure we can query dynamic fields""" Person = self.Person - p = self.Person(name='Dean') + p = self.Person(name="Dean") p.save() raw_p = Person.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - { - '_cls': u'Person', - '_id': p.id, - 'name': u'Dean' - } - ) + self.assertEqual(raw_p, {"_cls": u"Person", "_id": p.id, "name": u"Dean"}) - p.name = 'OldDean' - p.newattr = 'garbage' + p.name = "OldDean" + p.newattr = "garbage" p.save() raw_p = Person.objects.as_pymongo().get(id=p.id) self.assertEqual( raw_p, - { - '_cls': u'Person', - '_id': p.id, - 'name': 'OldDean', - 'newattr': u'garbage' - } + {"_cls": u"Person", "_id": p.id, "name": "OldDean", "newattr": u"garbage"}, ) def test_fields_containing_underscore(self): """Ensure we can query dynamic fields""" + class WeirdPerson(DynamicDocument): name = StringField() _name = StringField() WeirdPerson.drop_collection() - p = WeirdPerson(name='Dean', _name='Dean') + p = WeirdPerson(name="Dean", _name="Dean") p.save() raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - { - '_id': p.id, - '_name': u'Dean', - 'name': u'Dean' - } - ) + self.assertEqual(raw_p, {"_id": p.id, "_name": u"Dean", "name": u"Dean"}) - p.name = 'OldDean' - p._name = 'NewDean' - p._newattr1 = 'garbage' # Unknown fields won't be added + p.name = "OldDean" + p._name = "NewDean" + p._newattr1 = "garbage" # Unknown fields won't be added p.save() raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - { - '_id': p.id, - '_name': u'NewDean', - 'name': u'OldDean', - } - ) + self.assertEqual(raw_p, {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"}) def test_dynamic_document_queries(self): """Ensure we can query dynamic fields""" @@ -193,26 +166,25 @@ class TestDynamicDocument(MongoDBTestCase): p2.age = 10 p2.save() - self.assertEqual(Person.objects(age__icontains='ten').count(), 2) + self.assertEqual(Person.objects(age__icontains="ten").count(), 2) self.assertEqual(Person.objects(age__gte=10).count(), 1) def test_complex_data_lookups(self): """Ensure you can query dynamic document dynamic fields""" p = self.Person() - p.misc = {'hello': 'world'} + p.misc = {"hello": "world"} p.save() - self.assertEqual(1, self.Person.objects(misc__hello='world').count()) + self.assertEqual(1, self.Person.objects(misc__hello="world").count()) def test_three_level_complex_data_lookups(self): """Ensure you can query three level document dynamic fields""" - p = self.Person.objects.create( - misc={'hello': {'hello2': 'world'}} - ) - self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count()) + p = self.Person.objects.create(misc={"hello": {"hello2": "world"}}) + self.assertEqual(1, 
self.Person.objects(misc__hello__hello2="world").count()) def test_complex_embedded_document_validation(self): """Ensure embedded dynamic documents may be validated""" + class Embedded(DynamicEmbeddedDocument): content = URLField() @@ -222,10 +194,10 @@ class TestDynamicDocument(MongoDBTestCase): Doc.drop_collection() doc = Doc() - embedded_doc_1 = Embedded(content='http://mongoengine.org') + embedded_doc_1 = Embedded(content="http://mongoengine.org") embedded_doc_1.validate() - embedded_doc_2 = Embedded(content='this is not a url') + embedded_doc_2 = Embedded(content="this is not a url") self.assertRaises(ValidationError, embedded_doc_2.validate) doc.embedded_field_1 = embedded_doc_1 @@ -234,15 +206,17 @@ class TestDynamicDocument(MongoDBTestCase): def test_inheritance(self): """Ensure that dynamic document plays nice with inheritance""" + class Employee(self.Person): salary = IntField() Employee.drop_collection() - self.assertIn('name', Employee._fields) - self.assertIn('salary', Employee._fields) - self.assertEqual(Employee._get_collection_name(), - self.Person._get_collection_name()) + self.assertIn("name", Employee._fields) + self.assertIn("salary", Employee._fields) + self.assertEqual( + Employee._get_collection_name(), self.Person._get_collection_name() + ) joe_bloggs = Employee() joe_bloggs.name = "Joe Bloggs" @@ -258,6 +232,7 @@ class TestDynamicDocument(MongoDBTestCase): def test_embedded_dynamic_document(self): """Test dynamic embedded documents""" + class Embedded(DynamicEmbeddedDocument): pass @@ -268,78 +243,88 @@ class TestDynamicDocument(MongoDBTestCase): doc = Doc() embedded_1 = Embedded() - embedded_1.string_field = 'hello' + embedded_1.string_field = "hello" embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 - self.assertEqual(doc.to_mongo(), { - "embedded_field": { - "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ['1', 2, {'hello': 'world'}] - } - }) - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc.embedded_field.__class__, Embedded) - self.assertEqual(doc.embedded_field.string_field, "hello") - self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(doc.embedded_field.list_field, - ['1', 2, {'hello': 'world'}]) - - def test_complex_embedded_documents(self): - """Test complex dynamic embedded documents setups""" - class Embedded(DynamicEmbeddedDocument): - pass - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - - embedded_1 = Embedded() - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - - embedded_2 = Embedded() - embedded_2.string_field = 'hello' - embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] - - embedded_1.list_field = ['1', 2, embedded_2] - doc.embedded_field = embedded_1 - - self.assertEqual(doc.to_mongo(), { - "embedded_field": { - "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ['1', 2, - {"_cls": "Embedded", + self.assertEqual( + doc.to_mongo(), + { + "embedded_field": { + "_cls": "Embedded", "string_field": "hello", "int_field": 1, "dict_field": {"hello": 
"world"}, - "list_field": ['1', 2, {'hello': 'world'}]} - ] - } - }) + "list_field": ["1", 2, {"hello": "world"}], + } + }, + ) + doc.save() + + doc = Doc.objects.first() + self.assertEqual(doc.embedded_field.__class__, Embedded) + self.assertEqual(doc.embedded_field.string_field, "hello") + self.assertEqual(doc.embedded_field.int_field, 1) + self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"}) + self.assertEqual(doc.embedded_field.list_field, ["1", 2, {"hello": "world"}]) + + def test_complex_embedded_documents(self): + """Test complex dynamic embedded documents setups""" + + class Embedded(DynamicEmbeddedDocument): + pass + + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + + embedded_1 = Embedded() + embedded_1.string_field = "hello" + embedded_1.int_field = 1 + embedded_1.dict_field = {"hello": "world"} + + embedded_2 = Embedded() + embedded_2.string_field = "hello" + embedded_2.int_field = 1 + embedded_2.dict_field = {"hello": "world"} + embedded_2.list_field = ["1", 2, {"hello": "world"}] + + embedded_1.list_field = ["1", 2, embedded_2] + doc.embedded_field = embedded_1 + + self.assertEqual( + doc.to_mongo(), + { + "embedded_field": { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ["1", 2, {"hello": "world"}], + }, + ], + } + }, + ) doc.save() doc = Doc.objects.first() self.assertEqual(doc.embedded_field.__class__, Embedded) self.assertEqual(doc.embedded_field.string_field, "hello") self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(doc.embedded_field.list_field[0], '1') + self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"}) + self.assertEqual(doc.embedded_field.list_field[0], "1") self.assertEqual(doc.embedded_field.list_field[1], 2) embedded_field = doc.embedded_field.list_field[2] @@ -347,9 +332,8 @@ class TestDynamicDocument(MongoDBTestCase): self.assertEqual(embedded_field.__class__, Embedded) self.assertEqual(embedded_field.string_field, "hello") self.assertEqual(embedded_field.int_field, 1) - self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(embedded_field.list_field, ['1', 2, - {'hello': 'world'}]) + self.assertEqual(embedded_field.dict_field, {"hello": "world"}) + self.assertEqual(embedded_field.list_field, ["1", 2, {"hello": "world"}]) def test_dynamic_and_embedded(self): """Ensure embedded documents play nicely""" @@ -392,10 +376,15 @@ class TestDynamicDocument(MongoDBTestCase): Person.drop_collection() - Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save() + Person( + name="Eric", address=Address(city="San Francisco", street_number="1337") + ).save() - self.assertEqual(Person.objects.first().address.street_number, '1337') - self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337') + self.assertEqual(Person.objects.first().address.street_number, "1337") + self.assertEqual( + Person.objects.only("address__street_number").first().address.street_number, + "1337", + ) def test_dynamic_and_embedded_dict_access(self): """Ensure embedded dynamic documents work with dict[] style access""" @@ -435,5 +424,5 @@ class TestDynamicDocument(MongoDBTestCase): self.assertEqual(Person.objects.first().age, 35) -if __name__ == 
'__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 764ef0c5..570e619e 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -10,13 +10,12 @@ from six import iteritems from mongoengine import * from mongoengine.connection import get_db -__all__ = ("IndexesTest", ) +__all__ = ("IndexesTest",) class IndexesTest(unittest.TestCase): - def setUp(self): - self.connection = connect(db='mongoenginetest') + self.connection = connect(db="mongoenginetest") self.db = get_db() class Person(Document): @@ -45,52 +44,43 @@ class IndexesTest(unittest.TestCase): self._index_test(DynamicDocument) def _index_test(self, InheritFrom): - class BlogPost(InheritFrom): - date = DateTimeField(db_field='addDate', default=datetime.now) + date = DateTimeField(db_field="addDate", default=datetime.now) category = StringField() tags = ListField(StringField()) - meta = { - 'indexes': [ - '-date', - 'tags', - ('category', '-date') - ] - } + meta = {"indexes": ["-date", "tags", ("category", "-date")]} - expected_specs = [{'fields': [('addDate', -1)]}, - {'fields': [('tags', 1)]}, - {'fields': [('category', 1), ('addDate', -1)]}] - self.assertEqual(expected_specs, BlogPost._meta['index_specs']) + expected_specs = [ + {"fields": [("addDate", -1)]}, + {"fields": [("tags", 1)]}, + {"fields": [("category", 1), ("addDate", -1)]}, + ] + self.assertEqual(expected_specs, BlogPost._meta["index_specs"]) BlogPost.ensure_indexes() info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') self.assertEqual(len(info), 4) - info = [value['key'] for key, value in iteritems(info)] + info = [value["key"] for key, value in iteritems(info)] for expected in expected_specs: - self.assertIn(expected['fields'], info) + self.assertIn(expected["fields"], info) def _index_test_inheritance(self, InheritFrom): - class BlogPost(InheritFrom): - date = DateTimeField(db_field='addDate', default=datetime.now) + date = DateTimeField(db_field="addDate", default=datetime.now) category = StringField() tags = ListField(StringField()) meta = { - 'indexes': [ - '-date', - 'tags', - ('category', '-date') - ], - 'allow_inheritance': True + "indexes": ["-date", "tags", ("category", "-date")], + "allow_inheritance": True, } - expected_specs = [{'fields': [('_cls', 1), ('addDate', -1)]}, - {'fields': [('_cls', 1), ('tags', 1)]}, - {'fields': [('_cls', 1), ('category', 1), - ('addDate', -1)]}] - self.assertEqual(expected_specs, BlogPost._meta['index_specs']) + expected_specs = [ + {"fields": [("_cls", 1), ("addDate", -1)]}, + {"fields": [("_cls", 1), ("tags", 1)]}, + {"fields": [("_cls", 1), ("category", 1), ("addDate", -1)]}, + ] + self.assertEqual(expected_specs, BlogPost._meta["index_specs"]) BlogPost.ensure_indexes() info = BlogPost.objects._collection.index_information() @@ -99,24 +89,24 @@ class IndexesTest(unittest.TestCase): # the indices on -date and tags will both contain # _cls as first element in the key self.assertEqual(len(info), 4) - info = [value['key'] for key, value in iteritems(info)] + info = [value["key"] for key, value in iteritems(info)] for expected in expected_specs: - self.assertIn(expected['fields'], info) + self.assertIn(expected["fields"], info) class ExtendedBlogPost(BlogPost): title = StringField() - meta = {'indexes': ['title']} + meta = {"indexes": ["title"]} - expected_specs.append({'fields': [('_cls', 1), ('title', 1)]}) - self.assertEqual(expected_specs, ExtendedBlogPost._meta['index_specs']) + 
expected_specs.append({"fields": [("_cls", 1), ("title", 1)]}) + self.assertEqual(expected_specs, ExtendedBlogPost._meta["index_specs"]) BlogPost.drop_collection() ExtendedBlogPost.ensure_indexes() info = ExtendedBlogPost.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] + info = [value["key"] for key, value in iteritems(info)] for expected in expected_specs: - self.assertIn(expected['fields'], info) + self.assertIn(expected["fields"], info) def test_indexes_document_inheritance(self): """Ensure that indexes are used when meta[indexes] is specified for @@ -135,21 +125,15 @@ class IndexesTest(unittest.TestCase): class A(Document): title = StringField() - meta = { - 'indexes': [ - { - 'fields': ('title',), - }, - ], - 'allow_inheritance': True, - } + meta = {"indexes": [{"fields": ("title",)}], "allow_inheritance": True} class B(A): description = StringField() - self.assertEqual(A._meta['index_specs'], B._meta['index_specs']) - self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}], - A._meta['index_specs']) + self.assertEqual(A._meta["index_specs"], B._meta["index_specs"]) + self.assertEqual( + [{"fields": [("_cls", 1), ("title", 1)]}], A._meta["index_specs"] + ) def test_index_no_cls(self): """Ensure index specs are inhertited correctly""" @@ -157,14 +141,12 @@ class IndexesTest(unittest.TestCase): class A(Document): title = StringField() meta = { - 'indexes': [ - {'fields': ('title',), 'cls': False}, - ], - 'allow_inheritance': True, - 'index_cls': False - } + "indexes": [{"fields": ("title",), "cls": False}], + "allow_inheritance": True, + "index_cls": False, + } - self.assertEqual([('title', 1)], A._meta['index_specs'][0]['fields']) + self.assertEqual([("title", 1)], A._meta["index_specs"][0]["fields"]) A._get_collection().drop_indexes() A.ensure_indexes() info = A._get_collection().index_information() @@ -174,34 +156,30 @@ class IndexesTest(unittest.TestCase): c = StringField() d = StringField() meta = { - 'indexes': [{'fields': ['c']}, {'fields': ['d'], 'cls': True}], - 'allow_inheritance': True + "indexes": [{"fields": ["c"]}, {"fields": ["d"], "cls": True}], + "allow_inheritance": True, } - self.assertEqual([('c', 1)], B._meta['index_specs'][1]['fields']) - self.assertEqual([('_cls', 1), ('d', 1)], B._meta['index_specs'][2]['fields']) + + self.assertEqual([("c", 1)], B._meta["index_specs"][1]["fields"]) + self.assertEqual([("_cls", 1), ("d", 1)], B._meta["index_specs"][2]["fields"]) def test_build_index_spec_is_not_destructive(self): - class MyDoc(Document): keywords = StringField() - meta = { - 'indexes': ['keywords'], - 'allow_inheritance': False - } + meta = {"indexes": ["keywords"], "allow_inheritance": False} - self.assertEqual(MyDoc._meta['index_specs'], - [{'fields': [('keywords', 1)]}]) + self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}]) # Force index creation MyDoc.ensure_indexes() - self.assertEqual(MyDoc._meta['index_specs'], - [{'fields': [('keywords', 1)]}]) + self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}]) def test_embedded_document_index_meta(self): """Ensure that embedded document indexes are created explicitly """ + class Rank(EmbeddedDocument): title = StringField(required=True) @@ -209,138 +187,123 @@ class IndexesTest(unittest.TestCase): name = StringField(required=True) rank = EmbeddedDocumentField(Rank, required=False) - meta = { - 'indexes': [ - 'rank.title', - ], - 'allow_inheritance': False - } + meta = {"indexes": ["rank.title"], 
"allow_inheritance": False} - self.assertEqual([{'fields': [('rank.title', 1)]}], - Person._meta['index_specs']) + self.assertEqual([{"fields": [("rank.title", 1)]}], Person._meta["index_specs"]) Person.drop_collection() # Indexes are lazy so use list() to perform query list(Person.objects) info = Person.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('rank.title', 1)], info) + info = [value["key"] for key, value in iteritems(info)] + self.assertIn([("rank.title", 1)], info) def test_explicit_geo2d_index(self): """Ensure that geo2d indexes work when created via meta[indexes] """ + class Place(Document): location = DictField() - meta = { - 'allow_inheritance': True, - 'indexes': [ - '*location.point', - ] - } + meta = {"allow_inheritance": True, "indexes": ["*location.point"]} - self.assertEqual([{'fields': [('location.point', '2d')]}], - Place._meta['index_specs']) + self.assertEqual( + [{"fields": [("location.point", "2d")]}], Place._meta["index_specs"] + ) Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', '2d')], info) + info = [value["key"] for key, value in iteritems(info)] + self.assertIn([("location.point", "2d")], info) def test_explicit_geo2d_index_embedded(self): """Ensure that geo2d indexes work when created via meta[indexes] """ + class EmbeddedLocation(EmbeddedDocument): location = DictField() class Place(Document): - current = DictField(field=EmbeddedDocumentField('EmbeddedLocation')) - meta = { - 'allow_inheritance': True, - 'indexes': [ - '*current.location.point', - ] - } + current = DictField(field=EmbeddedDocumentField("EmbeddedLocation")) + meta = {"allow_inheritance": True, "indexes": ["*current.location.point"]} - self.assertEqual([{'fields': [('current.location.point', '2d')]}], - Place._meta['index_specs']) + self.assertEqual( + [{"fields": [("current.location.point", "2d")]}], Place._meta["index_specs"] + ) Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('current.location.point', '2d')], info) + info = [value["key"] for key, value in iteritems(info)] + self.assertIn([("current.location.point", "2d")], info) def test_explicit_geosphere_index(self): """Ensure that geosphere indexes work when created via meta[indexes] """ + class Place(Document): location = DictField() - meta = { - 'allow_inheritance': True, - 'indexes': [ - '(location.point', - ] - } + meta = {"allow_inheritance": True, "indexes": ["(location.point"]} - self.assertEqual([{'fields': [('location.point', '2dsphere')]}], - Place._meta['index_specs']) + self.assertEqual( + [{"fields": [("location.point", "2dsphere")]}], Place._meta["index_specs"] + ) Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', '2dsphere')], info) + info = [value["key"] for key, value in iteritems(info)] + self.assertIn([("location.point", "2dsphere")], info) def test_explicit_geohaystack_index(self): """Ensure that geohaystack indexes work when created via meta[indexes] """ - raise SkipTest('GeoHaystack index creation is not supported for now' - 'from meta, as it requires a bucketSize parameter.') + raise SkipTest( + "GeoHaystack index creation is not supported for now" + "from meta, as it requires a bucketSize parameter." 
+ ) class Place(Document): location = DictField() name = StringField() - meta = { - 'indexes': [ - (')location.point', 'name') - ] - } - self.assertEqual([{'fields': [('location.point', 'geoHaystack'), ('name', 1)]}], - Place._meta['index_specs']) + meta = {"indexes": [(")location.point", "name")]} + + self.assertEqual( + [{"fields": [("location.point", "geoHaystack"), ("name", 1)]}], + Place._meta["index_specs"], + ) Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', 'geoHaystack')], info) + info = [value["key"] for key, value in iteritems(info)] + self.assertIn([("location.point", "geoHaystack")], info) def test_create_geohaystack_index(self): """Ensure that geohaystack indexes can be created """ + class Place(Document): location = DictField() name = StringField() - Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) + Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info) + info = [value["key"] for key, value in iteritems(info)] + self.assertIn([("location.point", "geoHaystack"), ("name", 1)], info) def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains dictionaries instead of lists. """ + class BlogPost(Document): - date = DateTimeField(db_field='addDate', default=datetime.now) + date = DateTimeField(db_field="addDate", default=datetime.now) category = StringField() tags = ListField(StringField()) - meta = { - 'indexes': [ - {'fields': ['-date'], 'unique': True, 'sparse': True}, - ], - } + meta = {"indexes": [{"fields": ["-date"], "unique": True, "sparse": True}]} - self.assertEqual([{'fields': [('addDate', -1)], 'unique': True, - 'sparse': True}], - BlogPost._meta['index_specs']) + self.assertEqual( + [{"fields": [("addDate", -1)], "unique": True, "sparse": True}], + BlogPost._meta["index_specs"], + ) BlogPost.drop_collection() @@ -351,48 +314,48 @@ class IndexesTest(unittest.TestCase): # Indexes are lazy so use list() to perform query list(BlogPost.objects) info = BlogPost.objects._collection.index_information() - info = [(value['key'], - value.get('unique', False), - value.get('sparse', False)) - for key, value in iteritems(info)] - self.assertIn(([('addDate', -1)], True, True), info) + info = [ + (value["key"], value.get("unique", False), value.get("sparse", False)) + for key, value in iteritems(info) + ] + self.assertIn(([("addDate", -1)], True, True), info) BlogPost.drop_collection() def test_abstract_index_inheritance(self): - class UserBase(Document): user_guid = StringField(required=True) meta = { - 'abstract': True, - 'indexes': ['user_guid'], - 'allow_inheritance': True + "abstract": True, + "indexes": ["user_guid"], + "allow_inheritance": True, } class Person(UserBase): name = StringField() - meta = { - 'indexes': ['name'], - } + meta = {"indexes": ["name"]} + Person.drop_collection() - Person(name="test", user_guid='123').save() + Person(name="test", user_guid="123").save() self.assertEqual(1, Person.objects.count()) info = Person.objects._collection.index_information() - self.assertEqual(sorted(info.keys()), - ['_cls_1_name_1', '_cls_1_user_guid_1', '_id_']) + self.assertEqual( + sorted(info.keys()), ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"] + ) def test_disable_index_creation(self): """Tests 
setting auto_create_index to False on the connection will disable any index generation. """ + class User(Document): meta = { - 'allow_inheritance': True, - 'indexes': ['user_guid'], - 'auto_create_index': False + "allow_inheritance": True, + "indexes": ["user_guid"], + "auto_create_index": False, } user_guid = StringField(required=True) @@ -401,88 +364,81 @@ class IndexesTest(unittest.TestCase): User.drop_collection() - User(user_guid='123').save() - MongoUser(user_guid='123').save() + User(user_guid="123").save() + MongoUser(user_guid="123").save() self.assertEqual(2, User.objects.count()) info = User.objects._collection.index_information() - self.assertEqual(list(info.keys()), ['_id_']) + self.assertEqual(list(info.keys()), ["_id_"]) User.ensure_indexes() info = User.objects._collection.index_information() - self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_']) + self.assertEqual(sorted(info.keys()), ["_cls_1_user_guid_1", "_id_"]) def test_embedded_document_index(self): """Tests settings an index on an embedded document """ + class Date(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") class BlogPost(Document): title = StringField() date = EmbeddedDocumentField(Date) - meta = { - 'indexes': [ - '-date.year' - ], - } + meta = {"indexes": ["-date.year"]} BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() - self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1']) + self.assertEqual(sorted(info.keys()), ["_id_", "date.yr_-1"]) def test_list_embedded_document_index(self): """Ensure list embedded documents can be indexed """ + class Tag(EmbeddedDocument): - name = StringField(db_field='tag') + name = StringField(db_field="tag") class BlogPost(Document): title = StringField() tags = ListField(EmbeddedDocumentField(Tag)) - meta = { - 'indexes': [ - 'tags.name' - ] - } + meta = {"indexes": ["tags.name"]} BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # we don't use _cls in with list fields by default - self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1']) + self.assertEqual(sorted(info.keys()), ["_id_", "tags.tag_1"]) - post1 = BlogPost(title="Embedded Indexes tests in place", - tags=[Tag(name="about"), Tag(name="time")]) + post1 = BlogPost( + title="Embedded Indexes tests in place", + tags=[Tag(name="about"), Tag(name="time")], + ) post1.save() def test_recursive_embedded_objects_dont_break_indexes(self): - class RecursiveObject(EmbeddedDocument): - obj = EmbeddedDocumentField('self') + obj = EmbeddedDocumentField("self") class RecursiveDocument(Document): recursive_obj = EmbeddedDocumentField(RecursiveObject) - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} RecursiveDocument.ensure_indexes() info = RecursiveDocument._get_collection().index_information() - self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_']) + self.assertEqual(sorted(info.keys()), ["_cls_1", "_id_"]) def test_covered_index(self): """Ensure that covered indexes can be used """ + class Test(Document): a = IntField() b = IntField() - meta = { - 'indexes': ['a'], - 'allow_inheritance': False - } + meta = {"indexes": ["a"], "allow_inheritance": False} Test.drop_collection() @@ -491,45 +447,51 @@ class IndexesTest(unittest.TestCase): # Need to be explicit about covered indexes as mongoDB doesn't know if # the documents returned might have more keys in that here. 
- query_plan = Test.objects(id=obj.id).exclude('a').explain() + query_plan = Test.objects(id=obj.id).exclude("a").explain() self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IDHACK' + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage"), + "IDHACK", ) - query_plan = Test.objects(id=obj.id).only('id').explain() + query_plan = Test.objects(id=obj.id).only("id").explain() self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IDHACK' + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage"), + "IDHACK", ) - query_plan = Test.objects(a=1).only('a').exclude('id').explain() + query_plan = Test.objects(a=1).only("a").exclude("id").explain() self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IXSCAN' + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage"), + "IXSCAN", ) self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('stage'), - 'PROJECTION' + query_plan.get("queryPlanner").get("winningPlan").get("stage"), "PROJECTION" ) query_plan = Test.objects(a=1).explain() self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IXSCAN' + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage"), + "IXSCAN", ) self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('stage'), - 'FETCH' + query_plan.get("queryPlanner").get("winningPlan").get("stage"), "FETCH" ) def test_index_on_id(self): class BlogPost(Document): - meta = { - 'indexes': [ - ['categories', 'id'] - ] - } + meta = {"indexes": [["categories", "id"]]} title = StringField(required=True) description = StringField(required=True) @@ -538,22 +500,16 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() indexes = BlogPost.objects._collection.index_information() - self.assertEqual(indexes['categories_1__id_1']['key'], - [('categories', 1), ('_id', 1)]) + self.assertEqual( + indexes["categories_1__id_1"]["key"], [("categories", 1), ("_id", 1)] + ) def test_hint(self): - TAGS_INDEX_NAME = 'tags_1' + TAGS_INDEX_NAME = "tags_1" class BlogPost(Document): tags = ListField(StringField()) - meta = { - 'indexes': [ - { - 'fields': ['tags'], - 'name': TAGS_INDEX_NAME - } - ], - } + meta = {"indexes": [{"fields": ["tags"], "name": TAGS_INDEX_NAME}]} BlogPost.drop_collection() @@ -562,41 +518,42 @@ class IndexesTest(unittest.TestCase): BlogPost(tags=tags).save() # Hinting by shape should work. - self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) + self.assertEqual(BlogPost.objects.hint([("tags", 1)]).count(), 10) # Hinting by index name should work. self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10) # Clearing the hint should work fine. self.assertEqual(BlogPost.objects.hint().count(), 10) - self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).hint().count(), 10) + self.assertEqual(BlogPost.objects.hint([("ZZ", 1)]).hint().count(), 10) # Hinting on a non-existent index shape should fail. with self.assertRaises(OperationFailure): - BlogPost.objects.hint([('ZZ', 1)]).count() + BlogPost.objects.hint([("ZZ", 1)]).count() # Hinting on a non-existent index name should fail. 
with self.assertRaises(OperationFailure): - BlogPost.objects.hint('Bad Name').count() + BlogPost.objects.hint("Bad Name").count() # Invalid shape argument (missing list brackets) should fail. with self.assertRaises(ValueError): - BlogPost.objects.hint(('tags', 1)).count() + BlogPost.objects.hint(("tags", 1)).count() def test_unique(self): """Ensure that uniqueness constraints are applied to fields. """ + class BlogPost(Document): title = StringField() slug = StringField(unique=True) BlogPost.drop_collection() - post1 = BlogPost(title='test1', slug='test') + post1 = BlogPost(title="test1", slug="test") post1.save() # Two posts with the same slug is not allowed - post2 = BlogPost(title='test2', slug='test') + post2 = BlogPost(title="test2", slug="test") self.assertRaises(NotUniqueError, post2.save) self.assertRaises(NotUniqueError, BlogPost.objects.insert, post2) @@ -605,54 +562,62 @@ class IndexesTest(unittest.TestCase): def test_primary_key_unique_not_working(self): """Relates to #1445""" + class Blog(Document): id = StringField(primary_key=True, unique=True) Blog.drop_collection() with self.assertRaises(OperationFailure) as ctx_err: - Blog(id='garbage').save() + Blog(id="garbage").save() # One of the errors below should happen. Which one depends on the # PyMongo version and dict order. err_msg = str(ctx_err.exception) self.assertTrue( - any([ - "The field 'unique' is not valid for an _id index specification" in err_msg, - "The field 'background' is not valid for an _id index specification" in err_msg, - "The field 'sparse' is not valid for an _id index specification" in err_msg, - ]) + any( + [ + "The field 'unique' is not valid for an _id index specification" + in err_msg, + "The field 'background' is not valid for an _id index specification" + in err_msg, + "The field 'sparse' is not valid for an _id index specification" + in err_msg, + ] + ) ) def test_unique_with(self): """Ensure that unique_with constraints are applied to fields. """ + class Date(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") class BlogPost(Document): title = StringField() date = EmbeddedDocumentField(Date) - slug = StringField(unique_with='date.year') + slug = StringField(unique_with="date.year") BlogPost.drop_collection() - post1 = BlogPost(title='test1', date=Date(year=2009), slug='test') + post1 = BlogPost(title="test1", date=Date(year=2009), slug="test") post1.save() # day is different so won't raise exception - post2 = BlogPost(title='test2', date=Date(year=2010), slug='test') + post2 = BlogPost(title="test2", date=Date(year=2010), slug="test") post2.save() # Now there will be two docs with the same slug and the same day: fail - post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') + post3 = BlogPost(title="test3", date=Date(year=2010), slug="test") self.assertRaises(OperationError, post3.save) def test_unique_embedded_document(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. 
""" + class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") slug = StringField(unique=True) class BlogPost(Document): @@ -661,18 +626,15 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() - post1 = BlogPost(title='test1', - sub=SubDocument(year=2009, slug="test")) + post1 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise exception - post2 = BlogPost(title='test2', - sub=SubDocument(year=2010, slug='another-slug')) + post2 = BlogPost(title="test2", sub=SubDocument(year=2010, slug="another-slug")) post2.save() # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', - sub=SubDocument(year=2010, slug='test')) + post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test")) self.assertRaises(NotUniqueError, post3.save) def test_unique_embedded_document_in_list(self): @@ -681,8 +643,9 @@ class IndexesTest(unittest.TestCase): embedded documents, even when the embedded documents in in a list field. """ + class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") slug = StringField(unique=True) class BlogPost(Document): @@ -692,16 +655,15 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() post1 = BlogPost( - title='test1', subs=[ - SubDocument(year=2009, slug='conflict'), - SubDocument(year=2009, slug='conflict') - ] + title="test1", + subs=[ + SubDocument(year=2009, slug="conflict"), + SubDocument(year=2009, slug="conflict"), + ], ) post1.save() - post2 = BlogPost( - title='test2', subs=[SubDocument(year=2014, slug='conflict')] - ) + post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) self.assertRaises(NotUniqueError, post2.save) @@ -711,33 +673,32 @@ class IndexesTest(unittest.TestCase): embedded documents, even when the embedded documents in a sorted list field. """ + class SubDocument(EmbeddedDocument): year = IntField() slug = StringField(unique=True) class BlogPost(Document): title = StringField() - subs = SortedListField(EmbeddedDocumentField(SubDocument), - ordering='year') + subs = SortedListField(EmbeddedDocumentField(SubDocument), ordering="year") BlogPost.drop_collection() post1 = BlogPost( - title='test1', subs=[ - SubDocument(year=2009, slug='conflict'), - SubDocument(year=2009, slug='conflict') - ] + title="test1", + subs=[ + SubDocument(year=2009, slug="conflict"), + SubDocument(year=2009, slug="conflict"), + ], ) post1.save() # confirm that the unique index is created indexes = BlogPost._get_collection().index_information() - self.assertIn('subs.slug_1', indexes) - self.assertTrue(indexes['subs.slug_1']['unique']) + self.assertIn("subs.slug_1", indexes) + self.assertTrue(indexes["subs.slug_1"]["unique"]) - post2 = BlogPost( - title='test2', subs=[SubDocument(year=2014, slug='conflict')] - ) + post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) self.assertRaises(NotUniqueError, post2.save) @@ -747,6 +708,7 @@ class IndexesTest(unittest.TestCase): embedded documents, even when the embedded documents in an embedded list field. 
""" + class SubDocument(EmbeddedDocument): year = IntField() slug = StringField(unique=True) @@ -758,21 +720,20 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() post1 = BlogPost( - title='test1', subs=[ - SubDocument(year=2009, slug='conflict'), - SubDocument(year=2009, slug='conflict') - ] + title="test1", + subs=[ + SubDocument(year=2009, slug="conflict"), + SubDocument(year=2009, slug="conflict"), + ], ) post1.save() # confirm that the unique index is created indexes = BlogPost._get_collection().index_information() - self.assertIn('subs.slug_1', indexes) - self.assertTrue(indexes['subs.slug_1']['unique']) + self.assertIn("subs.slug_1", indexes) + self.assertTrue(indexes["subs.slug_1"]["unique"]) - post2 = BlogPost( - title='test2', subs=[SubDocument(year=2014, slug='conflict')] - ) + post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) self.assertRaises(NotUniqueError, post2.save) @@ -780,60 +741,51 @@ class IndexesTest(unittest.TestCase): """Ensure that uniqueness constraints are applied to fields on embedded documents. And work with unique_with as well. """ + class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") slug = StringField(unique=True) class BlogPost(Document): - title = StringField(unique_with='sub.year') + title = StringField(unique_with="sub.year") sub = EmbeddedDocumentField(SubDocument) BlogPost.drop_collection() - post1 = BlogPost(title='test1', - sub=SubDocument(year=2009, slug="test")) + post1 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise exception - post2 = BlogPost(title='test2', - sub=SubDocument(year=2010, slug='another-slug')) + post2 = BlogPost(title="test2", sub=SubDocument(year=2010, slug="another-slug")) post2.save() # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', - sub=SubDocument(year=2010, slug='test')) + post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test")) self.assertRaises(NotUniqueError, post3.save) # Now there will be two docs with the same title and year - post3 = BlogPost(title='test1', - sub=SubDocument(year=2009, slug='test-1')) + post3 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test-1")) self.assertRaises(NotUniqueError, post3.save) def test_ttl_indexes(self): - class Log(Document): created = DateTimeField(default=datetime.now) - meta = { - 'indexes': [ - {'fields': ['created'], 'expireAfterSeconds': 3600} - ] - } + meta = {"indexes": [{"fields": ["created"], "expireAfterSeconds": 3600}]} Log.drop_collection() # Indexes are lazy so use list() to perform query list(Log.objects) info = Log.objects._collection.index_information() - self.assertEqual(3600, - info['created_1']['expireAfterSeconds']) + self.assertEqual(3600, info["created_1"]["expireAfterSeconds"]) def test_index_drop_dups_silently_ignored(self): class Customer(Document): cust_id = IntField(unique=True, required=True) meta = { - 'indexes': ['cust_id'], - 'index_drop_dups': True, - 'allow_inheritance': False, + "indexes": ["cust_id"], + "index_drop_dups": True, + "allow_inheritance": False, } Customer.drop_collection() @@ -843,12 +795,10 @@ class IndexesTest(unittest.TestCase): """Ensure that 'unique' constraints aren't overridden by meta.indexes. 
""" + class Customer(Document): cust_id = IntField(unique=True, required=True) - meta = { - 'indexes': ['cust_id'], - 'allow_inheritance': False, - } + meta = {"indexes": ["cust_id"], "allow_inheritance": False} Customer.drop_collection() cust = Customer(cust_id=1) @@ -870,37 +820,39 @@ class IndexesTest(unittest.TestCase): """If you set a field as primary, then unexpected behaviour can occur. You won't create a duplicate but you will update an existing document. """ + class User(Document): name = StringField(primary_key=True) password = StringField() User.drop_collection() - user = User(name='huangz', password='secret') + user = User(name="huangz", password="secret") user.save() - user = User(name='huangz', password='secret2') + user = User(name="huangz", password="secret2") user.save() self.assertEqual(User.objects.count(), 1) - self.assertEqual(User.objects.get().password, 'secret2') + self.assertEqual(User.objects.get().password, "secret2") def test_unique_and_primary_create(self): """Create a new record with a duplicate primary key throws an exception """ + class User(Document): name = StringField(primary_key=True) password = StringField() User.drop_collection() - User.objects.create(name='huangz', password='secret') + User.objects.create(name="huangz", password="secret") with self.assertRaises(NotUniqueError): - User.objects.create(name='huangz', password='secret2') + User.objects.create(name="huangz", password="secret2") self.assertEqual(User.objects.count(), 1) - self.assertEqual(User.objects.get().password, 'secret') + self.assertEqual(User.objects.get().password, "secret") def test_index_with_pk(self): """Ensure you can use `pk` as part of a query""" @@ -909,21 +861,24 @@ class IndexesTest(unittest.TestCase): comment_id = IntField(required=True) try: + class BlogPost(Document): comments = EmbeddedDocumentField(Comment) - meta = {'indexes': [ - {'fields': ['pk', 'comments.comment_id'], - 'unique': True}]} + meta = { + "indexes": [ + {"fields": ["pk", "comments.comment_id"], "unique": True} + ] + } + except UnboundLocalError: - self.fail('Unbound local error at index + pk definition') + self.fail("Unbound local error at index + pk definition") info = BlogPost.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] - index_item = [('_id', 1), ('comments.comment_id', 1)] + info = [value["key"] for key, value in iteritems(info)] + index_item = [("_id", 1), ("comments.comment_id", 1)] self.assertIn(index_item, info) def test_compound_key_embedded(self): - class CompoundKey(EmbeddedDocument): name = StringField(required=True) term = StringField(required=True) @@ -935,12 +890,12 @@ class IndexesTest(unittest.TestCase): my_key = CompoundKey(name="n", term="ok") report = ReportEmbedded(text="OK", key=my_key).save() - self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, - report.to_mongo()) + self.assertEqual( + {"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo() + ) self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key)) def test_compound_key_dictfield(self): - class ReportDictField(Document): key = DictField(primary_key=True) text = StringField() @@ -948,65 +903,60 @@ class IndexesTest(unittest.TestCase): my_key = {"name": "n", "term": "ok"} report = ReportDictField(text="OK", key=my_key).save() - self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, - report.to_mongo()) + self.assertEqual( + {"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo() + ) # We can't directly call 
ReportDictField.objects.get(pk=my_key), # because dicts are unordered, and if the order in MongoDB is # different than the one in `my_key`, this test will fail. - self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key['name'])) - self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key['term'])) + self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key["name"])) + self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key["term"])) def test_string_indexes(self): - class MyDoc(Document): provider_ids = DictField() - meta = { - "indexes": ["provider_ids.foo", "provider_ids.bar"], - } + meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]} info = MyDoc.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('provider_ids.foo', 1)], info) - self.assertIn([('provider_ids.bar', 1)], info) + info = [value["key"] for key, value in iteritems(info)] + self.assertIn([("provider_ids.foo", 1)], info) + self.assertIn([("provider_ids.bar", 1)], info) def test_sparse_compound_indexes(self): - class MyDoc(Document): provider_ids = DictField() meta = { - "indexes": [{'fields': ("provider_ids.foo", "provider_ids.bar"), - 'sparse': True}], + "indexes": [ + {"fields": ("provider_ids.foo", "provider_ids.bar"), "sparse": True} + ] } info = MyDoc.objects._collection.index_information() - self.assertEqual([('provider_ids.foo', 1), ('provider_ids.bar', 1)], - info['provider_ids.foo_1_provider_ids.bar_1']['key']) - self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) + self.assertEqual( + [("provider_ids.foo", 1), ("provider_ids.bar", 1)], + info["provider_ids.foo_1_provider_ids.bar_1"]["key"], + ) + self.assertTrue(info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"]) def test_text_indexes(self): class Book(Document): title = DictField() - meta = { - "indexes": ["$title"], - } + meta = {"indexes": ["$title"]} indexes = Book.objects._collection.index_information() self.assertIn("title_text", indexes) key = indexes["title_text"]["key"] - self.assertIn(('_fts', 'text'), key) + self.assertIn(("_fts", "text"), key) def test_hashed_indexes(self): - class Book(Document): ref_id = StringField() - meta = { - "indexes": ["#ref_id"], - } + meta = {"indexes": ["#ref_id"]} indexes = Book.objects._collection.index_information() self.assertIn("ref_id_hashed", indexes) - self.assertIn(('ref_id', 'hashed'), indexes["ref_id_hashed"]["key"]) + self.assertIn(("ref_id", "hashed"), indexes["ref_id_hashed"]["key"]) def test_indexes_after_database_drop(self): """ @@ -1017,35 +967,36 @@ class IndexesTest(unittest.TestCase): """ # Use a new connection and database since dropping the database could # cause concurrent tests to fail. 
- connection = connect(db='tempdatabase', - alias='test_indexes_after_database_drop') + connection = connect( + db="tempdatabase", alias="test_indexes_after_database_drop" + ) class BlogPost(Document): title = StringField() slug = StringField(unique=True) - meta = {'db_alias': 'test_indexes_after_database_drop'} + meta = {"db_alias": "test_indexes_after_database_drop"} try: BlogPost.drop_collection() # Create Post #1 - post1 = BlogPost(title='test1', slug='test') + post1 = BlogPost(title="test1", slug="test") post1.save() # Drop the Database - connection.drop_database('tempdatabase') + connection.drop_database("tempdatabase") # Re-create Post #1 - post1 = BlogPost(title='test1', slug='test') + post1 = BlogPost(title="test1", slug="test") post1.save() # Create Post #2 - post2 = BlogPost(title='test2', slug='test') + post2 = BlogPost(title="test2", slug="test") self.assertRaises(NotUniqueError, post2.save) finally: # Drop the temporary database at the end - connection.drop_database('tempdatabase') + connection.drop_database("tempdatabase") def test_index_dont_send_cls_option(self): """ @@ -1057,24 +1008,19 @@ class IndexesTest(unittest.TestCase): options that are passed to ensureIndex. For more details, see: https://jira.mongodb.org/browse/SERVER-769 """ + class TestDoc(Document): txt = StringField() meta = { - 'allow_inheritance': True, - 'indexes': [ - {'fields': ('txt',), 'cls': False} - ] + "allow_inheritance": True, + "indexes": [{"fields": ("txt",), "cls": False}], } class TestChildDoc(TestDoc): txt2 = StringField() - meta = { - 'indexes': [ - {'fields': ('txt2',), 'cls': False} - ] - } + meta = {"indexes": [{"fields": ("txt2",), "cls": False}]} TestDoc.drop_collection() TestDoc.ensure_indexes() @@ -1082,54 +1028,51 @@ class IndexesTest(unittest.TestCase): index_info = TestDoc._get_collection().index_information() for key in index_info: - del index_info[key]['v'] # drop the index version - we don't care about that here - if 'ns' in index_info[key]: - del index_info[key]['ns'] # drop the index namespace - we don't care about that here, MongoDB 3+ - if 'dropDups' in index_info[key]: - del index_info[key]['dropDups'] # drop the index dropDups - it is deprecated in MongoDB 3+ + del index_info[key][ + "v" + ] # drop the index version - we don't care about that here + if "ns" in index_info[key]: + del index_info[key][ + "ns" + ] # drop the index namespace - we don't care about that here, MongoDB 3+ + if "dropDups" in index_info[key]: + del index_info[key][ + "dropDups" + ] # drop the index dropDups - it is deprecated in MongoDB 3+ - self.assertEqual(index_info, { - 'txt_1': { - 'key': [('txt', 1)], - 'background': False + self.assertEqual( + index_info, + { + "txt_1": {"key": [("txt", 1)], "background": False}, + "_id_": {"key": [("_id", 1)]}, + "txt2_1": {"key": [("txt2", 1)], "background": False}, + "_cls_1": {"key": [("_cls", 1)], "background": False}, }, - '_id_': { - 'key': [('_id', 1)], - }, - 'txt2_1': { - 'key': [('txt2', 1)], - 'background': False - }, - '_cls_1': { - 'key': [('_cls', 1)], - 'background': False, - } - }) + ) def test_compound_index_underscore_cls_not_overwritten(self): """ Test that the compound index doesn't get another _cls when it is specified """ + class TestDoc(Document): shard_1 = StringField() txt_1 = StringField() meta = { - 'collection': 'test', - 'allow_inheritance': True, - 'sparse': True, - 'shard_key': 'shard_1', - 'indexes': [ - ('shard_1', '_cls', 'txt_1'), - ] + "collection": "test", + "allow_inheritance": True, + "sparse": True, + "shard_key": 
"shard_1", + "indexes": [("shard_1", "_cls", "txt_1")], } TestDoc.drop_collection() TestDoc.ensure_indexes() index_info = TestDoc._get_collection().index_information() - self.assertIn('shard_1_1__cls_1_txt_1_1', index_info) + self.assertIn("shard_1_1__cls_1_txt_1_1", index_info) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index d81039f4..4f21d5f4 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -4,18 +4,24 @@ import warnings from six import iteritems -from mongoengine import (BooleanField, Document, EmbeddedDocument, - EmbeddedDocumentField, GenericReferenceField, - IntField, ReferenceField, StringField) +from mongoengine import ( + BooleanField, + Document, + EmbeddedDocument, + EmbeddedDocumentField, + GenericReferenceField, + IntField, + ReferenceField, + StringField, +) from mongoengine.pymongo_support import list_collection_names from tests.utils import MongoDBTestCase from tests.fixtures import Base -__all__ = ('InheritanceTest', ) +__all__ = ("InheritanceTest",) class InheritanceTest(MongoDBTestCase): - def tearDown(self): for collection in list_collection_names(self.db): self.db.drop_collection(collection) @@ -25,16 +31,16 @@ class InheritanceTest(MongoDBTestCase): # and when object gets reloaded (prevent regression of #1950) class EmbedData(EmbeddedDocument): data = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class DataDoc(Document): name = StringField() embed = EmbeddedDocumentField(EmbedData) - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} - test_doc = DataDoc(name='test', embed=EmbedData(data='data')) - self.assertEqual(test_doc._cls, 'DataDoc') - self.assertEqual(test_doc.embed._cls, 'EmbedData') + test_doc = DataDoc(name="test", embed=EmbedData(data="data")) + self.assertEqual(test_doc._cls, "DataDoc") + self.assertEqual(test_doc.embed._cls, "EmbedData") test_doc.save() saved_doc = DataDoc.objects.with_id(test_doc.id) self.assertEqual(test_doc._cls, saved_doc._cls) @@ -44,163 +50,234 @@ class InheritanceTest(MongoDBTestCase): def test_superclasses(self): """Ensure that the correct list of superclasses is assembled. """ + class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass + meta = {"allow_inheritance": True} + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Fish._superclasses, ('Animal',)) - self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish')) - self.assertEqual(Mammal._superclasses, ('Animal',)) - self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal')) - self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal')) + self.assertEqual(Fish._superclasses, ("Animal",)) + self.assertEqual(Guppy._superclasses, ("Animal", "Animal.Fish")) + self.assertEqual(Mammal._superclasses, ("Animal",)) + self.assertEqual(Dog._superclasses, ("Animal", "Animal.Mammal")) + self.assertEqual(Human._superclasses, ("Animal", "Animal.Mammal")) def test_external_superclasses(self): """Ensure that the correct list of super classes is assembled when importing part of the model. 
""" - class Animal(Base): pass - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - self.assertEqual(Animal._superclasses, ('Base', )) - self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',)) - self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal', - 'Base.Animal.Fish')) - self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',)) - self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal', - 'Base.Animal.Mammal')) - self.assertEqual(Human._superclasses, ('Base', 'Base.Animal', - 'Base.Animal.Mammal')) + class Animal(Base): + pass + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + self.assertEqual(Animal._superclasses, ("Base",)) + self.assertEqual(Fish._superclasses, ("Base", "Base.Animal")) + self.assertEqual( + Guppy._superclasses, ("Base", "Base.Animal", "Base.Animal.Fish") + ) + self.assertEqual(Mammal._superclasses, ("Base", "Base.Animal")) + self.assertEqual( + Dog._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal") + ) + self.assertEqual( + Human._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal") + ) def test_subclasses(self): """Ensure that the correct list of _subclasses (subclasses) is assembled. """ - class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - self.assertEqual(Animal._subclasses, ('Animal', - 'Animal.Fish', - 'Animal.Fish.Guppy', - 'Animal.Mammal', - 'Animal.Mammal.Dog', - 'Animal.Mammal.Human')) - self.assertEqual(Fish._subclasses, ('Animal.Fish', - 'Animal.Fish.Guppy',)) - self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',)) - self.assertEqual(Mammal._subclasses, ('Animal.Mammal', - 'Animal.Mammal.Dog', - 'Animal.Mammal.Human')) - self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',)) + class Animal(Document): + meta = {"allow_inheritance": True} + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + self.assertEqual( + Animal._subclasses, + ( + "Animal", + "Animal.Fish", + "Animal.Fish.Guppy", + "Animal.Mammal", + "Animal.Mammal.Dog", + "Animal.Mammal.Human", + ), + ) + self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Guppy")) + self.assertEqual(Guppy._subclasses, ("Animal.Fish.Guppy",)) + self.assertEqual( + Mammal._subclasses, + ("Animal.Mammal", "Animal.Mammal.Dog", "Animal.Mammal.Human"), + ) + self.assertEqual(Human._subclasses, ("Animal.Mammal.Human",)) def test_external_subclasses(self): """Ensure that the correct list of _subclasses (subclasses) is assembled when importing part of the model. 
""" - class Animal(Base): pass - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - self.assertEqual(Animal._subclasses, ('Base.Animal', - 'Base.Animal.Fish', - 'Base.Animal.Fish.Guppy', - 'Base.Animal.Mammal', - 'Base.Animal.Mammal.Dog', - 'Base.Animal.Mammal.Human')) - self.assertEqual(Fish._subclasses, ('Base.Animal.Fish', - 'Base.Animal.Fish.Guppy',)) - self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',)) - self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal', - 'Base.Animal.Mammal.Dog', - 'Base.Animal.Mammal.Human')) - self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',)) + class Animal(Base): + pass + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + self.assertEqual( + Animal._subclasses, + ( + "Base.Animal", + "Base.Animal.Fish", + "Base.Animal.Fish.Guppy", + "Base.Animal.Mammal", + "Base.Animal.Mammal.Dog", + "Base.Animal.Mammal.Human", + ), + ) + self.assertEqual( + Fish._subclasses, ("Base.Animal.Fish", "Base.Animal.Fish.Guppy") + ) + self.assertEqual(Guppy._subclasses, ("Base.Animal.Fish.Guppy",)) + self.assertEqual( + Mammal._subclasses, + ( + "Base.Animal.Mammal", + "Base.Animal.Mammal.Dog", + "Base.Animal.Mammal.Human", + ), + ) + self.assertEqual(Human._subclasses, ("Base.Animal.Mammal.Human",)) def test_dynamic_declarations(self): """Test that declaring an extra class updates meta data""" class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ('Animal',)) + self.assertEqual(Animal._subclasses, ("Animal",)) # Test dynamically adding a class changes the meta data class Fish(Animal): pass self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish')) + self.assertEqual(Animal._subclasses, ("Animal", "Animal.Fish")) - self.assertEqual(Fish._superclasses, ('Animal', )) - self.assertEqual(Fish._subclasses, ('Animal.Fish',)) + self.assertEqual(Fish._superclasses, ("Animal",)) + self.assertEqual(Fish._subclasses, ("Animal.Fish",)) # Test dynamically adding an inherited class changes the meta data class Pike(Fish): pass self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish', - 'Animal.Fish.Pike')) + self.assertEqual( + Animal._subclasses, ("Animal", "Animal.Fish", "Animal.Fish.Pike") + ) - self.assertEqual(Fish._superclasses, ('Animal', )) - self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike')) + self.assertEqual(Fish._superclasses, ("Animal",)) + self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Pike")) - self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish')) - self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',)) + self.assertEqual(Pike._superclasses, ("Animal", "Animal.Fish")) + self.assertEqual(Pike._subclasses, ("Animal.Fish.Pike",)) def test_inheritance_meta_data(self): """Ensure that document may inherit fields from a superclass document. 
""" + class Person(Document): name = StringField() age = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Employee(Person): salary = IntField() - self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], - sorted(Employee._fields.keys())) - self.assertEqual(Employee._get_collection_name(), - Person._get_collection_name()) + self.assertEqual( + ["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys()) + ) + self.assertEqual(Employee._get_collection_name(), Person._get_collection_name()) def test_inheritance_to_mongo_keys(self): """Ensure that document may inherit fields from a superclass document. """ + class Person(Document): name = StringField() age = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Employee(Person): salary = IntField() - self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], - sorted(Employee._fields.keys())) - self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), - ['_cls', 'name', 'age']) - self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), - ['_cls', 'name', 'age', 'salary']) - self.assertEqual(Employee._get_collection_name(), - Person._get_collection_name()) + self.assertEqual( + ["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys()) + ) + self.assertEqual( + Person(name="Bob", age=35).to_mongo().keys(), ["_cls", "name", "age"] + ) + self.assertEqual( + Employee(name="Bob", age=35, salary=0).to_mongo().keys(), + ["_cls", "name", "age", "salary"], + ) + self.assertEqual(Employee._get_collection_name(), Person._get_collection_name()) def test_indexes_and_multiple_inheritance(self): """ Ensure that all of the indexes are created for a document with @@ -210,18 +287,12 @@ class InheritanceTest(MongoDBTestCase): class A(Document): a = StringField() - meta = { - 'allow_inheritance': True, - 'indexes': ['a'] - } + meta = {"allow_inheritance": True, "indexes": ["a"]} class B(Document): b = StringField() - meta = { - 'allow_inheritance': True, - 'indexes': ['b'] - } + meta = {"allow_inheritance": True, "indexes": ["b"]} class C(A, B): pass @@ -233,8 +304,12 @@ class InheritanceTest(MongoDBTestCase): C.ensure_indexes() self.assertEqual( - sorted([idx['key'] for idx in C._get_collection().index_information().values()]), - sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]]) + sorted( + [idx["key"] for idx in C._get_collection().index_information().values()] + ), + sorted( + [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]] + ), ) def test_polymorphic_queries(self): @@ -242,11 +317,19 @@ class InheritanceTest(MongoDBTestCase): """ class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass + meta = {"allow_inheritance": True} + + class Fish(Animal): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass Animal.drop_collection() @@ -269,58 +352,68 @@ class InheritanceTest(MongoDBTestCase): """Ensure that inheritance is disabled by default on simple classes and that _cls will not be used. 
""" + class Animal(Document): name = StringField() # can't inherit because Animal didn't explicitly allow inheritance with self.assertRaises(ValueError) as cm: + class Dog(Animal): pass + self.assertIn("Document Animal may not be subclassed", str(cm.exception)) # Check that _cls etc aren't present on simple documents - dog = Animal(name='dog').save() - self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) + dog = Animal(name="dog").save() + self.assertEqual(dog.to_mongo().keys(), ["_id", "name"]) collection = self.db[Animal._get_collection_name()] obj = collection.find_one() - self.assertNotIn('_cls', obj) + self.assertNotIn("_cls", obj) def test_cant_turn_off_inheritance_on_subclass(self): """Ensure if inheritance is on in a subclass you cant turn it off. """ + class Animal(Document): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} with self.assertRaises(ValueError) as cm: + class Mammal(Animal): - meta = {'allow_inheritance': False} - self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False') + meta = {"allow_inheritance": False} + + self.assertEqual( + str(cm.exception), + 'Only direct subclasses of Document may set "allow_inheritance" to False', + ) def test_allow_inheritance_abstract_document(self): """Ensure that abstract documents can set inheritance rules and that _cls will not be used. """ + class FinalDocument(Document): - meta = {'abstract': True, - 'allow_inheritance': False} + meta = {"abstract": True, "allow_inheritance": False} class Animal(FinalDocument): name = StringField() with self.assertRaises(ValueError) as cm: + class Mammal(Animal): pass # Check that _cls isn't present in simple documents - doc = Animal(name='dog') - self.assertNotIn('_cls', doc.to_mongo()) + doc = Animal(name="dog") + self.assertNotIn("_cls", doc.to_mongo()) def test_using_abstract_class_in_reference_field(self): # Ensures no regression of #1920 class AbstractHuman(Document): - meta = {'abstract': True} + meta = {"abstract": True} class Dad(AbstractHuman): name = StringField() @@ -329,130 +422,122 @@ class InheritanceTest(MongoDBTestCase): dad = ReferenceField(AbstractHuman) # Referencing the abstract class address = StringField() - dad = Dad(name='5').save() - Home(dad=dad, address='street').save() + dad = Dad(name="5").save() + Home(dad=dad, address="street").save() home = Home.objects.first() - home.address = 'garbage' - home.save() # Was failing with ValidationError + home.address = "garbage" + home.save() # Was failing with ValidationError def test_abstract_class_referencing_self(self): # Ensures no regression of #1920 class Human(Document): - meta = {'abstract': True} - creator = ReferenceField('self', dbref=True) + meta = {"abstract": True} + creator = ReferenceField("self", dbref=True) class User(Human): name = StringField() - user = User(name='John').save() - user2 = User(name='Foo', creator=user).save() + user = User(name="John").save() + user2 = User(name="Foo", creator=user).save() user2 = User.objects.with_id(user2.id) - user2.name = 'Bar' - user2.save() # Was failing with ValidationError + user2.name = "Bar" + user2.save() # Was failing with ValidationError def test_abstract_handle_ids_in_metaclass_properly(self): - class City(Document): continent = StringField() - meta = {'abstract': True, - 'allow_inheritance': False} + meta = {"abstract": True, "allow_inheritance": False} class EuropeanCity(City): name = StringField() - berlin = EuropeanCity(name='Berlin', continent='Europe') + 
berlin = EuropeanCity(name="Berlin", continent="Europe") self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._fields_ordered), 3) - self.assertEqual(berlin._fields_ordered[0], 'id') + self.assertEqual(berlin._fields_ordered[0], "id") def test_auto_id_not_set_if_specific_in_parent_class(self): - class City(Document): continent = StringField() city_id = IntField(primary_key=True) - meta = {'abstract': True, - 'allow_inheritance': False} + meta = {"abstract": True, "allow_inheritance": False} class EuropeanCity(City): name = StringField() - berlin = EuropeanCity(name='Berlin', continent='Europe') + berlin = EuropeanCity(name="Berlin", continent="Europe") self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._fields_ordered), 3) - self.assertEqual(berlin._fields_ordered[0], 'city_id') + self.assertEqual(berlin._fields_ordered[0], "city_id") def test_auto_id_vs_non_pk_id_field(self): - class City(Document): continent = StringField() id = IntField() - meta = {'abstract': True, - 'allow_inheritance': False} + meta = {"abstract": True, "allow_inheritance": False} class EuropeanCity(City): name = StringField() - berlin = EuropeanCity(name='Berlin', continent='Europe') + berlin = EuropeanCity(name="Berlin", continent="Europe") self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._fields_ordered), 4) - self.assertEqual(berlin._fields_ordered[0], 'auto_id_0') + self.assertEqual(berlin._fields_ordered[0], "auto_id_0") berlin.save() self.assertEqual(berlin.pk, berlin.auto_id_0) def test_abstract_document_creation_does_not_fail(self): class City(Document): continent = StringField() - meta = {'abstract': True, - 'allow_inheritance': False} + meta = {"abstract": True, "allow_inheritance": False} - city = City(continent='asia') + city = City(continent="asia") self.assertEqual(None, city.pk) # TODO: expected error? Shouldn't we create a new error type? 
with self.assertRaises(KeyError): - setattr(city, 'pk', 1) + setattr(city, "pk", 1) def test_allow_inheritance_embedded_document(self): """Ensure embedded documents respect inheritance.""" + class Comment(EmbeddedDocument): content = StringField() with self.assertRaises(ValueError): + class SpecialComment(Comment): pass - doc = Comment(content='test') - self.assertNotIn('_cls', doc.to_mongo()) + doc = Comment(content="test") + self.assertNotIn("_cls", doc.to_mongo()) class Comment(EmbeddedDocument): content = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} - doc = Comment(content='test') - self.assertIn('_cls', doc.to_mongo()) + doc = Comment(content="test") + self.assertIn("_cls", doc.to_mongo()) def test_document_inheritance(self): """Ensure mutliple inheritance of abstract documents """ + class DateCreatedDocument(Document): - meta = { - 'allow_inheritance': True, - 'abstract': True, - } + meta = {"allow_inheritance": True, "abstract": True} class DateUpdatedDocument(Document): - meta = { - 'allow_inheritance': True, - 'abstract': True, - } + meta = {"allow_inheritance": True, "abstract": True} try: + class MyDocument(DateCreatedDocument, DateUpdatedDocument): pass + except Exception: self.assertTrue(False, "Couldn't create MyDocument class") @@ -460,47 +545,55 @@ class InheritanceTest(MongoDBTestCase): """Ensure that a document superclass can be marked as abstract thereby not using it as the name for the collection.""" - defaults = {'index_background': True, - 'index_drop_dups': True, - 'index_opts': {'hello': 'world'}, - 'allow_inheritance': True, - 'queryset_class': 'QuerySet', - 'db_alias': 'myDB', - 'shard_key': ('hello', 'world')} + defaults = { + "index_background": True, + "index_drop_dups": True, + "index_opts": {"hello": "world"}, + "allow_inheritance": True, + "queryset_class": "QuerySet", + "db_alias": "myDB", + "shard_key": ("hello", "world"), + } - meta_settings = {'abstract': True} + meta_settings = {"abstract": True} meta_settings.update(defaults) class Animal(Document): name = StringField() meta = meta_settings - class Fish(Animal): pass - class Guppy(Fish): pass + class Fish(Animal): + pass + + class Guppy(Fish): + pass class Mammal(Animal): - meta = {'abstract': True} - class Human(Mammal): pass + meta = {"abstract": True} + + class Human(Mammal): + pass for k, v in iteritems(defaults): for cls in [Animal, Fish, Guppy]: self.assertEqual(cls._meta[k], v) - self.assertNotIn('collection', Animal._meta) - self.assertNotIn('collection', Mammal._meta) + self.assertNotIn("collection", Animal._meta) + self.assertNotIn("collection", Mammal._meta) self.assertEqual(Animal._get_collection_name(), None) self.assertEqual(Mammal._get_collection_name(), None) - self.assertEqual(Fish._get_collection_name(), 'fish') - self.assertEqual(Guppy._get_collection_name(), 'fish') - self.assertEqual(Human._get_collection_name(), 'human') + self.assertEqual(Fish._get_collection_name(), "fish") + self.assertEqual(Guppy._get_collection_name(), "fish") + self.assertEqual(Human._get_collection_name(), "human") # ensure that a subclass of a non-abstract class can't be abstract with self.assertRaises(ValueError): + class EvilHuman(Human): evil = BooleanField(default=True) - meta = {'abstract': True} + meta = {"abstract": True} def test_abstract_embedded_documents(self): # 789: EmbeddedDocument shouldn't inherit abstract @@ -519,7 +612,7 @@ class InheritanceTest(MongoDBTestCase): class Drink(Document): name = StringField() - meta = {'allow_inheritance': True} + 
meta = {"allow_inheritance": True} class Drinker(Document): drink = GenericReferenceField() @@ -528,13 +621,13 @@ class InheritanceTest(MongoDBTestCase): warnings.simplefilter("error") class AcloholicDrink(Drink): - meta = {'collection': 'booze'} + meta = {"collection": "booze"} except SyntaxWarning: warnings.simplefilter("ignore") class AlcoholicDrink(Drink): - meta = {'collection': 'booze'} + meta = {"collection": "booze"} else: raise AssertionError("SyntaxWarning should be triggered") @@ -545,13 +638,13 @@ class InheritanceTest(MongoDBTestCase): AlcoholicDrink.drop_collection() Drinker.drop_collection() - red_bull = Drink(name='Red Bull') + red_bull = Drink(name="Red Bull") red_bull.save() programmer = Drinker(drink=red_bull) programmer.save() - beer = AlcoholicDrink(name='Beer') + beer = AlcoholicDrink(name="Beer") beer.save() real_person = Drinker(drink=beer) real_person.save() @@ -560,5 +653,5 @@ class InheritanceTest(MongoDBTestCase): self.assertEqual(Drinker.objects[1].drink.name, beer.name) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/instance.py b/tests/document/instance.py index 06f65076..49606cff 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -16,24 +16,33 @@ from mongoengine import signals from mongoengine.base import _document_registry, get_document from mongoengine.connection import get_db from mongoengine.context_managers import query_counter, switch_db -from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, \ - InvalidQueryError, NotRegistered, NotUniqueError, SaveConditionError) +from mongoengine.errors import ( + FieldDoesNotExist, + InvalidDocumentError, + InvalidQueryError, + NotRegistered, + NotUniqueError, + SaveConditionError, +) from mongoengine.mongodb_support import MONGODB_34, MONGODB_36, get_mongodb_version from mongoengine.pymongo_support import list_collection_names from mongoengine.queryset import NULLIFY, Q from tests import fixtures -from tests.fixtures import (PickleDynamicEmbedded, PickleDynamicTest, \ - PickleEmbedded, PickleSignalsTest, PickleTest) +from tests.fixtures import ( + PickleDynamicEmbedded, + PickleDynamicTest, + PickleEmbedded, + PickleSignalsTest, + PickleTest, +) from tests.utils import MongoDBTestCase, get_as_pymongo -TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), - '../fields/mongoengine.png') +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "../fields/mongoengine.png") __all__ = ("InstanceTest",) class InstanceTest(MongoDBTestCase): - def setUp(self): class Job(EmbeddedDocument): name = StringField() @@ -58,7 +67,8 @@ class InstanceTest(MongoDBTestCase): def assertDbEqual(self, docs): self.assertEqual( list(self.Person._get_collection().find().sort("id")), - sorted(docs, key=lambda doc: doc["_id"])) + sorted(docs, key=lambda doc: doc["_id"]), + ) def assertHasInstance(self, field, instance): self.assertTrue(hasattr(field, "_instance")) @@ -70,12 +80,10 @@ class InstanceTest(MongoDBTestCase): def test_capped_collection(self): """Ensure that capped collections work properly.""" + class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - 'max_size': 4096, - } + meta = {"max_documents": 10, "max_size": 4096} Log.drop_collection() @@ -90,16 +98,14 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(Log.objects.count(), 10) options = Log.objects._collection.options() - self.assertEqual(options['capped'], True) - self.assertEqual(options['max'], 10) - 
self.assertEqual(options['size'], 4096) + self.assertEqual(options["capped"], True) + self.assertEqual(options["max"], 10) + self.assertEqual(options["size"], 4096) # Check that the document cannot be redefined with different options class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 11, - } + meta = {"max_documents": 11} # Accessing Document.objects creates the collection with self.assertRaises(InvalidCollectionError): @@ -107,11 +113,10 @@ class InstanceTest(MongoDBTestCase): def test_capped_collection_default(self): """Ensure that capped collections defaults work properly.""" + class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - } + meta = {"max_documents": 10} Log.drop_collection() @@ -119,16 +124,14 @@ class InstanceTest(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - self.assertEqual(options['capped'], True) - self.assertEqual(options['max'], 10) - self.assertEqual(options['size'], 10 * 2**20) + self.assertEqual(options["capped"], True) + self.assertEqual(options["max"], 10) + self.assertEqual(options["size"], 10 * 2 ** 20) # Check that the document with default value can be recreated class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - } + meta = {"max_documents": 10} # Create the collection by accessing Document.objects Log.objects @@ -138,11 +141,10 @@ class InstanceTest(MongoDBTestCase): MongoDB rounds up max_size to next multiple of 256, recreating a doc with the same spec failed in mongoengine <0.10 """ + class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_size': 10000, - } + meta = {"max_size": 10000} Log.drop_collection() @@ -150,15 +152,13 @@ class InstanceTest(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - self.assertEqual(options['capped'], True) - self.assertTrue(options['size'] >= 10000) + self.assertEqual(options["capped"], True) + self.assertTrue(options["size"] >= 10000) # Check that the document with odd max_size value can be recreated class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_size': 10000, - } + meta = {"max_size": 10000} # Create the collection by accessing Document.objects Log.objects @@ -166,26 +166,28 @@ class InstanceTest(MongoDBTestCase): def test_repr(self): """Ensure that unicode representation works """ + class Article(Document): title = StringField() def __unicode__(self): return self.title - doc = Article(title=u'привет мир') + doc = Article(title=u"привет мир") - self.assertEqual('', repr(doc)) + self.assertEqual("", repr(doc)) def test_repr_none(self): """Ensure None values are handled correctly.""" + class Article(Document): title = StringField() def __str__(self): return None - doc = Article(title=u'привет мир') - self.assertEqual('', repr(doc)) + doc = Article(title=u"привет мир") + self.assertEqual("", repr(doc)) def test_queryset_resurrects_dropped_collection(self): self.Person.drop_collection() @@ -203,8 +205,9 @@ class InstanceTest(MongoDBTestCase): """Ensure that the correct subclasses are returned from a query when using references / generic references """ + class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Fish(Animal): pass @@ -255,7 +258,7 @@ class InstanceTest(MongoDBTestCase): class Stats(Document): created = DateTimeField(default=datetime.now) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class 
CompareStats(Document): generated = DateTimeField(default=datetime.now) @@ -278,6 +281,7 @@ class InstanceTest(MongoDBTestCase): def test_db_field_load(self): """Ensure we load data correctly from the right db field.""" + class Person(Document): name = StringField(required=True) _rank = StringField(required=False, db_field="rank") @@ -297,14 +301,13 @@ class InstanceTest(MongoDBTestCase): def test_db_embedded_doc_field_load(self): """Ensure we load embedded document data correctly.""" + class Rank(EmbeddedDocument): title = StringField(required=True) class Person(Document): name = StringField(required=True) - rank_ = EmbeddedDocumentField(Rank, - required=False, - db_field='rank') + rank_ = EmbeddedDocumentField(Rank, required=False, db_field="rank") @property def rank(self): @@ -322,45 +325,50 @@ class InstanceTest(MongoDBTestCase): def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys.""" + class User(Document): username = StringField(primary_key=True) name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} User.drop_collection() - self.assertEqual(User._fields['username'].db_field, '_id') - self.assertEqual(User._meta['id_field'], 'username') + self.assertEqual(User._fields["username"].db_field, "_id") + self.assertEqual(User._meta["id_field"], "username") - User.objects.create(username='test', name='test user') + User.objects.create(username="test", name="test user") user = User.objects.first() - self.assertEqual(user.id, 'test') - self.assertEqual(user.pk, 'test') + self.assertEqual(user.id, "test") + self.assertEqual(user.pk, "test") user_dict = User.objects._collection.find_one() - self.assertEqual(user_dict['_id'], 'test') + self.assertEqual(user_dict["_id"], "test") def test_change_custom_id_field_in_subclass(self): """Subclasses cannot override which field is the primary key.""" + class User(Document): username = StringField(primary_key=True) name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} with self.assertRaises(ValueError) as e: + class EmailUser(User): email = StringField(primary_key=True) + exc = e.exception - self.assertEqual(str(exc), 'Cannot override primary key field') + self.assertEqual(str(exc), "Cannot override primary key field") def test_custom_id_field_is_required(self): """Ensure the custom primary key field is required.""" + class User(Document): username = StringField(primary_key=True) name = StringField() with self.assertRaises(ValidationError) as e: - User(name='test').save() + User(name="test").save() exc = e.exception self.assertTrue("Field is required: ['username']" in str(exc)) @@ -368,7 +376,7 @@ class InstanceTest(MongoDBTestCase): class Place(Document): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class NicePlace(Place): pass @@ -380,7 +388,7 @@ class InstanceTest(MongoDBTestCase): # Mimic Place and NicePlace definitions being in a different file # and the NicePlace model not being imported in at query time. 
- del(_document_registry['Place.NicePlace']) + del _document_registry["Place.NicePlace"] with self.assertRaises(NotRegistered): list(Place.objects.all()) @@ -388,10 +396,10 @@ class InstanceTest(MongoDBTestCase): def test_document_registry_regressions(self): class Location(Document): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Area(Location): - location = ReferenceField('Location', dbref=True) + location = ReferenceField("Location", dbref=True) Location.drop_collection() @@ -413,18 +421,19 @@ class InstanceTest(MongoDBTestCase): def test_key_like_attribute_access(self): person = self.Person(age=30) - self.assertEqual(person['age'], 30) + self.assertEqual(person["age"], 30) with self.assertRaises(KeyError): - person['unknown_attr'] + person["unknown_attr"] def test_save_abstract_document(self): """Saving an abstract document should fail.""" + class Doc(Document): name = StringField() - meta = {'abstract': True} + meta = {"abstract": True} with self.assertRaises(InvalidDocumentError): - Doc(name='aaa').save() + Doc(name="aaa").save() def test_reload(self): """Ensure that attributes may be reloaded.""" @@ -439,7 +448,7 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 20) - person.reload('age') + person.reload("age") self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 21) @@ -454,19 +463,22 @@ class InstanceTest(MongoDBTestCase): def test_reload_sharded(self): class Animal(Document): superphylum = StringField() - meta = {'shard_key': ('superphylum',)} + meta = {"shard_key": ("superphylum",)} Animal.drop_collection() - doc = Animal(superphylum='Deuterostomia') + doc = Animal(superphylum="Deuterostomia") doc.save() mongo_db = get_mongodb_version() - CMD_QUERY_KEY = 'command' if mongo_db >= MONGODB_36 else 'query' + CMD_QUERY_KEY = "command" if mongo_db >= MONGODB_36 else "query" with query_counter() as q: doc.reload() - query_op = q.db.system.profile.find({'ns': 'mongoenginetest.animal'})[0] - self.assertEqual(set(query_op[CMD_QUERY_KEY]['filter'].keys()), set(['_id', 'superphylum'])) + query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] + self.assertEqual( + set(query_op[CMD_QUERY_KEY]["filter"].keys()), + set(["_id", "superphylum"]), + ) Animal.drop_collection() @@ -476,10 +488,10 @@ class InstanceTest(MongoDBTestCase): class Animal(Document): superphylum = EmbeddedDocumentField(SuperPhylum) - meta = {'shard_key': ('superphylum.name',)} + meta = {"shard_key": ("superphylum.name",)} Animal.drop_collection() - doc = Animal(superphylum=SuperPhylum(name='Deuterostomia')) + doc = Animal(superphylum=SuperPhylum(name="Deuterostomia")) doc.save() doc.reload() Animal.drop_collection() @@ -488,49 +500,57 @@ class InstanceTest(MongoDBTestCase): """Ensures updating a doc with a specified shard_key includes it in the query. 
""" + class Animal(Document): is_mammal = BooleanField() name = StringField() - meta = {'shard_key': ('is_mammal', 'id')} + meta = {"shard_key": ("is_mammal", "id")} Animal.drop_collection() - doc = Animal(is_mammal=True, name='Dog') + doc = Animal(is_mammal=True, name="Dog") doc.save() mongo_db = get_mongodb_version() with query_counter() as q: - doc.name = 'Cat' + doc.name = "Cat" doc.save() - query_op = q.db.system.profile.find({'ns': 'mongoenginetest.animal'})[0] - self.assertEqual(query_op['op'], 'update') + query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] + self.assertEqual(query_op["op"], "update") if mongo_db <= MONGODB_34: - self.assertEqual(set(query_op['query'].keys()), set(['_id', 'is_mammal'])) + self.assertEqual( + set(query_op["query"].keys()), set(["_id", "is_mammal"]) + ) else: - self.assertEqual(set(query_op['command']['q'].keys()), set(['_id', 'is_mammal'])) + self.assertEqual( + set(query_op["command"]["q"].keys()), set(["_id", "is_mammal"]) + ) Animal.drop_collection() def test_reload_with_changed_fields(self): """Ensures reloading will not affect changed fields""" + class User(Document): name = StringField() number = IntField() + User.drop_collection() user = User(name="Bob", number=1).save() user.name = "John" user.number = 2 - self.assertEqual(user._get_changed_fields(), ['name', 'number']) - user.reload('number') - self.assertEqual(user._get_changed_fields(), ['name']) + self.assertEqual(user._get_changed_fields(), ["name", "number"]) + user.reload("number") + self.assertEqual(user._get_changed_fields(), ["name"]) user.save() user.reload() self.assertEqual(user.name, "John") def test_reload_referencing(self): """Ensures reloading updates weakrefs correctly.""" + class Embedded(EmbeddedDocument): dict_field = DictField() list_field = ListField() @@ -542,24 +562,30 @@ class InstanceTest(MongoDBTestCase): Doc.drop_collection() doc = Doc() - doc.dict_field = {'hello': 'world'} - doc.list_field = ['1', 2, {'hello': 'world'}] + doc.dict_field = {"hello": "world"} + doc.list_field = ["1", 2, {"hello": "world"}] embedded_1 = Embedded() - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 doc.save() doc = doc.reload(10) doc.list_field.append(1) - doc.dict_field['woot'] = "woot" + doc.dict_field["woot"] = "woot" doc.embedded_field.list_field.append(1) - doc.embedded_field.dict_field['woot'] = "woot" + doc.embedded_field.dict_field["woot"] = "woot" - self.assertEqual(doc._get_changed_fields(), [ - 'list_field', 'dict_field.woot', 'embedded_field.list_field', - 'embedded_field.dict_field.woot']) + self.assertEqual( + doc._get_changed_fields(), + [ + "list_field", + "dict_field.woot", + "embedded_field.list_field", + "embedded_field.dict_field.woot", + ], + ) doc.save() self.assertEqual(len(doc.list_field), 4) @@ -572,9 +598,9 @@ class InstanceTest(MongoDBTestCase): doc.list_field.append(1) doc.save() - doc.dict_field['extra'] = 1 - doc = doc.reload(10, 'list_field') - self.assertEqual(doc._get_changed_fields(), ['dict_field.extra']) + doc.dict_field["extra"] = 1 + doc = doc.reload(10, "list_field") + self.assertEqual(doc._get_changed_fields(), ["dict_field.extra"]) self.assertEqual(len(doc.list_field), 5) self.assertEqual(len(doc.dict_field), 3) self.assertEqual(len(doc.embedded_field.list_field), 4) @@ -596,19 +622,17 @@ class InstanceTest(MongoDBTestCase): def 
test_reload_of_non_strict_with_special_field_name(self): """Ensures reloading works for documents with meta strict == False.""" + class Post(Document): - meta = { - 'strict': False - } + meta = {"strict": False} title = StringField() items = ListField() Post.drop_collection() - Post._get_collection().insert_one({ - "title": "Items eclipse", - "items": ["more lorem", "even more ipsum"] - }) + Post._get_collection().insert_one( + {"title": "Items eclipse", "items": ["more lorem", "even more ipsum"]} + ) post = Post.objects.first() post.reload() @@ -617,22 +641,22 @@ class InstanceTest(MongoDBTestCase): def test_dictionary_access(self): """Ensure that dictionary-style field access works properly.""" - person = self.Person(name='Test User', age=30, job=self.Job()) - self.assertEqual(person['name'], 'Test User') + person = self.Person(name="Test User", age=30, job=self.Job()) + self.assertEqual(person["name"], "Test User") - self.assertRaises(KeyError, person.__getitem__, 'salary') - self.assertRaises(KeyError, person.__setitem__, 'salary', 50) + self.assertRaises(KeyError, person.__getitem__, "salary") + self.assertRaises(KeyError, person.__setitem__, "salary", 50) - person['name'] = 'Another User' - self.assertEqual(person['name'], 'Another User') + person["name"] = "Another User" + self.assertEqual(person["name"], "Another User") # Length = length(assigned fields + id) self.assertEqual(len(person), 5) - self.assertIn('age', person) + self.assertIn("age", person) person.age = None - self.assertNotIn('age', person) - self.assertNotIn('nationality', person) + self.assertNotIn("age", person) + self.assertNotIn("nationality", person) def test_embedded_document_to_mongo(self): class Person(EmbeddedDocument): @@ -644,29 +668,33 @@ class InstanceTest(MongoDBTestCase): class Employee(Person): salary = IntField() - self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), - ['_cls', 'name', 'age']) + self.assertEqual( + Person(name="Bob", age=35).to_mongo().keys(), ["_cls", "name", "age"] + ) self.assertEqual( Employee(name="Bob", age=35, salary=0).to_mongo().keys(), - ['_cls', 'name', 'age', 'salary']) + ["_cls", "name", "age", "salary"], + ) def test_embedded_document_to_mongo_id(self): class SubDoc(EmbeddedDocument): id = StringField(required=True) sub_doc = SubDoc(id="abc") - self.assertEqual(sub_doc.to_mongo().keys(), ['id']) + self.assertEqual(sub_doc.to_mongo().keys(), ["id"]) def test_embedded_document(self): """Ensure that embedded documents are set up correctly.""" + class Comment(EmbeddedDocument): content = StringField() - self.assertIn('content', Comment._fields) - self.assertNotIn('id', Comment._fields) + self.assertIn("content", Comment._fields) + self.assertNotIn("id", Comment._fields) def test_embedded_document_instance(self): """Ensure that embedded documents can reference parent instance.""" + class Embedded(EmbeddedDocument): string = StringField() @@ -686,6 +714,7 @@ class InstanceTest(MongoDBTestCase): """Ensure that embedded documents in complex fields can reference parent instance. 
""" + class Embedded(EmbeddedDocument): string = StringField() @@ -702,15 +731,19 @@ class InstanceTest(MongoDBTestCase): def test_embedded_document_complex_instance_no_use_db_field(self): """Ensure that use_db_field is propagated to list of Emb Docs.""" + class Embedded(EmbeddedDocument): - string = StringField(db_field='s') + string = StringField(db_field="s") class Doc(Document): embedded_field = ListField(EmbeddedDocumentField(Embedded)) - d = Doc(embedded_field=[Embedded(string="Hi")]).to_mongo( - use_db_field=False).to_dict() - self.assertEqual(d['embedded_field'], [{'string': 'Hi'}]) + d = ( + Doc(embedded_field=[Embedded(string="Hi")]) + .to_mongo(use_db_field=False) + .to_dict() + ) + self.assertEqual(d["embedded_field"], [{"string": "Hi"}]) def test_instance_is_set_on_setattr(self): class Email(EmbeddedDocument): @@ -722,7 +755,7 @@ class InstanceTest(MongoDBTestCase): Account.drop_collection() acc = Account() - acc.email = Email(email='test@example.com') + acc.email = Email(email="test@example.com") self.assertHasInstance(acc._data["email"], acc) acc.save() @@ -738,7 +771,7 @@ class InstanceTest(MongoDBTestCase): Account.drop_collection() acc = Account() - acc.emails = [Email(email='test@example.com')] + acc.emails = [Email(email="test@example.com")] self.assertHasInstance(acc._data["emails"][0], acc) acc.save() @@ -764,22 +797,19 @@ class InstanceTest(MongoDBTestCase): @classmethod def pre_save_post_validation(cls, sender, document, **kwargs): - document.content = 'checked' + document.content = "checked" - signals.pre_save_post_validation.connect(BlogPost.pre_save_post_validation, sender=BlogPost) + signals.pre_save_post_validation.connect( + BlogPost.pre_save_post_validation, sender=BlogPost + ) BlogPost.drop_collection() - post = BlogPost(content='unchecked').save() - self.assertEqual(post.content, 'checked') + post = BlogPost(content="unchecked").save() + self.assertEqual(post.content, "checked") # Make sure pre_save_post_validation changes makes it to the db raw_doc = get_as_pymongo(post) - self.assertEqual( - raw_doc, - { - 'content': 'checked', - '_id': post.id - }) + self.assertEqual(raw_doc, {"content": "checked", "_id": post.id}) # Important to disconnect as it could cause some assertions in test_signals # to fail (due to the garbage collection timing of this signal) @@ -810,13 +840,7 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(t.cleaned, True) raw_doc = get_as_pymongo(t) # Make sure clean changes makes it to the db - self.assertEqual( - raw_doc, - { - 'status': 'published', - 'cleaned': True, - '_id': t.id - }) + self.assertEqual(raw_doc, {"status": "published", "cleaned": True, "_id": t.id}) def test_document_embedded_clean(self): class TestEmbeddedDocument(EmbeddedDocument): @@ -824,12 +848,12 @@ class InstanceTest(MongoDBTestCase): y = IntField(required=True) z = IntField(required=True) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} def clean(self): if self.z: if self.z != self.x + self.y: - raise ValidationError('Value of z != x + y') + raise ValidationError("Value of z != x + y") else: self.z = self.x + self.y @@ -846,7 +870,7 @@ class InstanceTest(MongoDBTestCase): expected_msg = "Value of z != x + y" self.assertIn(expected_msg, cm.exception.message) - self.assertEqual(cm.exception.to_dict(), {'doc': {'__all__': expected_msg}}) + self.assertEqual(cm.exception.to_dict(), {"doc": {"__all__": expected_msg}}) t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() self.assertEqual(t.doc.z, 35) @@ -869,7 +893,7 @@ class 
InstanceTest(MongoDBTestCase): docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] with self.assertRaises(InvalidQueryError): - doc1.modify({'id': doc2.id}, set__value=20) + doc1.modify({"id": doc2.id}, set__value=20) self.assertDbEqual(docs) @@ -878,7 +902,7 @@ class InstanceTest(MongoDBTestCase): doc2 = self.Person(name="jim", age=20).save() docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] - n_modified = doc1.modify({'name': doc2.name}, set__age=100) + n_modified = doc1.modify({"name": doc2.name}, set__age=100) self.assertEqual(n_modified, 0) self.assertDbEqual(docs) @@ -888,7 +912,7 @@ class InstanceTest(MongoDBTestCase): doc2 = self.Person(id=ObjectId(), name="jim", age=20) docs = [dict(doc1.to_mongo())] - n_modified = doc2.modify({'name': doc2.name}, set__age=100) + n_modified = doc2.modify({"name": doc2.name}, set__age=100) self.assertEqual(n_modified, 0) self.assertDbEqual(docs) @@ -896,7 +920,8 @@ class InstanceTest(MongoDBTestCase): def test_modify_update(self): other_doc = self.Person(name="bob", age=10).save() doc = self.Person( - name="jim", age=20, job=self.Job(name="10gen", years=3)).save() + name="jim", age=20, job=self.Job(name="10gen", years=3) + ).save() doc_copy = doc._from_son(doc.to_mongo()) @@ -906,7 +931,8 @@ class InstanceTest(MongoDBTestCase): doc.job.years = 3 n_modified = doc.modify( - set__age=21, set__job__name="MongoDB", unset__job__years=True) + set__age=21, set__job__name="MongoDB", unset__job__years=True + ) self.assertEqual(n_modified, 1) doc_copy.age = 21 doc_copy.job.name = "MongoDB" @@ -926,62 +952,56 @@ class InstanceTest(MongoDBTestCase): content = EmbeddedDocumentField(Content) post = BlogPost.objects.create( - tags=['python'], content=Content(keywords=['ipsum'])) - - self.assertEqual(post.tags, ['python']) - post.modify(push__tags__0=['code', 'mongo']) - self.assertEqual(post.tags, ['code', 'mongo', 'python']) - - # Assert same order of the list items is maintained in the db - self.assertEqual( - BlogPost._get_collection().find_one({'_id': post.pk})['tags'], - ['code', 'mongo', 'python'] + tags=["python"], content=Content(keywords=["ipsum"]) ) - self.assertEqual(post.content.keywords, ['ipsum']) - post.modify(push__content__keywords__0=['lorem']) - self.assertEqual(post.content.keywords, ['lorem', 'ipsum']) + self.assertEqual(post.tags, ["python"]) + post.modify(push__tags__0=["code", "mongo"]) + self.assertEqual(post.tags, ["code", "mongo", "python"]) # Assert same order of the list items is maintained in the db self.assertEqual( - BlogPost._get_collection().find_one({'_id': post.pk})['content']['keywords'], - ['lorem', 'ipsum'] + BlogPost._get_collection().find_one({"_id": post.pk})["tags"], + ["code", "mongo", "python"], + ) + + self.assertEqual(post.content.keywords, ["ipsum"]) + post.modify(push__content__keywords__0=["lorem"]) + self.assertEqual(post.content.keywords, ["lorem", "ipsum"]) + + # Assert same order of the list items is maintained in the db + self.assertEqual( + BlogPost._get_collection().find_one({"_id": post.pk})["content"][ + "keywords" + ], + ["lorem", "ipsum"], ) def test_save(self): """Ensure that a document may be saved in the database.""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30) + person = self.Person(name="Test User", age=30) person.save() # Ensure that the object is in the database raw_doc = get_as_pymongo(person) self.assertEqual( raw_doc, - { - '_cls': 'Person', - 'name': 'Test User', - 'age': 30, - '_id': person.id - }) + {"_cls": "Person", "name": "Test 
User", "age": 30, "_id": person.id}, + ) def test_save_skip_validation(self): class Recipient(Document): email = EmailField(required=True) - recipient = Recipient(email='not-an-email') + recipient = Recipient(email="not-an-email") with self.assertRaises(ValidationError): recipient.save() recipient.save(validate=False) raw_doc = get_as_pymongo(recipient) - self.assertEqual( - raw_doc, - { - 'email': 'not-an-email', - '_id': recipient.id - }) + self.assertEqual(raw_doc, {"email": "not-an-email", "_id": recipient.id}) def test_save_with_bad_id(self): class Clown(Document): @@ -1012,8 +1032,8 @@ class InstanceTest(MongoDBTestCase): def test_save_max_recursion_not_hit(self): class Person(Document): name = StringField() - parent = ReferenceField('self') - friend = ReferenceField('self') + parent = ReferenceField("self") + friend = ReferenceField("self") Person.drop_collection() @@ -1031,28 +1051,28 @@ class InstanceTest(MongoDBTestCase): # Confirm can save and it resets the changed fields without hitting # max recursion error p0 = Person.objects.first() - p0.name = 'wpjunior' + p0.name = "wpjunior" p0.save() def test_save_max_recursion_not_hit_with_file_field(self): class Foo(Document): name = StringField() picture = FileField() - bar = ReferenceField('self') + bar = ReferenceField("self") Foo.drop_collection() - a = Foo(name='hello').save() + a = Foo(name="hello").save() a.bar = a - with open(TEST_IMAGE_PATH, 'rb') as test_image: + with open(TEST_IMAGE_PATH, "rb") as test_image: a.picture = test_image a.save() # Confirm can save and it resets the changed fields without hitting # max recursion error b = Foo.objects.with_id(a.id) - b.name = 'world' + b.name = "world" b.save() self.assertEqual(b.picture, b.bar.picture, b.bar.bar.picture) @@ -1060,7 +1080,7 @@ class InstanceTest(MongoDBTestCase): def test_save_cascades(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") Person.drop_collection() @@ -1082,7 +1102,7 @@ class InstanceTest(MongoDBTestCase): def test_save_cascade_kwargs(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") Person.drop_collection() @@ -1102,9 +1122,9 @@ class InstanceTest(MongoDBTestCase): def test_save_cascade_meta_false(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") - meta = {'cascade': False} + meta = {"cascade": False} Person.drop_collection() @@ -1130,9 +1150,9 @@ class InstanceTest(MongoDBTestCase): def test_save_cascade_meta_true(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") - meta = {'cascade': False} + meta = {"cascade": False} Person.drop_collection() @@ -1194,7 +1214,7 @@ class InstanceTest(MongoDBTestCase): w1 = Widget(toggle=False, save_id=UUID(1)) # ignore save_condition on new record creation - w1.save(save_condition={'save_id': UUID(42)}) + w1.save(save_condition={"save_id": UUID(42)}) w1.reload() self.assertFalse(w1.toggle) self.assertEqual(w1.save_id, UUID(1)) @@ -1204,8 +1224,9 @@ class InstanceTest(MongoDBTestCase): flip(w1) self.assertTrue(w1.toggle) self.assertEqual(w1.count, 1) - self.assertRaises(SaveConditionError, - w1.save, save_condition={'save_id': UUID(42)}) + self.assertRaises( + SaveConditionError, w1.save, save_condition={"save_id": UUID(42)} + ) w1.reload() self.assertFalse(w1.toggle) self.assertEqual(w1.count, 0) @@ -1214,7 +1235,7 @@ class 
InstanceTest(MongoDBTestCase): flip(w1) self.assertTrue(w1.toggle) self.assertEqual(w1.count, 1) - w1.save(save_condition={'save_id': UUID(1)}) + w1.save(save_condition={"save_id": UUID(1)}) w1.reload() self.assertTrue(w1.toggle) self.assertEqual(w1.count, 1) @@ -1227,27 +1248,29 @@ class InstanceTest(MongoDBTestCase): flip(w1) w1.save_id = UUID(2) - w1.save(save_condition={'save_id': old_id}) + w1.save(save_condition={"save_id": old_id}) w1.reload() self.assertFalse(w1.toggle) self.assertEqual(w1.count, 2) flip(w2) flip(w2) - self.assertRaises(SaveConditionError, - w2.save, save_condition={'save_id': old_id}) + self.assertRaises( + SaveConditionError, w2.save, save_condition={"save_id": old_id} + ) w2.reload() self.assertFalse(w2.toggle) self.assertEqual(w2.count, 2) # save_condition uses mongoengine-style operator syntax flip(w1) - w1.save(save_condition={'count__lt': w1.count}) + w1.save(save_condition={"count__lt": w1.count}) w1.reload() self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) flip(w1) - self.assertRaises(SaveConditionError, - w1.save, save_condition={'count__gte': w1.count}) + self.assertRaises( + SaveConditionError, w1.save, save_condition={"count__gte": w1.count} + ) w1.reload() self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) @@ -1259,19 +1282,19 @@ class InstanceTest(MongoDBTestCase): WildBoy.drop_collection() - WildBoy(age=12, name='John').save() + WildBoy(age=12, name="John").save() boy1 = WildBoy.objects().first() boy2 = WildBoy.objects().first() boy1.age = 99 boy1.save() - boy2.name = 'Bob' + boy2.name = "Bob" boy2.save() fresh_boy = WildBoy.objects().first() self.assertEqual(fresh_boy.age, 99) - self.assertEqual(fresh_boy.name, 'Bob') + self.assertEqual(fresh_boy.name, "Bob") def test_save_update_selectively_with_custom_pk(self): # Prevents regression of #2082 @@ -1282,30 +1305,30 @@ class InstanceTest(MongoDBTestCase): WildBoy.drop_collection() - WildBoy(pk_id='A', age=12, name='John').save() + WildBoy(pk_id="A", age=12, name="John").save() boy1 = WildBoy.objects().first() boy2 = WildBoy.objects().first() boy1.age = 99 boy1.save() - boy2.name = 'Bob' + boy2.name = "Bob" boy2.save() fresh_boy = WildBoy.objects().first() self.assertEqual(fresh_boy.age, 99) - self.assertEqual(fresh_boy.name, 'Bob') + self.assertEqual(fresh_boy.name, "Bob") def test_update(self): """Ensure that an existing document is updated instead of be overwritten. 
""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30) + person = self.Person(name="Test User", age=30) person.save() # Create same person object, with same id, without age - same_person = self.Person(name='Test') + same_person = self.Person(name="Test") same_person.id = person.id same_person.save() @@ -1322,54 +1345,54 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(person.age, same_person.age) # Confirm the saved values - self.assertEqual(person.name, 'Test') + self.assertEqual(person.name, "Test") self.assertEqual(person.age, 30) # Test only / exclude only updates included fields - person = self.Person.objects.only('name').get() - person.name = 'User' + person = self.Person.objects.only("name").get() + person.name = "User" person.save() person.reload() - self.assertEqual(person.name, 'User') + self.assertEqual(person.name, "User") self.assertEqual(person.age, 30) # test exclude only updates set fields - person = self.Person.objects.exclude('name').get() + person = self.Person.objects.exclude("name").get() person.age = 21 person.save() person.reload() - self.assertEqual(person.name, 'User') + self.assertEqual(person.name, "User") self.assertEqual(person.age, 21) # Test only / exclude can set non excluded / included fields - person = self.Person.objects.only('name').get() - person.name = 'Test' + person = self.Person.objects.only("name").get() + person.name = "Test" person.age = 30 person.save() person.reload() - self.assertEqual(person.name, 'Test') + self.assertEqual(person.name, "Test") self.assertEqual(person.age, 30) # test exclude only updates set fields - person = self.Person.objects.exclude('name').get() - person.name = 'User' + person = self.Person.objects.exclude("name").get() + person.name = "User" person.age = 21 person.save() person.reload() - self.assertEqual(person.name, 'User') + self.assertEqual(person.name, "User") self.assertEqual(person.age, 21) # Confirm does remove unrequired fields - person = self.Person.objects.exclude('name').get() + person = self.Person.objects.exclude("name").get() person.age = None person.save() person.reload() - self.assertEqual(person.name, 'User') + self.assertEqual(person.name, "User") self.assertEqual(person.age, None) person = self.Person.objects.get() @@ -1384,19 +1407,18 @@ class InstanceTest(MongoDBTestCase): def test_update_rename_operator(self): """Test the $rename operator.""" coll = self.Person._get_collection() - doc = self.Person(name='John').save() - raw_doc = coll.find_one({'_id': doc.pk}) - self.assertEqual(set(raw_doc.keys()), set(['_id', '_cls', 'name'])) + doc = self.Person(name="John").save() + raw_doc = coll.find_one({"_id": doc.pk}) + self.assertEqual(set(raw_doc.keys()), set(["_id", "_cls", "name"])) - doc.update(rename__name='first_name') - raw_doc = coll.find_one({'_id': doc.pk}) - self.assertEqual(set(raw_doc.keys()), - set(['_id', '_cls', 'first_name'])) - self.assertEqual(raw_doc['first_name'], 'John') + doc.update(rename__name="first_name") + raw_doc = coll.find_one({"_id": doc.pk}) + self.assertEqual(set(raw_doc.keys()), set(["_id", "_cls", "first_name"])) + self.assertEqual(raw_doc["first_name"], "John") def test_inserts_if_you_set_the_pk(self): - p1 = self.Person(name='p1', id=bson.ObjectId()).save() - p2 = self.Person(name='p2') + p1 = self.Person(name="p1", id=bson.ObjectId()).save() + p2 = self.Person(name="p2") p2.id = bson.ObjectId() p2.save() @@ -1410,33 +1432,34 @@ class InstanceTest(MongoDBTestCase): pass class Doc(Document): - string_field = 
StringField(default='1') + string_field = StringField(default="1") int_field = IntField(default=1) float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.now) embedded_document_field = EmbeddedDocumentField( - EmbeddedDoc, default=lambda: EmbeddedDoc()) + EmbeddedDoc, default=lambda: EmbeddedDoc() + ) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=bson.ObjectId) - reference_field = ReferenceField(Simple, default=lambda: - Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = ComplexDateTimeField(default=datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) generic_reference_field = GenericReferenceField( - default=lambda: Simple().save()) - sorted_list_field = SortedListField(IntField(), - default=lambda: [1, 2, 3]) + default=lambda: Simple().save() + ) + sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") geo_point_field = GeoPointField(default=lambda: [1, 2]) sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) generic_embedded_document_field = GenericEmbeddedDocumentField( - default=lambda: EmbeddedDoc()) + default=lambda: EmbeddedDoc() + ) Simple.drop_collection() Doc.drop_collection() @@ -1454,13 +1477,13 @@ class InstanceTest(MongoDBTestCase): # try updating a non-saved document with self.assertRaises(OperationError): - person = self.Person(name='dcrosta') - person.update(set__name='Dan Crosta') + person = self.Person(name="dcrosta") + person.update(set__name="Dan Crosta") - author = self.Person(name='dcrosta') + author = self.Person(name="dcrosta") author.save() - author.update(set__name='Dan Crosta') + author.update(set__name="Dan Crosta") author.reload() p1 = self.Person.objects.first() @@ -1490,9 +1513,9 @@ class InstanceTest(MongoDBTestCase): def test_embedded_update(self): """Test update on `EmbeddedDocumentField` fields.""" + class Page(EmbeddedDocument): - log_message = StringField(verbose_name="Log message", - required=True) + log_message = StringField(verbose_name="Log message", required=True) class Site(Document): page = EmbeddedDocumentField(Page) @@ -1512,28 +1535,30 @@ class InstanceTest(MongoDBTestCase): def test_update_list_field(self): """Test update on `ListField` with $pull + $in. """ + class Doc(Document): foo = ListField(StringField()) Doc.drop_collection() - doc = Doc(foo=['a', 'b', 'c']) + doc = Doc(foo=["a", "b", "c"]) doc.save() # Update doc = Doc.objects.first() - doc.update(pull__foo__in=['a', 'c']) + doc.update(pull__foo__in=["a", "c"]) doc = Doc.objects.first() - self.assertEqual(doc.foo, ['b']) + self.assertEqual(doc.foo, ["b"]) def test_embedded_update_db_field(self): """Test update on `EmbeddedDocumentField` fields when db_field is other than default. 
""" + class Page(EmbeddedDocument): - log_message = StringField(verbose_name="Log message", - db_field="page_log_message", - required=True) + log_message = StringField( + verbose_name="Log message", db_field="page_log_message", required=True + ) class Site(Document): page = EmbeddedDocumentField(Page) @@ -1553,13 +1578,14 @@ class InstanceTest(MongoDBTestCase): def test_save_only_changed_fields(self): """Ensure save only sets / unsets changed fields.""" + class User(self.Person): active = BooleanField(default=True) User.drop_collection() # Create person object and save it to the database - user = User(name='Test User', age=30, active=True) + user = User(name="Test User", age=30, active=True) user.save() user.reload() @@ -1570,28 +1596,31 @@ class InstanceTest(MongoDBTestCase): user.age = 21 user.save() - same_person.name = 'User' + same_person.name = "User" same_person.save() person = self.Person.objects.get() - self.assertEqual(person.name, 'User') + self.assertEqual(person.name, "User") self.assertEqual(person.age, 21) self.assertEqual(person.active, False) - def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc(self): + def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc( + self + ): # Refers to Issue #1685 class EmbeddedChildModel(EmbeddedDocument): id = DictField(primary_key=True) class ParentModel(Document): - child = EmbeddedDocumentField( - EmbeddedChildModel) + child = EmbeddedDocumentField(EmbeddedChildModel) - emb = EmbeddedChildModel(id={'1': [1]}) + emb = EmbeddedChildModel(id={"1": [1]}) changed_fields = ParentModel(child=emb)._get_changed_fields() self.assertEqual(changed_fields, []) - def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc(self): + def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc( + self + ): # Refers to Issue #1685 class User(Document): id = IntField(primary_key=True) @@ -1604,12 +1633,12 @@ class InstanceTest(MongoDBTestCase): Message.drop_collection() # All objects share the same id, but each in a different collection - user = User(id=1, name='user-name').save() + user = User(id=1, name="user-name").save() message = Message(id=1, author=user).save() - message.author.name = 'tutu' + message.author.name = "tutu" self.assertEqual(message._get_changed_fields(), []) - self.assertEqual(user._get_changed_fields(), ['name']) + self.assertEqual(user._get_changed_fields(), ["name"]) def test__get_changed_fields_same_ids_embedded(self): # Refers to Issue #1768 @@ -1624,24 +1653,25 @@ class InstanceTest(MongoDBTestCase): Message.drop_collection() # All objects share the same id, but each in a different collection - user = User(id=1, name='user-name') # .save() + user = User(id=1, name="user-name") # .save() message = Message(id=1, author=user).save() - message.author.name = 'tutu' - self.assertEqual(message._get_changed_fields(), ['author.name']) + message.author.name = "tutu" + self.assertEqual(message._get_changed_fields(), ["author.name"]) message.save() message_fetched = Message.objects.with_id(message.id) - self.assertEqual(message_fetched.author.name, 'tutu') + self.assertEqual(message_fetched.author.name, "tutu") def test_query_count_when_saving(self): """Ensure references don't cause extra fetches when saving""" + class Organization(Document): name = StringField() class User(Document): name = StringField() - orgs = ListField(ReferenceField('Organization')) + orgs = 
ListField(ReferenceField("Organization")) class Feed(Document): name = StringField() @@ -1667,9 +1697,9 @@ class InstanceTest(MongoDBTestCase): user = User.objects.first() # Even if stored as ObjectId's internally mongoengine uses DBRefs # As ObjectId's aren't automatically derefenced - self.assertIsInstance(user._data['orgs'][0], DBRef) + self.assertIsInstance(user._data["orgs"][0], DBRef) self.assertIsInstance(user.orgs[0], Organization) - self.assertIsInstance(user._data['orgs'][0], Organization) + self.assertIsInstance(user._data["orgs"][0], Organization) # Changing a value with query_counter() as q: @@ -1731,6 +1761,7 @@ class InstanceTest(MongoDBTestCase): """Ensure that $set and $unset actions are performed in the same operation. """ + class FooBar(Document): foo = StringField(default=None) bar = StringField(default=None) @@ -1738,11 +1769,11 @@ class InstanceTest(MongoDBTestCase): FooBar.drop_collection() # write an entity with a single prop - foo = FooBar(foo='foo').save() + foo = FooBar(foo="foo").save() - self.assertEqual(foo.foo, 'foo') + self.assertEqual(foo.foo, "foo") del foo.foo - foo.bar = 'bar' + foo.bar = "bar" with query_counter() as q: self.assertEqual(0, q) @@ -1751,6 +1782,7 @@ class InstanceTest(MongoDBTestCase): def test_save_only_changed_fields_recursive(self): """Ensure save only sets / unsets changed fields.""" + class Comment(EmbeddedDocument): published = BooleanField(default=True) @@ -1762,7 +1794,7 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() # Create person object and save it to the database - person = User(name='Test User', age=30, active=True) + person = User(name="Test User", age=30, active=True) person.comments.append(Comment()) person.save() person.reload() @@ -1777,17 +1809,17 @@ class InstanceTest(MongoDBTestCase): self.assertFalse(person.comments[0].published) # Simple dict w - person.comments_dict['first_post'] = Comment() + person.comments_dict["first_post"] = Comment() person.save() person = self.Person.objects.get() - self.assertTrue(person.comments_dict['first_post'].published) + self.assertTrue(person.comments_dict["first_post"].published) - person.comments_dict['first_post'].published = False + person.comments_dict["first_post"].published = False person.save() person = self.Person.objects.get() - self.assertFalse(person.comments_dict['first_post'].published) + self.assertFalse(person.comments_dict["first_post"].published) def test_delete(self): """Ensure that document may be deleted using the delete method.""" @@ -1801,31 +1833,30 @@ class InstanceTest(MongoDBTestCase): """Ensure that a document may be saved with a custom _id.""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30, - id='497ce96f395f2f052a494fd4') + person = self.Person(name="Test User", age=30, id="497ce96f395f2f052a494fd4") person.save() # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] - person_obj = collection.find_one({'name': 'Test User'}) - self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') + person_obj = collection.find_one({"name": "Test User"}) + self.assertEqual(str(person_obj["_id"]), "497ce96f395f2f052a494fd4") def test_save_custom_pk(self): """Ensure that a document may be saved with a custom _id using pk alias. 
""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30, - pk='497ce96f395f2f052a494fd4') + person = self.Person(name="Test User", age=30, pk="497ce96f395f2f052a494fd4") person.save() # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] - person_obj = collection.find_one({'name': 'Test User'}) - self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') + person_obj = collection.find_one({"name": "Test User"}) + self.assertEqual(str(person_obj["_id"]), "497ce96f395f2f052a494fd4") def test_save_list(self): """Ensure that a list field may be properly saved.""" + class Comment(EmbeddedDocument): content = StringField() @@ -1836,37 +1867,36 @@ class InstanceTest(MongoDBTestCase): BlogPost.drop_collection() - post = BlogPost(content='Went for a walk today...') - post.tags = tags = ['fun', 'leisure'] - comments = [Comment(content='Good for you'), Comment(content='Yay.')] + post = BlogPost(content="Went for a walk today...") + post.tags = tags = ["fun", "leisure"] + comments = [Comment(content="Good for you"), Comment(content="Yay.")] post.comments = comments post.save() collection = self.db[BlogPost._get_collection_name()] post_obj = collection.find_one() - self.assertEqual(post_obj['tags'], tags) - for comment_obj, comment in zip(post_obj['comments'], comments): - self.assertEqual(comment_obj['content'], comment['content']) + self.assertEqual(post_obj["tags"], tags) + for comment_obj, comment in zip(post_obj["comments"], comments): + self.assertEqual(comment_obj["content"], comment["content"]) def test_list_search_by_embedded(self): class User(Document): username = StringField(required=True) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Comment(EmbeddedDocument): comment = StringField() - user = ReferenceField(User, - required=True) + user = ReferenceField(User, required=True) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Page(Document): comments = ListField(EmbeddedDocumentField(Comment)) - meta = {'allow_inheritance': False, - 'indexes': [ - {'fields': ['comments.user']} - ]} + meta = { + "allow_inheritance": False, + "indexes": [{"fields": ["comments.user"]}], + } User.drop_collection() Page.drop_collection() @@ -1880,14 +1910,22 @@ class InstanceTest(MongoDBTestCase): u3 = User(username="hmarr") u3.save() - p1 = Page(comments=[Comment(user=u1, comment="Its very good"), - Comment(user=u2, comment="Hello world"), - Comment(user=u3, comment="Ping Pong"), - Comment(user=u1, comment="I like a beer")]) + p1 = Page( + comments=[ + Comment(user=u1, comment="Its very good"), + Comment(user=u2, comment="Hello world"), + Comment(user=u3, comment="Ping Pong"), + Comment(user=u1, comment="I like a beer"), + ] + ) p1.save() - p2 = Page(comments=[Comment(user=u1, comment="Its very good"), - Comment(user=u2, comment="Hello world")]) + p2 = Page( + comments=[ + Comment(user=u1, comment="Its very good"), + Comment(user=u2, comment="Hello world"), + ] + ) p2.save() p3 = Page(comments=[Comment(user=u3, comment="Its very good")]) @@ -1896,20 +1934,15 @@ class InstanceTest(MongoDBTestCase): p4 = Page(comments=[Comment(user=u2, comment="Heavy Metal song")]) p4.save() - self.assertEqual( - [p1, p2], - list(Page.objects.filter(comments__user=u1))) - self.assertEqual( - [p1, p2, p4], - list(Page.objects.filter(comments__user=u2))) - self.assertEqual( - [p1, p3], - list(Page.objects.filter(comments__user=u3))) + 
self.assertEqual([p1, p2], list(Page.objects.filter(comments__user=u1))) + self.assertEqual([p1, p2, p4], list(Page.objects.filter(comments__user=u2))) + self.assertEqual([p1, p3], list(Page.objects.filter(comments__user=u3))) def test_save_embedded_document(self): """Ensure that a document with an embedded document field may be saved in the database. """ + class EmployeeDetails(EmbeddedDocument): position = StringField() @@ -1918,26 +1951,26 @@ class InstanceTest(MongoDBTestCase): details = EmbeddedDocumentField(EmployeeDetails) # Create employee object and save it to the database - employee = Employee(name='Test Employee', age=50, salary=20000) - employee.details = EmployeeDetails(position='Developer') + employee = Employee(name="Test Employee", age=50, salary=20000) + employee.details = EmployeeDetails(position="Developer") employee.save() # Ensure that the object is in the database collection = self.db[self.Person._get_collection_name()] - employee_obj = collection.find_one({'name': 'Test Employee'}) - self.assertEqual(employee_obj['name'], 'Test Employee') - self.assertEqual(employee_obj['age'], 50) + employee_obj = collection.find_one({"name": "Test Employee"}) + self.assertEqual(employee_obj["name"], "Test Employee") + self.assertEqual(employee_obj["age"], 50) # Ensure that the 'details' embedded object saved correctly - self.assertEqual(employee_obj['details']['position'], 'Developer') + self.assertEqual(employee_obj["details"]["position"], "Developer") def test_embedded_update_after_save(self): """Test update of `EmbeddedDocumentField` attached to a newly saved document. """ + class Page(EmbeddedDocument): - log_message = StringField(verbose_name="Log message", - required=True) + log_message = StringField(verbose_name="Log message", required=True) class Site(Document): page = EmbeddedDocumentField(Page) @@ -1957,6 +1990,7 @@ class InstanceTest(MongoDBTestCase): """Ensure that a document with an embedded document field may be saved in the database. 
""" + class EmployeeDetails(EmbeddedDocument): position = StringField() @@ -1965,22 +1999,21 @@ class InstanceTest(MongoDBTestCase): details = EmbeddedDocumentField(EmployeeDetails) # Create employee object and save it to the database - employee = Employee(name='Test Employee', age=50, salary=20000) - employee.details = EmployeeDetails(position='Developer') + employee = Employee(name="Test Employee", age=50, salary=20000) + employee.details = EmployeeDetails(position="Developer") employee.save() # Test updating an embedded document - promoted_employee = Employee.objects.get(name='Test Employee') - promoted_employee.details.position = 'Senior Developer' + promoted_employee = Employee.objects.get(name="Test Employee") + promoted_employee.details.position = "Senior Developer" promoted_employee.save() promoted_employee.reload() - self.assertEqual(promoted_employee.name, 'Test Employee') + self.assertEqual(promoted_employee.name, "Test Employee") self.assertEqual(promoted_employee.age, 50) # Ensure that the 'details' embedded object saved correctly - self.assertEqual( - promoted_employee.details.position, 'Senior Developer') + self.assertEqual(promoted_employee.details.position, "Senior Developer") # Test removal promoted_employee.details = None @@ -1996,12 +2029,12 @@ class InstanceTest(MongoDBTestCase): class Foo(EmbeddedDocument, NameMixin): quantity = IntField() - self.assertEqual(['name', 'quantity'], sorted(Foo._fields.keys())) + self.assertEqual(["name", "quantity"], sorted(Foo._fields.keys())) class Bar(Document, NameMixin): widgets = StringField() - self.assertEqual(['id', 'name', 'widgets'], sorted(Bar._fields.keys())) + self.assertEqual(["id", "name", "widgets"], sorted(Bar._fields.keys())) def test_mixin_inheritance(self): class BaseMixIn(object): @@ -2015,8 +2048,7 @@ class InstanceTest(MongoDBTestCase): age = IntField() TestDoc.drop_collection() - t = TestDoc(count=12, data="test", - comment="great!", age=19) + t = TestDoc(count=12, data="test", comment="great!", age=19) t.save() @@ -2031,17 +2063,18 @@ class InstanceTest(MongoDBTestCase): """Ensure that a document reference field may be saved in the database. """ + class BlogPost(Document): - meta = {'collection': 'blogpost_1'} + meta = {"collection": "blogpost_1"} content = StringField() author = ReferenceField(self.Person) BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - post = BlogPost(content='Watched some TV today... how exciting.') + post = BlogPost(content="Watched some TV today... how exciting.") # Should only reference author when saving post.author = author post.save() @@ -2049,15 +2082,15 @@ class InstanceTest(MongoDBTestCase): post_obj = BlogPost.objects.first() # Test laziness - self.assertIsInstance(post_obj._data['author'], bson.DBRef) + self.assertIsInstance(post_obj._data["author"], bson.DBRef) self.assertIsInstance(post_obj.author, self.Person) - self.assertEqual(post_obj.author.name, 'Test User') + self.assertEqual(post_obj.author.name, "Test User") # Ensure that the dereferenced object may be changed and saved post_obj.author.age = 25 post_obj.author.save() - author = list(self.Person.objects(name='Test User'))[-1] + author = list(self.Person.objects(name="Test User"))[-1] self.assertEqual(author.age, 25) def test_duplicate_db_fields_raise_invalid_document_error(self): @@ -2065,12 +2098,14 @@ class InstanceTest(MongoDBTestCase): declare the same db_field. 
""" with self.assertRaises(InvalidDocumentError): + class Foo(Document): name = StringField() - name2 = StringField(db_field='name') + name2 = StringField(db_field="name") def test_invalid_son(self): """Raise an error if loading invalid data.""" + class Occurrence(EmbeddedDocument): number = IntField() @@ -2081,21 +2116,24 @@ class InstanceTest(MongoDBTestCase): occurs = ListField(EmbeddedDocumentField(Occurrence), default=list) with self.assertRaises(InvalidDocumentError): - Word._from_son({ - 'stem': [1, 2, 3], - 'forms': 1, - 'count': 'one', - 'occurs': {"hello": None} - }) + Word._from_son( + { + "stem": [1, 2, 3], + "forms": 1, + "count": "one", + "occurs": {"hello": None}, + } + ) # Tests for issue #1438: https://github.com/MongoEngine/mongoengine/issues/1438 with self.assertRaises(ValueError): - Word._from_son('this is not a valid SON dict') + Word._from_son("this is not a valid SON dict") def test_reverse_delete_rule_cascade_and_nullify(self): """Ensure that a referenced document is also deleted upon deletion. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) @@ -2104,13 +2142,13 @@ class InstanceTest(MongoDBTestCase): self.Person.drop_collection() BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - reviewer = self.Person(name='Re Viewer') + reviewer = self.Person(name="Re Viewer") reviewer.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.reviewer = reviewer post.save() @@ -2128,24 +2166,26 @@ class InstanceTest(MongoDBTestCase): """Ensure that a referenced document is also deleted with pull. """ + class Record(Document): name = StringField() - children = ListField(ReferenceField('self', reverse_delete_rule=PULL)) + children = ListField(ReferenceField("self", reverse_delete_rule=PULL)) Record.drop_collection() - parent_record = Record(name='parent').save() - child_record = Record(name='child').save() + parent_record = Record(name="parent").save() + child_record = Record(name="child").save() parent_record.children.append(child_record) parent_record.save() child_record.delete() - self.assertEqual(Record.objects(name='parent').get().children, []) + self.assertEqual(Record.objects(name="parent").get().children, []) def test_reverse_delete_rule_with_custom_id_field(self): """Ensure that a referenced document with custom primary key is also deleted upon deletion. """ + class User(Document): name = StringField(primary_key=True) @@ -2156,8 +2196,8 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() Book.drop_collection() - user = User(name='Mike').save() - reviewer = User(name='John').save() + user = User(name="Mike").save() + reviewer = User(name="John").save() book = Book(author=user, reviewer=reviewer).save() reviewer.delete() @@ -2171,6 +2211,7 @@ class InstanceTest(MongoDBTestCase): """Ensure that cascade delete rule doesn't mix id among collections. """ + class User(Document): id = IntField(primary_key=True) @@ -2203,6 +2244,7 @@ class InstanceTest(MongoDBTestCase): """Ensure that a referenced document is also deleted upon deletion of a child document. 
""" + class Writer(self.Person): pass @@ -2214,13 +2256,13 @@ class InstanceTest(MongoDBTestCase): self.Person.drop_collection() BlogPost.drop_collection() - author = Writer(name='Test User') + author = Writer(name="Test User") author.save() - reviewer = Writer(name='Re Viewer') + reviewer = Writer(name="Re Viewer") reviewer.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.reviewer = reviewer post.save() @@ -2237,23 +2279,26 @@ class InstanceTest(MongoDBTestCase): """Ensure that a referenced document is also deleted upon deletion for complex fields. """ + class BlogPost(Document): content = StringField() - authors = ListField(ReferenceField( - self.Person, reverse_delete_rule=CASCADE)) - reviewers = ListField(ReferenceField( - self.Person, reverse_delete_rule=NULLIFY)) + authors = ListField( + ReferenceField(self.Person, reverse_delete_rule=CASCADE) + ) + reviewers = ListField( + ReferenceField(self.Person, reverse_delete_rule=NULLIFY) + ) self.Person.drop_collection() BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - reviewer = self.Person(name='Re Viewer') + reviewer = self.Person(name="Re Viewer") reviewer.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.authors = [author] post.reviewers = [reviewer] post.save() @@ -2273,6 +2318,7 @@ class InstanceTest(MongoDBTestCase): delete the author which triggers deletion of blogpost via cascade blog post's pre_delete signal alters an editor attribute. """ + class Editor(self.Person): review_queue = IntField(default=0) @@ -2292,32 +2338,32 @@ class InstanceTest(MongoDBTestCase): BlogPost.drop_collection() Editor.drop_collection() - author = self.Person(name='Will S.').save() - editor = Editor(name='Max P.', review_queue=1).save() - BlogPost(content='wrote some books', author=author, - editor=editor).save() + author = self.Person(name="Will S.").save() + editor = Editor(name="Max P.", review_queue=1).save() + BlogPost(content="wrote some books", author=author, editor=editor).save() # delete the author, the post is also deleted due to the CASCADE rule author.delete() # the pre-delete signal should have decremented the editor's queue - editor = Editor.objects(name='Max P.').get() + editor = Editor.objects(name="Max P.").get() self.assertEqual(editor.review_queue, 0) def test_two_way_reverse_delete_rule(self): """Ensure that Bi-Directional relationships work with reverse_delete_rule """ + class Bar(Document): content = StringField() - foo = ReferenceField('Foo') + foo = ReferenceField("Foo") class Foo(Document): content = StringField() bar = ReferenceField(Bar) - Bar.register_delete_rule(Foo, 'bar', NULLIFY) - Foo.register_delete_rule(Bar, 'foo', NULLIFY) + Bar.register_delete_rule(Foo, "bar", NULLIFY) + Foo.register_delete_rule(Bar, "foo", NULLIFY) Bar.drop_collection() Foo.drop_collection() @@ -2338,24 +2384,27 @@ class InstanceTest(MongoDBTestCase): def test_invalid_reverse_delete_rule_raise_errors(self): with self.assertRaises(InvalidDocumentError): + class Blog(Document): content = StringField() - authors = MapField(ReferenceField( - self.Person, reverse_delete_rule=CASCADE)) + authors = MapField( + ReferenceField(self.Person, reverse_delete_rule=CASCADE) + ) reviewers = DictField( - field=ReferenceField( - self.Person, - reverse_delete_rule=NULLIFY)) + field=ReferenceField(self.Person, reverse_delete_rule=NULLIFY) + ) with 
self.assertRaises(InvalidDocumentError): + class Parents(EmbeddedDocument): - father = ReferenceField('Person', reverse_delete_rule=DENY) - mother = ReferenceField('Person', reverse_delete_rule=DENY) + father = ReferenceField("Person", reverse_delete_rule=DENY) + mother = ReferenceField("Person", reverse_delete_rule=DENY) def test_reverse_delete_rule_cascade_recurs(self): """Ensure that a chain of documents is also deleted upon cascaded deletion. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) @@ -2368,14 +2417,14 @@ class InstanceTest(MongoDBTestCase): BlogPost.drop_collection() Comment.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.save() - comment = Comment(text='Kudos.') + comment = Comment(text="Kudos.") comment.post = post comment.save() @@ -2388,6 +2437,7 @@ class InstanceTest(MongoDBTestCase): """Ensure that a document cannot be referenced if there are still documents referring to it. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=DENY) @@ -2395,20 +2445,22 @@ class InstanceTest(MongoDBTestCase): self.Person.drop_collection() BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.save() # Delete the Person should be denied self.assertRaises(OperationError, author.delete) # Should raise denied error - self.assertEqual(BlogPost.objects.count(), 1) # No objects may have been deleted + self.assertEqual( + BlogPost.objects.count(), 1 + ) # No objects may have been deleted self.assertEqual(self.Person.objects.count(), 1) # Other users, that don't have BlogPosts must be removable, like normal - author = self.Person(name='Another User') + author = self.Person(name="Another User") author.save() self.assertEqual(self.Person.objects.count(), 2) @@ -2434,6 +2486,7 @@ class InstanceTest(MongoDBTestCase): def test_document_hash(self): """Test document in list, dict, set.""" + class User(Document): pass @@ -2491,9 +2544,11 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(len(all_user_set), 3) def test_picklable(self): - pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) + pickle_doc = PickleTest(number=1, string="One", lists=["1", "2"]) pickle_doc.embedded = PickleEmbedded() - pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + pickled_doc = pickle.dumps( + pickle_doc + ) # make sure pickling works even before the doc is saved pickle_doc.save() pickled_doc = pickle.dumps(pickle_doc) @@ -2516,8 +2571,10 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) def test_regular_document_pickle(self): - pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) - pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + pickle_doc = PickleTest(number=1, string="One", lists=["1", "2"]) + pickled_doc = pickle.dumps( + pickle_doc + ) # make sure pickling works even before the doc is saved pickle_doc.save() pickled_doc = pickle.dumps(pickle_doc) @@ -2527,21 +2584,23 @@ class InstanceTest(MongoDBTestCase): fixtures.PickleTest = fixtures.NewDocumentPickleTest 
resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected.__class__, - fixtures.NewDocumentPickleTest) - self.assertEqual(resurrected._fields_ordered, - fixtures.NewDocumentPickleTest._fields_ordered) - self.assertNotEqual(resurrected._fields_ordered, - pickle_doc._fields_ordered) + self.assertEqual(resurrected.__class__, fixtures.NewDocumentPickleTest) + self.assertEqual( + resurrected._fields_ordered, fixtures.NewDocumentPickleTest._fields_ordered + ) + self.assertNotEqual(resurrected._fields_ordered, pickle_doc._fields_ordered) # The local PickleTest is still a ref to the original fixtures.PickleTest = PickleTest def test_dynamic_document_pickle(self): pickle_doc = PickleDynamicTest( - name="test", number=1, string="One", lists=['1', '2']) + name="test", number=1, string="One", lists=["1", "2"] + ) pickle_doc.embedded = PickleDynamicEmbedded(foo="Bar") - pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + pickled_doc = pickle.dumps( + pickle_doc + ) # make sure pickling works even before the doc is saved pickle_doc.save() @@ -2549,20 +2608,22 @@ class InstanceTest(MongoDBTestCase): resurrected = pickle.loads(pickled_doc) self.assertEqual(resurrected, pickle_doc) - self.assertEqual(resurrected._fields_ordered, - pickle_doc._fields_ordered) - self.assertEqual(resurrected._dynamic_fields.keys(), - pickle_doc._dynamic_fields.keys()) + self.assertEqual(resurrected._fields_ordered, pickle_doc._fields_ordered) + self.assertEqual( + resurrected._dynamic_fields.keys(), pickle_doc._dynamic_fields.keys() + ) self.assertEqual(resurrected.embedded, pickle_doc.embedded) - self.assertEqual(resurrected.embedded._fields_ordered, - pickle_doc.embedded._fields_ordered) - self.assertEqual(resurrected.embedded._dynamic_fields.keys(), - pickle_doc.embedded._dynamic_fields.keys()) + self.assertEqual( + resurrected.embedded._fields_ordered, pickle_doc.embedded._fields_ordered + ) + self.assertEqual( + resurrected.embedded._dynamic_fields.keys(), + pickle_doc.embedded._dynamic_fields.keys(), + ) def test_picklable_on_signals(self): - pickle_doc = PickleSignalsTest( - number=1, string="One", lists=['1', '2']) + pickle_doc = PickleSignalsTest(number=1, string="One", lists=["1", "2"]) pickle_doc.embedded = PickleEmbedded() pickle_doc.save() pickle_doc.delete() @@ -2572,12 +2633,13 @@ class InstanceTest(MongoDBTestCase): the "validate" method. 
""" with self.assertRaises(InvalidDocumentError): + class Blog(Document): validate = DictField() def test_mutating_documents(self): class B(EmbeddedDocument): - field1 = StringField(default='field1') + field1 = StringField(default="field1") class A(Document): b = EmbeddedDocumentField(B, default=lambda: B()) @@ -2587,27 +2649,28 @@ class InstanceTest(MongoDBTestCase): a = A() a.save() a.reload() - self.assertEqual(a.b.field1, 'field1') + self.assertEqual(a.b.field1, "field1") class C(EmbeddedDocument): - c_field = StringField(default='cfield') + c_field = StringField(default="cfield") class B(EmbeddedDocument): - field1 = StringField(default='field1') + field1 = StringField(default="field1") field2 = EmbeddedDocumentField(C, default=lambda: C()) class A(Document): b = EmbeddedDocumentField(B, default=lambda: B()) a = A.objects()[0] - a.b.field2.c_field = 'new value' + a.b.field2.c_field = "new value" a.save() a.reload() - self.assertEqual(a.b.field2.c_field, 'new value') + self.assertEqual(a.b.field2.c_field, "new value") def test_can_save_false_values(self): """Ensures you can save False values on save.""" + class Doc(Document): foo = StringField() archived = BooleanField(default=False, required=True) @@ -2623,6 +2686,7 @@ class InstanceTest(MongoDBTestCase): def test_can_save_false_values_dynamic(self): """Ensures you can save False values on dynamic docs.""" + class Doc(DynamicDocument): foo = StringField() @@ -2637,6 +2701,7 @@ class InstanceTest(MongoDBTestCase): def test_do_not_save_unchanged_references(self): """Ensures cascading saves dont auto update""" + class Job(Document): name = StringField() @@ -2655,8 +2720,10 @@ class InstanceTest(MongoDBTestCase): person = Person(name="name", age=10, job=job) from pymongo.collection import Collection + orig_update = Collection.update try: + def fake_update(*args, **kwargs): self.fail("Unexpected update for %s" % args[0].name) return orig_update(*args, **kwargs) @@ -2670,9 +2737,9 @@ class InstanceTest(MongoDBTestCase): """DB Alias tests.""" # mongoenginetest - Is default connection alias from setUp() # Register Aliases - register_connection('testdb-1', 'mongoenginetest2') - register_connection('testdb-2', 'mongoenginetest3') - register_connection('testdb-3', 'mongoenginetest4') + register_connection("testdb-1", "mongoenginetest2") + register_connection("testdb-2", "mongoenginetest3") + register_connection("testdb-3", "mongoenginetest4") class User(Document): name = StringField() @@ -2719,42 +2786,43 @@ class InstanceTest(MongoDBTestCase): # Collections self.assertEqual( - User._get_collection(), - get_db("testdb-1")[User._get_collection_name()]) + User._get_collection(), get_db("testdb-1")[User._get_collection_name()] + ) self.assertEqual( - Book._get_collection(), - get_db("testdb-2")[Book._get_collection_name()]) + Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()] + ) self.assertEqual( AuthorBooks._get_collection(), - get_db("testdb-3")[AuthorBooks._get_collection_name()]) + get_db("testdb-3")[AuthorBooks._get_collection_name()], + ) def test_db_alias_overrides(self): """Test db_alias can be overriden.""" # Register a connection with db_alias testdb-2 - register_connection('testdb-2', 'mongoenginetest2') + register_connection("testdb-2", "mongoenginetest2") class A(Document): """Uses default db_alias """ + name = StringField() meta = {"allow_inheritance": True} class B(A): """Uses testdb-2 db_alias """ + meta = {"db_alias": "testdb-2"} A.objects.all() - self.assertEqual('testdb-2', B._meta.get('db_alias')) - 
self.assertEqual('mongoenginetest', - A._get_collection().database.name) - self.assertEqual('mongoenginetest2', - B._get_collection().database.name) + self.assertEqual("testdb-2", B._meta.get("db_alias")) + self.assertEqual("mongoenginetest", A._get_collection().database.name) + self.assertEqual("mongoenginetest2", B._get_collection().database.name) def test_db_alias_propagates(self): """db_alias propagates?""" - register_connection('testdb-1', 'mongoenginetest2') + register_connection("testdb-1", "mongoenginetest2") class A(Document): name = StringField() @@ -2763,10 +2831,11 @@ class InstanceTest(MongoDBTestCase): class B(A): pass - self.assertEqual('testdb-1', B._meta.get('db_alias')) + self.assertEqual("testdb-1", B._meta.get("db_alias")) def test_db_ref_usage(self): """DB Ref usage in dict_fields.""" + class User(Document): name = StringField() @@ -2774,9 +2843,7 @@ class InstanceTest(MongoDBTestCase): name = StringField() author = ReferenceField(User) extra = DictField() - meta = { - 'ordering': ['+name'] - } + meta = {"ordering": ["+name"]} def __unicode__(self): return self.name @@ -2798,12 +2865,19 @@ class InstanceTest(MongoDBTestCase): peter = User.objects.create(name="Peter") # Bob - Book.objects.create(name="1", author=bob, extra={ - "a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]}) - Book.objects.create(name="2", author=bob, extra={ - "a": bob.to_dbref(), "b": karl.to_dbref()}) - Book.objects.create(name="3", author=bob, extra={ - "a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]}) + Book.objects.create( + name="1", + author=bob, + extra={"a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]}, + ) + Book.objects.create( + name="2", author=bob, extra={"a": bob.to_dbref(), "b": karl.to_dbref()} + ) + Book.objects.create( + name="3", + author=bob, + extra={"a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]}, + ) Book.objects.create(name="4", author=bob) # Jon @@ -2811,56 +2885,77 @@ class InstanceTest(MongoDBTestCase): Book.objects.create(name="6", author=peter) Book.objects.create(name="7", author=jon) Book.objects.create(name="8", author=jon) - Book.objects.create(name="9", author=jon, - extra={"a": peter.to_dbref()}) + Book.objects.create(name="9", author=jon, extra={"a": peter.to_dbref()}) # Checks - self.assertEqual(",".join([str(b) for b in Book.objects.all()]), - "1,2,3,4,5,6,7,8,9") + self.assertEqual( + ",".join([str(b) for b in Book.objects.all()]), "1,2,3,4,5,6,7,8,9" + ) # bob related books - self.assertEqual(",".join([str(b) for b in Book.objects.filter( - Q(extra__a=bob) | - Q(author=bob) | - Q(extra__b=bob))]), - "1,2,3,4") + self.assertEqual( + ",".join( + [ + str(b) + for b in Book.objects.filter( + Q(extra__a=bob) | Q(author=bob) | Q(extra__b=bob) + ) + ] + ), + "1,2,3,4", + ) # Susan & Karl related books - self.assertEqual(",".join([str(b) for b in Book.objects.filter( - Q(extra__a__all=[karl, susan]) | - Q(author__all=[karl, susan]) | - Q(extra__b__all=[ - karl.to_dbref(), susan.to_dbref()])) - ]), "1") + self.assertEqual( + ",".join( + [ + str(b) + for b in Book.objects.filter( + Q(extra__a__all=[karl, susan]) + | Q(author__all=[karl, susan]) + | Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()]) + ) + ] + ), + "1", + ) # $Where - self.assertEqual(u",".join([str(b) for b in Book.objects.filter( - __raw__={ - "$where": """ + self.assertEqual( + u",".join( + [ + str(b) + for b in Book.objects.filter( + __raw__={ + "$where": """ function(){ return this.name == '1' || this.name == '2';}""" - })]), - "1,2") + } + ) + ] 
+ ), + "1,2", + ) def test_switch_db_instance(self): - register_connection('testdb-1', 'mongoenginetest2') + register_connection("testdb-1", "mongoenginetest2") class Group(Document): name = StringField() Group.drop_collection() - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: Group.drop_collection() Group(name="hello - default").save() self.assertEqual(1, Group.objects.count()) group = Group.objects.first() - group.switch_db('testdb-1') + group.switch_db("testdb-1") group.name = "hello - testdb!" group.save() - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: group = Group.objects.first() self.assertEqual("hello - testdb!", group.name) @@ -2869,10 +2964,10 @@ class InstanceTest(MongoDBTestCase): # Slightly contrived now - perform an update # Only works as they have the same object_id - group.switch_db('testdb-1') + group.switch_db("testdb-1") group.update(set__name="hello - update") - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: group = Group.objects.first() self.assertEqual("hello - update", group.name) Group.drop_collection() @@ -2883,10 +2978,10 @@ class InstanceTest(MongoDBTestCase): # Totally contrived now - perform a delete # Only works as they have the same object_id - group.switch_db('testdb-1') + group.switch_db("testdb-1") group.delete() - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: self.assertEqual(0, Group.objects.count()) group = Group.objects.first() @@ -2898,11 +2993,9 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'foo': 'Bar', - 'data': [1, 2, 3] - }) + User._get_collection().insert_one( + {"name": "John", "foo": "Bar", "data": [1, 2, 3]} + ) self.assertRaises(FieldDoesNotExist, User.objects.first) @@ -2910,22 +3003,20 @@ class InstanceTest(MongoDBTestCase): class User(Document): name = StringField() - meta = {'strict': False} + meta = {"strict": False} User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'foo': 'Bar', - 'data': [1, 2, 3] - }) + User._get_collection().insert_one( + {"name": "John", "foo": "Bar", "data": [1, 2, 3]} + ) user = User.objects.first() - self.assertEqual(user.name, 'John') - self.assertFalse(hasattr(user, 'foo')) - self.assertEqual(user._data['foo'], 'Bar') - self.assertFalse(hasattr(user, 'data')) - self.assertEqual(user._data['data'], [1, 2, 3]) + self.assertEqual(user.name, "John") + self.assertFalse(hasattr(user, "foo")) + self.assertEqual(user._data["foo"], "Bar") + self.assertFalse(hasattr(user, "data")) + self.assertEqual(user._data["data"], [1, 2, 3]) def test_load_undefined_fields_on_embedded_document(self): class Thing(EmbeddedDocument): @@ -2937,14 +3028,12 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'thing': { - 'name': 'My thing', - 'foo': 'Bar', - 'data': [1, 2, 3] + User._get_collection().insert_one( + { + "name": "John", + "thing": {"name": "My thing", "foo": "Bar", "data": [1, 2, 3]}, } - }) + ) self.assertRaises(FieldDoesNotExist, User.objects.first) @@ -2956,18 +3045,16 @@ class InstanceTest(MongoDBTestCase): name = StringField() thing = EmbeddedDocumentField(Thing) - meta = {'strict': False} + meta = {"strict": False} User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'thing': { - 'name': 'My thing', - 'foo': 'Bar', - 'data': [1, 2, 3] + 
User._get_collection().insert_one( + { + "name": "John", + "thing": {"name": "My thing", "foo": "Bar", "data": [1, 2, 3]}, } - }) + ) self.assertRaises(FieldDoesNotExist, User.objects.first) @@ -2975,7 +3062,7 @@ class InstanceTest(MongoDBTestCase): class Thing(EmbeddedDocument): name = StringField() - meta = {'strict': False} + meta = {"strict": False} class User(Document): name = StringField() @@ -2983,22 +3070,20 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'thing': { - 'name': 'My thing', - 'foo': 'Bar', - 'data': [1, 2, 3] + User._get_collection().insert_one( + { + "name": "John", + "thing": {"name": "My thing", "foo": "Bar", "data": [1, 2, 3]}, } - }) + ) user = User.objects.first() - self.assertEqual(user.name, 'John') - self.assertEqual(user.thing.name, 'My thing') - self.assertFalse(hasattr(user.thing, 'foo')) - self.assertEqual(user.thing._data['foo'], 'Bar') - self.assertFalse(hasattr(user.thing, 'data')) - self.assertEqual(user.thing._data['data'], [1, 2, 3]) + self.assertEqual(user.name, "John") + self.assertEqual(user.thing.name, "My thing") + self.assertFalse(hasattr(user.thing, "foo")) + self.assertEqual(user.thing._data["foo"], "Bar") + self.assertFalse(hasattr(user.thing, "data")) + self.assertEqual(user.thing._data["data"], [1, 2, 3]) def test_spaces_in_keys(self): class Embedded(DynamicEmbeddedDocument): @@ -3009,10 +3094,10 @@ class InstanceTest(MongoDBTestCase): Doc.drop_collection() doc = Doc() - setattr(doc, 'hello world', 1) + setattr(doc, "hello world", 1) doc.save() - one = Doc.objects.filter(**{'hello world': 1}).count() + one = Doc.objects.filter(**{"hello world": 1}).count() self.assertEqual(1, one) def test_shard_key(self): @@ -3020,9 +3105,7 @@ class InstanceTest(MongoDBTestCase): machine = StringField() log = StringField() - meta = { - 'shard_key': ('machine',) - } + meta = {"shard_key": ("machine",)} LogEntry.drop_collection() @@ -3044,24 +3127,22 @@ class InstanceTest(MongoDBTestCase): foo = StringField() class Bar(Document): - meta = { - 'shard_key': ('foo.foo',) - } + meta = {"shard_key": ("foo.foo",)} foo = EmbeddedDocumentField(Foo) bar = StringField() - foo_doc = Foo(foo='hello') - bar_doc = Bar(foo=foo_doc, bar='world') + foo_doc = Foo(foo="hello") + bar_doc = Bar(foo=foo_doc, bar="world") bar_doc.save() self.assertTrue(bar_doc.id is not None) - bar_doc.bar = 'baz' + bar_doc.bar = "baz" bar_doc.save() # try to change the shard key with self.assertRaises(OperationError): - bar_doc.foo.foo = 'something' + bar_doc.foo.foo = "something" bar_doc.save() def test_shard_key_primary(self): @@ -3069,9 +3150,7 @@ class InstanceTest(MongoDBTestCase): machine = StringField(primary_key=True) log = StringField() - meta = { - 'shard_key': ('machine',) - } + meta = {"shard_key": ("machine",)} LogEntry.drop_collection() @@ -3097,12 +3176,10 @@ class InstanceTest(MongoDBTestCase): doc = EmbeddedDocumentField(Embedded) def __eq__(self, other): - return (self.doc_name == other.doc_name and - self.doc == other.doc) + return self.doc_name == other.doc_name and self.doc == other.doc classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc")) - dict_doc = Doc(**{"doc_name": "my doc", - "doc": {"name": "embedded doc"}}) + dict_doc = Doc(**{"doc_name": "my doc", "doc": {"name": "embedded doc"}}) self.assertEqual(classic_doc, dict_doc) self.assertEqual(classic_doc._data, dict_doc._data) @@ -3116,15 +3193,18 @@ class InstanceTest(MongoDBTestCase): docs = 
ListField(EmbeddedDocumentField(Embedded)) def __eq__(self, other): - return (self.doc_name == other.doc_name and - self.docs == other.docs) + return self.doc_name == other.doc_name and self.docs == other.docs - classic_doc = Doc(doc_name="my doc", docs=[ - Embedded(name="embedded doc1"), - Embedded(name="embedded doc2")]) - dict_doc = Doc(**{"doc_name": "my doc", - "docs": [{"name": "embedded doc1"}, - {"name": "embedded doc2"}]}) + classic_doc = Doc( + doc_name="my doc", + docs=[Embedded(name="embedded doc1"), Embedded(name="embedded doc2")], + ) + dict_doc = Doc( + **{ + "doc_name": "my doc", + "docs": [{"name": "embedded doc1"}, {"name": "embedded doc2"}], + } + ) self.assertEqual(classic_doc, dict_doc) self.assertEqual(classic_doc._data, dict_doc._data) @@ -3134,8 +3214,8 @@ class InstanceTest(MongoDBTestCase): with self.assertRaises(TypeError) as e: person = self.Person("Test User", 42) expected_msg = ( - 'Instantiating a document with positional arguments is not ' - 'supported. Please use `field_name=value` keyword arguments.' + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." ) self.assertEqual(str(e.exception), expected_msg) @@ -3144,8 +3224,8 @@ class InstanceTest(MongoDBTestCase): with self.assertRaises(TypeError) as e: person = self.Person("Test User", age=42) expected_msg = ( - 'Instantiating a document with positional arguments is not ' - 'supported. Please use `field_name=value` keyword arguments.' + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." ) self.assertEqual(str(e.exception), expected_msg) @@ -3154,8 +3234,8 @@ class InstanceTest(MongoDBTestCase): with self.assertRaises(TypeError) as e: job = self.Job("Test Job", 4) expected_msg = ( - 'Instantiating a document with positional arguments is not ' - 'supported. Please use `field_name=value` keyword arguments.' + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." ) self.assertEqual(str(e.exception), expected_msg) @@ -3164,13 +3244,14 @@ class InstanceTest(MongoDBTestCase): with self.assertRaises(TypeError) as e: job = self.Job("Test Job", years=4) expected_msg = ( - 'Instantiating a document with positional arguments is not ' - 'supported. Please use `field_name=value` keyword arguments.' + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." 
) self.assertEqual(str(e.exception), expected_msg) def test_data_contains_id_field(self): """Ensure that asking for _data returns 'id'.""" + class Person(Document): name = StringField() @@ -3178,8 +3259,8 @@ class InstanceTest(MongoDBTestCase): Person(name="Harry Potter").save() person = Person.objects.first() - self.assertIn('id', person._data.keys()) - self.assertEqual(person._data.get('id'), person.id) + self.assertIn("id", person._data.keys()) + self.assertEqual(person._data.get("id"), person.id) def test_complex_nesting_document_and_embedded_document(self): class Macro(EmbeddedDocument): @@ -3220,8 +3301,8 @@ class InstanceTest(MongoDBTestCase): system = NodesSystem.objects.first() self.assertEqual( - "UNDEFINED", - system.nodes["node"].parameters["param"].macros["test"].value) + "UNDEFINED", system.nodes["node"].parameters["param"].macros["test"].value + ) def test_embedded_document_equality(self): class Test(Document): @@ -3231,7 +3312,7 @@ class InstanceTest(MongoDBTestCase): ref = ReferenceField(Test) Test.drop_collection() - test = Test(field='123').save() # has id + test = Test(field="123").save() # has id e = Embedded(ref=test) f1 = Embedded._from_son(e.to_mongo()) @@ -3250,25 +3331,25 @@ class InstanceTest(MongoDBTestCase): class Test(Document): name = StringField() - test2 = ReferenceField('Test2') - test3 = ReferenceField('Test3') + test2 = ReferenceField("Test2") + test3 = ReferenceField("Test3") Test.drop_collection() Test2.drop_collection() Test3.drop_collection() - t2 = Test2(name='a') + t2 = Test2(name="a") t2.save() - t3 = Test3(name='x') + t3 = Test3(name="x") t3.id = t2.id t3.save() - t = Test(name='b', test2=t2, test3=t3) + t = Test(name="b", test2=t2, test3=t3) f = Test._from_son(t.to_mongo()) - dbref2 = f._data['test2'] + dbref2 = f._data["test2"] obj2 = f.test2 self.assertIsInstance(dbref2, DBRef) self.assertIsInstance(obj2, Test2) @@ -3276,7 +3357,7 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(obj2, dbref2) self.assertEqual(dbref2, obj2) - dbref3 = f._data['test3'] + dbref3 = f._data["test3"] obj3 = f.test3 self.assertIsInstance(dbref3, DBRef) self.assertIsInstance(obj3, Test3) @@ -3306,14 +3387,14 @@ class InstanceTest(MongoDBTestCase): created_on = DateTimeField(default=lambda: datetime.utcnow()) name = StringField() - p = Person(name='alon') + p = Person(name="alon") p.save() - orig_created_on = Person.objects().only('created_on')[0].created_on + orig_created_on = Person.objects().only("created_on")[0].created_on - p2 = Person.objects().only('name')[0] - p2.name = 'alon2' + p2 = Person.objects().only("name")[0] + p2.name = "alon2" p2.save() - p3 = Person.objects().only('created_on')[0] + p3 = Person.objects().only("created_on")[0] self.assertEqual(orig_created_on, p3.created_on) class Person(Document): @@ -3331,8 +3412,8 @@ class InstanceTest(MongoDBTestCase): # alter DB for the new default coll = Person._get_collection() for person in Person.objects.as_pymongo(): - if 'height' not in person: - coll.update_one({'_id': person['_id']}, {'$set': {'height': 189}}) + if "height" not in person: + coll.update_one({"_id": person["_id"]}, {"$set": {"height": 189}}) self.assertEqual(Person.objects(height=189).count(), 1) @@ -3340,12 +3421,17 @@ class InstanceTest(MongoDBTestCase): # 771 class MyPerson(self.Person): meta = dict(shard_key=["id"]) + p = MyPerson.from_json('{"name": "name", "age": 27}', created=True) self.assertEqual(p.id, None) - p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." 
will be raised here + p.id = ( + "12345" + ) # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here p = MyPerson._from_son({"name": "name", "age": 27}, created=True) self.assertEqual(p.id, None) - p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here + p.id = ( + "12345" + ) # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here def test_from_son_created_False_without_id(self): class MyPerson(Document): @@ -3359,7 +3445,7 @@ class InstanceTest(MongoDBTestCase): p.save() self.assertIsNotNone(p.id) saved_p = MyPerson.objects.get(id=p.id) - self.assertEqual(saved_p.name, 'a_fancy_name') + self.assertEqual(saved_p.name, "a_fancy_name") def test_from_son_created_False_with_id(self): # 1854 @@ -3368,11 +3454,13 @@ class InstanceTest(MongoDBTestCase): MyPerson.objects.delete() - p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False) + p = MyPerson.from_json( + '{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False + ) self.assertFalse(p._created) self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, 'a_fancy_name') - self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + self.assertEqual(p.name, "a_fancy_name") + self.assertEqual(p.id, ObjectId("5b85a8b04ec5dc2da388296e")) p.save() with self.assertRaises(DoesNotExist): @@ -3382,8 +3470,8 @@ class InstanceTest(MongoDBTestCase): MyPerson.objects.get(id=p.id) self.assertFalse(p._created) - p.name = 'a new fancy name' - self.assertEqual(p._changed_fields, ['name']) + p.name = "a new fancy name" + self.assertEqual(p._changed_fields, ["name"]) p.save() saved_p = MyPerson.objects.get(id=p.id) self.assertEqual(saved_p.name, p.name) @@ -3394,16 +3482,18 @@ class InstanceTest(MongoDBTestCase): MyPerson.objects.delete() - p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True) + p = MyPerson.from_json( + '{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True + ) self.assertTrue(p._created) self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, 'a_fancy_name') - self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + self.assertEqual(p.name, "a_fancy_name") + self.assertEqual(p.id, ObjectId("5b85a8b04ec5dc2da388296e")) p.save() saved_p = MyPerson.objects.get(id=p.id) self.assertEqual(saved_p, p) - self.assertEqual(p.name, 'a_fancy_name') + self.assertEqual(p.name, "a_fancy_name") def test_null_field(self): # 734 @@ -3417,9 +3507,9 @@ class InstanceTest(MongoDBTestCase): cdt_fld = ComplexDateTimeField(null=True) User.objects.delete() - u = User(name='user') + u = User(name="user") u.save() - u_from_db = User.objects.get(name='user') + u_from_db = User.objects.get(name="user") u_from_db.height = None u_from_db.save() self.assertEqual(u_from_db.height, None) @@ -3432,15 +3522,16 @@ class InstanceTest(MongoDBTestCase): # 735 User.objects.delete() - u = User(name='user') + u = User(name="user") u.save() - User.objects(name='user').update_one(set__height=None, upsert=True) - u_from_db = User.objects.get(name='user') + User.objects(name="user").update_one(set__height=None, upsert=True) + u_from_db = User.objects.get(name="user") self.assertEqual(u_from_db.height, None) def test_not_saved_eq(self): """Ensure we can compare documents not saved. 
""" + class Person(Document): pass @@ -3458,7 +3549,7 @@ class InstanceTest(MongoDBTestCase): l = ListField(EmbeddedDocumentField(B)) A.objects.delete() - A(l=[B(v='1'), B(v='2'), B(v='3')]).save() + A(l=[B(v="1"), B(v="2"), B(v="3")]).save() a = A.objects.get() self.assertEqual(a.l._instance, a) for idx, b in enumerate(a.l): @@ -3467,6 +3558,7 @@ class InstanceTest(MongoDBTestCase): def test_falsey_pk(self): """Ensure that we can create and update a document with Falsey PK.""" + class Person(Document): age = IntField(primary_key=True) height = FloatField() @@ -3480,6 +3572,7 @@ class InstanceTest(MongoDBTestCase): def test_push_with_position(self): """Ensure that push with position works properly for an instance.""" + class BlogPost(Document): slug = StringField() tags = ListField(StringField()) @@ -3491,10 +3584,11 @@ class InstanceTest(MongoDBTestCase): blog.update(push__tags__0=["mongodb", "code"]) blog.reload() - self.assertEqual(blog.tags, ['mongodb', 'code', 'python']) + self.assertEqual(blog.tags, ["mongodb", "code", "python"]) def test_push_nested_list(self): """Ensure that push update works in nested list""" + class BlogPost(Document): slug = StringField() tags = ListField() @@ -3505,10 +3599,11 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(blog.tags, [["value1", 123]]) def test_accessing_objects_with_indexes_error(self): - insert_result = self.db.company.insert_many([{'name': 'Foo'}, - {'name': 'Foo'}]) # Force 2 doc with same name + insert_result = self.db.company.insert_many( + [{"name": "Foo"}, {"name": "Foo"}] + ) # Force 2 doc with same name REF_OID = insert_result.inserted_ids[0] - self.db.user.insert_one({'company': REF_OID}) # Force 2 doc with same name + self.db.user.insert_one({"company": REF_OID}) # Force 2 doc with same name class Company(Document): name = StringField(unique=True) @@ -3521,5 +3616,5 @@ class InstanceTest(MongoDBTestCase): User.objects().select_related() -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/json_serialisation.py b/tests/document/json_serialisation.py index 251b65a2..33d5a6d9 100644 --- a/tests/document/json_serialisation.py +++ b/tests/document/json_serialisation.py @@ -13,9 +13,8 @@ __all__ = ("TestJson",) class TestJson(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") def test_json_names(self): """ @@ -25,22 +24,24 @@ class TestJson(unittest.TestCase): a to_json with the original class names and not the abreviated mongodb document keys """ + class Embedded(EmbeddedDocument): - string = StringField(db_field='s') + string = StringField(db_field="s") class Doc(Document): - string = StringField(db_field='s') - embedded = EmbeddedDocumentField(Embedded, db_field='e') + string = StringField(db_field="s") + embedded = EmbeddedDocumentField(Embedded, db_field="e") doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello")) - doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':')) + doc_json = doc.to_json( + sort_keys=True, use_db_field=False, separators=(",", ":") + ) expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" self.assertEqual(doc_json, expected_json) def test_json_simple(self): - class Embedded(EmbeddedDocument): string = StringField() @@ -49,12 +50,14 @@ class TestJson(unittest.TestCase): embedded_field = EmbeddedDocumentField(Embedded) def __eq__(self, other): - return (self.string == other.string and - self.embedded_field == other.embedded_field) + return ( + 
self.string == other.string + and self.embedded_field == other.embedded_field + ) doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) - doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) + doc_json = doc.to_json(sort_keys=True, separators=(",", ":")) expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" self.assertEqual(doc_json, expected_json) @@ -68,41 +71,43 @@ class TestJson(unittest.TestCase): pass class Doc(Document): - string_field = StringField(default='1') + string_field = StringField(default="1") int_field = IntField(default=1) float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.now) - embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, - default=lambda: EmbeddedDoc()) + embedded_document_field = EmbeddedDocumentField( + EmbeddedDoc, default=lambda: EmbeddedDoc() + ) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=ObjectId) - reference_field = ReferenceField(Simple, default=lambda: - Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = ComplexDateTimeField(default=datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) generic_reference_field = GenericReferenceField( - default=lambda: Simple().save()) - sorted_list_field = SortedListField(IntField(), - default=lambda: [1, 2, 3]) + default=lambda: Simple().save() + ) + sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") geo_point_field = GeoPointField(default=lambda: [1, 2]) sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) generic_embedded_document_field = GenericEmbeddedDocumentField( - default=lambda: EmbeddedDoc()) + default=lambda: EmbeddedDoc() + ) def __eq__(self, other): import json + return json.loads(self.to_json()) == json.loads(other.to_json()) doc = Doc() self.assertEqual(doc, Doc.from_json(doc.to_json())) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/validation.py b/tests/document/validation.py index 30a285b2..78199231 100644 --- a/tests/document/validation.py +++ b/tests/document/validation.py @@ -8,49 +8,56 @@ __all__ = ("ValidatorErrorTest",) class ValidatorErrorTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") def test_to_dict(self): """Ensure a ValidationError handles error to_dict correctly. 
""" - error = ValidationError('root') + error = ValidationError("root") self.assertEqual(error.to_dict(), {}) # 1st level error schema - error.errors = {'1st': ValidationError('bad 1st'), } - self.assertIn('1st', error.to_dict()) - self.assertEqual(error.to_dict()['1st'], 'bad 1st') + error.errors = {"1st": ValidationError("bad 1st")} + self.assertIn("1st", error.to_dict()) + self.assertEqual(error.to_dict()["1st"], "bad 1st") # 2nd level error schema - error.errors = {'1st': ValidationError('bad 1st', errors={ - '2nd': ValidationError('bad 2nd'), - })} - self.assertIn('1st', error.to_dict()) - self.assertIsInstance(error.to_dict()['1st'], dict) - self.assertIn('2nd', error.to_dict()['1st']) - self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') + error.errors = { + "1st": ValidationError( + "bad 1st", errors={"2nd": ValidationError("bad 2nd")} + ) + } + self.assertIn("1st", error.to_dict()) + self.assertIsInstance(error.to_dict()["1st"], dict) + self.assertIn("2nd", error.to_dict()["1st"]) + self.assertEqual(error.to_dict()["1st"]["2nd"], "bad 2nd") # moar levels - error.errors = {'1st': ValidationError('bad 1st', errors={ - '2nd': ValidationError('bad 2nd', errors={ - '3rd': ValidationError('bad 3rd', errors={ - '4th': ValidationError('Inception'), - }), - }), - })} - self.assertIn('1st', error.to_dict()) - self.assertIn('2nd', error.to_dict()['1st']) - self.assertIn('3rd', error.to_dict()['1st']['2nd']) - self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd']) - self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], - 'Inception') + error.errors = { + "1st": ValidationError( + "bad 1st", + errors={ + "2nd": ValidationError( + "bad 2nd", + errors={ + "3rd": ValidationError( + "bad 3rd", errors={"4th": ValidationError("Inception")} + ) + }, + ) + }, + ) + } + self.assertIn("1st", error.to_dict()) + self.assertIn("2nd", error.to_dict()["1st"]) + self.assertIn("3rd", error.to_dict()["1st"]["2nd"]) + self.assertIn("4th", error.to_dict()["1st"]["2nd"]["3rd"]) + self.assertEqual(error.to_dict()["1st"]["2nd"]["3rd"]["4th"], "Inception") self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") def test_model_validation(self): - class User(Document): username = StringField(primary_key=True) name = StringField(required=True) @@ -59,9 +66,10 @@ class ValidatorErrorTest(unittest.TestCase): User().validate() except ValidationError as e: self.assertIn("User:None", e.message) - self.assertEqual(e.to_dict(), { - 'username': 'Field is required', - 'name': 'Field is required'}) + self.assertEqual( + e.to_dict(), + {"username": "Field is required", "name": "Field is required"}, + ) user = User(username="RossC0", name="Ross").save() user.name = None @@ -69,14 +77,13 @@ class ValidatorErrorTest(unittest.TestCase): user.save() except ValidationError as e: self.assertIn("User:RossC0", e.message) - self.assertEqual(e.to_dict(), { - 'name': 'Field is required'}) + self.assertEqual(e.to_dict(), {"name": "Field is required"}) def test_fields_rewrite(self): class BasePerson(Document): name = StringField() age = IntField() - meta = {'abstract': True} + meta = {"abstract": True} class Person(BasePerson): name = StringField(required=True) @@ -87,6 +94,7 @@ class ValidatorErrorTest(unittest.TestCase): def test_embedded_document_validation(self): """Ensure that embedded documents may be validated. 
""" + class Comment(EmbeddedDocument): date = DateTimeField() content = StringField(required=True) @@ -94,7 +102,7 @@ class ValidatorErrorTest(unittest.TestCase): comment = Comment() self.assertRaises(ValidationError, comment.validate) - comment.content = 'test' + comment.content = "test" comment.validate() comment.date = 4 @@ -105,20 +113,20 @@ class ValidatorErrorTest(unittest.TestCase): self.assertEqual(comment._instance, None) def test_embedded_db_field_validate(self): - class SubDoc(EmbeddedDocument): val = IntField(required=True) class Doc(Document): id = StringField(primary_key=True) - e = EmbeddedDocumentField(SubDoc, db_field='eb') + e = EmbeddedDocumentField(SubDoc, db_field="eb") try: Doc(id="bad").validate() except ValidationError as e: self.assertIn("SubDoc:None", e.message) - self.assertEqual(e.to_dict(), { - "e": {'val': 'OK could not be converted to int'}}) + self.assertEqual( + e.to_dict(), {"e": {"val": "OK could not be converted to int"}} + ) Doc.drop_collection() @@ -127,24 +135,24 @@ class ValidatorErrorTest(unittest.TestCase): doc = Doc.objects.first() keys = doc._data.keys() self.assertEqual(2, len(keys)) - self.assertIn('e', keys) - self.assertIn('id', keys) + self.assertIn("e", keys) + self.assertIn("id", keys) doc.e.val = "OK" try: doc.save() except ValidationError as e: self.assertIn("Doc:test", e.message) - self.assertEqual(e.to_dict(), { - "e": {'val': 'OK could not be converted to int'}}) + self.assertEqual( + e.to_dict(), {"e": {"val": "OK could not be converted to int"}} + ) def test_embedded_weakref(self): - class SubDoc(EmbeddedDocument): val = IntField(required=True) class Doc(Document): - e = EmbeddedDocumentField(SubDoc, db_field='eb') + e = EmbeddedDocumentField(SubDoc, db_field="eb") Doc.drop_collection() @@ -167,9 +175,10 @@ class ValidatorErrorTest(unittest.TestCase): Test to ensure a ReferenceField can store a reference to a parent class when inherited. Issue #954. """ + class Parent(Document): - meta = {'allow_inheritance': True} - reference = ReferenceField('self') + meta = {"allow_inheritance": True} + reference = ReferenceField("self") class Child(Parent): pass @@ -190,9 +199,10 @@ class ValidatorErrorTest(unittest.TestCase): Test to ensure a ReferenceField can store a reference to a parent class when inherited and when set via attribute. Issue #954. 
""" + class Parent(Document): - meta = {'allow_inheritance': True} - reference = ReferenceField('self') + meta = {"allow_inheritance": True} + reference = ReferenceField("self") class Child(Parent): pass @@ -210,5 +220,5 @@ class ValidatorErrorTest(unittest.TestCase): self.fail("ValidationError raised: %s" % e.message) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 68baab46..87acf27f 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -6,27 +6,52 @@ from nose.plugins.skip import SkipTest from bson import DBRef, ObjectId, SON -from mongoengine import Document, StringField, IntField, DateTimeField, DateField, ValidationError, \ - ComplexDateTimeField, FloatField, ListField, ReferenceField, DictField, EmbeddedDocument, EmbeddedDocumentField, \ - GenericReferenceField, DoesNotExist, NotRegistered, OperationError, DynamicField, \ - FieldDoesNotExist, EmbeddedDocumentListField, MultipleObjectsReturned, NotUniqueError, BooleanField,\ - ObjectIdField, SortedListField, GenericLazyReferenceField, LazyReferenceField, DynamicDocument -from mongoengine.base import (BaseField, EmbeddedDocumentList, _document_registry) +from mongoengine import ( + Document, + StringField, + IntField, + DateTimeField, + DateField, + ValidationError, + ComplexDateTimeField, + FloatField, + ListField, + ReferenceField, + DictField, + EmbeddedDocument, + EmbeddedDocumentField, + GenericReferenceField, + DoesNotExist, + NotRegistered, + OperationError, + DynamicField, + FieldDoesNotExist, + EmbeddedDocumentListField, + MultipleObjectsReturned, + NotUniqueError, + BooleanField, + ObjectIdField, + SortedListField, + GenericLazyReferenceField, + LazyReferenceField, + DynamicDocument, +) +from mongoengine.base import BaseField, EmbeddedDocumentList, _document_registry from mongoengine.errors import DeprecatedError from tests.utils import MongoDBTestCase class FieldTest(MongoDBTestCase): - def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. 
""" + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) day = DateField(default=datetime.date.today) @@ -34,9 +59,7 @@ class FieldTest(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, - ['age', 'created', 'day', 'name', 'userid'] - ) + self.assertEqual(data_to_be_saved, ["age", "created", "day", "name", "userid"]) self.assertTrue(person.validate() is None) @@ -46,18 +69,19 @@ class FieldTest(MongoDBTestCase): self.assertEqual(person.created, person.created) self.assertEqual(person.day, person.day) - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) - self.assertEqual(person._data['day'], person.day) + self.assertEqual(person._data["name"], person.name) + self.assertEqual(person._data["age"], person.age) + self.assertEqual(person._data["userid"], person.userid) + self.assertEqual(person._data["created"], person.created) + self.assertEqual(person._data["day"], person.day) # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual( - data_to_be_saved, ['age', 'created', 'day', 'name', 'userid']) + self.assertEqual(data_to_be_saved, ["age", "created", "day", "name", "userid"]) - def test_custom_field_validation_raise_deprecated_error_when_validation_return_something(self): + def test_custom_field_validation_raise_deprecated_error_when_validation_return_something( + self + ): # Covers introduction of a breaking change in the validation parameter (0.18) def _not_empty(z): return bool(z) @@ -67,8 +91,10 @@ class FieldTest(MongoDBTestCase): Person.drop_collection() - error = ("validation argument for `name` must not return anything, " - "it should raise a ValidationError if validation fails") + error = ( + "validation argument for `name` must not return anything, " + "it should raise a ValidationError if validation fails" + ) with self.assertRaises(DeprecatedError) as ctx_err: Person(name="").validate() @@ -81,7 +107,7 @@ class FieldTest(MongoDBTestCase): def test_custom_field_validation_raise_validation_error(self): def _not_empty(z): if not z: - raise ValidationError('cantbeempty') + raise ValidationError("cantbeempty") class Person(Document): name = StringField(validation=_not_empty) @@ -90,11 +116,17 @@ class FieldTest(MongoDBTestCase): with self.assertRaises(ValidationError) as ctx_err: Person(name="").validate() - self.assertEqual("ValidationError (Person:None) (cantbeempty: ['name'])", str(ctx_err.exception)) + self.assertEqual( + "ValidationError (Person:None) (cantbeempty: ['name'])", + str(ctx_err.exception), + ) with self.assertRaises(ValidationError): Person(name="").save() - self.assertEqual("ValidationError (Person:None) (cantbeempty: ['name'])", str(ctx_err.exception)) + self.assertEqual( + "ValidationError (Person:None) (cantbeempty: ['name'])", + str(ctx_err.exception), + ) Person(name="garbage").validate() Person(name="garbage").save() @@ -103,10 +135,11 @@ class FieldTest(MongoDBTestCase): """Ensure that default field values are used even when we explcitly initialize the doc with None values. 
""" + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) # Trying setting values to None @@ -114,7 +147,7 @@ class FieldTest(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) self.assertTrue(person.validate() is None) @@ -123,23 +156,24 @@ class FieldTest(MongoDBTestCase): self.assertEqual(person.userid, person.userid) self.assertEqual(person.created, person.created) - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) + self.assertEqual(person._data["name"], person.name) + self.assertEqual(person._data["age"], person.age) + self.assertEqual(person._data["userid"], person.userid) + self.assertEqual(person._data["created"], person.created) # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) def test_default_values_when_setting_to_None(self): """Ensure that default field values are used when creating a document. """ + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) person = Person() @@ -150,25 +184,27 @@ class FieldTest(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) self.assertTrue(person.validate() is None) self.assertEqual(person.name, None) self.assertEqual(person.age, 30) - self.assertEqual(person.userid, 'test') + self.assertEqual(person.userid, "test") self.assertIsInstance(person.created, datetime.datetime) - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) + self.assertEqual(person._data["name"], person.name) + self.assertEqual(person._data["age"], person.age) + self.assertEqual(person._data["userid"], person.userid) + self.assertEqual(person._data["created"], person.created) # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) - def test_default_value_is_not_used_when_changing_value_to_empty_list_for_strict_doc(self): + def test_default_value_is_not_used_when_changing_value_to_empty_list_for_strict_doc( + self + ): """List field with default can be set to the empty list (strict)""" # Issue #1733 class Doc(Document): @@ -180,7 +216,9 @@ class FieldTest(MongoDBTestCase): reloaded = Doc.objects.get(id=doc.id) self.assertEqual(reloaded.x, []) - def 
test_default_value_is_not_used_when_changing_value_to_empty_list_for_dyn_doc(self): + def test_default_value_is_not_used_when_changing_value_to_empty_list_for_dyn_doc( + self + ): """List field with default can be set to the empty list (dynamic)""" # Issue #1733 class Doc(DynamicDocument): @@ -188,7 +226,7 @@ class FieldTest(MongoDBTestCase): doc = Doc(x=[1]).save() doc.x = [] - doc.y = 2 # Was triggering the bug + doc.y = 2 # Was triggering the bug doc.save() reloaded = Doc.objects.get(id=doc.id) self.assertEqual(reloaded.x, []) @@ -197,41 +235,47 @@ class FieldTest(MongoDBTestCase): """Ensure that default field values are used after non-default values are explicitly deleted. """ + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) - person = Person(name="Ross", age=50, userid='different', - created=datetime.datetime(2014, 6, 12)) + person = Person( + name="Ross", + age=50, + userid="different", + created=datetime.datetime(2014, 6, 12), + ) del person.name del person.age del person.userid del person.created data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) self.assertTrue(person.validate() is None) self.assertEqual(person.name, None) self.assertEqual(person.age, 30) - self.assertEqual(person.userid, 'test') + self.assertEqual(person.userid, "test") self.assertIsInstance(person.created, datetime.datetime) self.assertNotEqual(person.created, datetime.datetime(2014, 6, 12)) - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) + self.assertEqual(person._data["name"], person.name) + self.assertEqual(person._data["age"], person.age) + self.assertEqual(person._data["userid"], person.userid) + self.assertEqual(person._data["created"], person.created) # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) def test_required_values(self): """Ensure that required field constraints are enforced.""" + class Person(Document): name = StringField(required=True) age = IntField(required=True) @@ -246,6 +290,7 @@ class FieldTest(MongoDBTestCase): """Ensure that every fields should accept None if required is False. """ + class HandleNoneFields(Document): str_fld = StringField() int_fld = IntField() @@ -255,7 +300,7 @@ class FieldTest(MongoDBTestCase): HandleNoneFields.drop_collection() doc = HandleNoneFields() - doc.str_fld = u'spam ham egg' + doc.str_fld = u"spam ham egg" doc.int_fld = 42 doc.flt_fld = 4.2 doc.com_dt_fld = datetime.datetime.utcnow() @@ -281,6 +326,7 @@ class FieldTest(MongoDBTestCase): """Ensure that every field can handle null values from the database. 
""" + class HandleNoneFields(Document): str_fld = StringField(required=True) int_fld = IntField(required=True) @@ -290,21 +336,17 @@ class FieldTest(MongoDBTestCase): HandleNoneFields.drop_collection() doc = HandleNoneFields() - doc.str_fld = u'spam ham egg' + doc.str_fld = u"spam ham egg" doc.int_fld = 42 doc.flt_fld = 4.2 doc.comp_dt_fld = datetime.datetime.utcnow() doc.save() # Unset all the fields - obj = HandleNoneFields._get_collection().update({"_id": doc.id}, { - "$unset": { - "str_fld": 1, - "int_fld": 1, - "flt_fld": 1, - "comp_dt_fld": 1 - } - }) + obj = HandleNoneFields._get_collection().update( + {"_id": doc.id}, + {"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}}, + ) # Retrive data from db and verify it. ret = HandleNoneFields.objects.first() @@ -321,16 +363,17 @@ class FieldTest(MongoDBTestCase): """Ensure that invalid values cannot be assigned to an ObjectIdField. """ + class Person(Document): name = StringField() - person = Person(name='Test User') + person = Person(name="Test User") self.assertEqual(person.id, None) person.id = 47 self.assertRaises(ValidationError, person.validate) - person.id = 'abc' + person.id = "abc" self.assertRaises(ValidationError, person.validate) person.id = str(ObjectId()) @@ -338,26 +381,27 @@ class FieldTest(MongoDBTestCase): def test_string_validation(self): """Ensure that invalid values cannot be assigned to string fields.""" + class Person(Document): name = StringField(max_length=20) - userid = StringField(r'[0-9a-z_]+$') + userid = StringField(r"[0-9a-z_]+$") person = Person(name=34) self.assertRaises(ValidationError, person.validate) # Test regex validation on userid - person = Person(userid='test.User') + person = Person(userid="test.User") self.assertRaises(ValidationError, person.validate) - person.userid = 'test_user' - self.assertEqual(person.userid, 'test_user') + person.userid = "test_user" + self.assertEqual(person.userid, "test_user") person.validate() # Test max length validation on name - person = Person(name='Name that is more than twenty characters') + person = Person(name="Name that is more than twenty characters") self.assertRaises(ValidationError, person.validate) - person.name = 'Shorter name' + person.name = "Shorter name" person.validate() def test_db_field_validation(self): @@ -365,25 +409,28 @@ class FieldTest(MongoDBTestCase): # dot in the name with self.assertRaises(ValueError): + class User(Document): - name = StringField(db_field='user.name') + name = StringField(db_field="user.name") # name starting with $ with self.assertRaises(ValueError): + class User(Document): - name = StringField(db_field='$name') + name = StringField(db_field="$name") # name containing a null character with self.assertRaises(ValueError): + class User(Document): - name = StringField(db_field='name\0') + name = StringField(db_field="name\0") def test_list_validation(self): """Ensure that a list field only accepts lists with valid elements.""" access_level_choices = ( - ('a', u'Administration'), - ('b', u'Manager'), - ('c', u'Staff'), + ("a", u"Administration"), + ("b", u"Manager"), + ("c", u"Staff"), ) class User(Document): @@ -400,41 +447,41 @@ class FieldTest(MongoDBTestCase): authors_as_lazy = ListField(LazyReferenceField(User)) generic = ListField(GenericReferenceField()) generic_as_lazy = ListField(GenericLazyReferenceField()) - access_list = ListField(choices=access_level_choices, display_sep=', ') + access_list = ListField(choices=access_level_choices, display_sep=", ") User.drop_collection() 
BlogPost.drop_collection() - post = BlogPost(content='Went for a walk today...') + post = BlogPost(content="Went for a walk today...") post.validate() - post.tags = 'fun' + post.tags = "fun" self.assertRaises(ValidationError, post.validate) post.tags = [1, 2] self.assertRaises(ValidationError, post.validate) - post.tags = ['fun', 'leisure'] + post.tags = ["fun", "leisure"] post.validate() - post.tags = ('fun', 'leisure') + post.tags = ("fun", "leisure") post.validate() - post.access_list = 'a,b' + post.access_list = "a,b" self.assertRaises(ValidationError, post.validate) - post.access_list = ['c', 'd'] + post.access_list = ["c", "d"] self.assertRaises(ValidationError, post.validate) - post.access_list = ['a', 'b'] + post.access_list = ["a", "b"] post.validate() - self.assertEqual(post.get_access_list_display(), u'Administration, Manager') + self.assertEqual(post.get_access_list_display(), u"Administration, Manager") - post.comments = ['a'] + post.comments = ["a"] self.assertRaises(ValidationError, post.validate) - post.comments = 'yay' + post.comments = "yay" self.assertRaises(ValidationError, post.validate) - comments = [Comment(content='Good for you'), Comment(content='Yay.')] + comments = [Comment(content="Good for you"), Comment(content="Yay.")] post.comments = comments post.validate() @@ -485,28 +532,28 @@ class FieldTest(MongoDBTestCase): def test_sorted_list_sorting(self): """Ensure that a sorted list field properly sorts values. """ + class Comment(EmbeddedDocument): order = IntField() content = StringField() class BlogPost(Document): content = StringField() - comments = SortedListField(EmbeddedDocumentField(Comment), - ordering='order') + comments = SortedListField(EmbeddedDocumentField(Comment), ordering="order") tags = SortedListField(StringField()) BlogPost.drop_collection() - post = BlogPost(content='Went for a walk today...') + post = BlogPost(content="Went for a walk today...") post.save() - post.tags = ['leisure', 'fun'] + post.tags = ["leisure", "fun"] post.save() post.reload() - self.assertEqual(post.tags, ['fun', 'leisure']) + self.assertEqual(post.tags, ["fun", "leisure"]) - comment1 = Comment(content='Good for you', order=1) - comment2 = Comment(content='Yay.', order=0) + comment1 = Comment(content="Good for you", order=1) + comment2 = Comment(content="Yay.", order=0) comments = [comment1, comment2] post.comments = comments post.save() @@ -529,16 +576,17 @@ class FieldTest(MongoDBTestCase): name = StringField() class CategoryList(Document): - categories = SortedListField(EmbeddedDocumentField(Category), - ordering='count', reverse=True) + categories = SortedListField( + EmbeddedDocumentField(Category), ordering="count", reverse=True + ) name = StringField() CategoryList.drop_collection() catlist = CategoryList(name="Top categories") - cat1 = Category(name='posts', count=10) - cat2 = Category(name='food', count=100) - cat3 = Category(name='drink', count=40) + cat1 = Category(name="posts", count=10) + cat2 = Category(name="food", count=100) + cat3 = Category(name="drink", count=40) catlist.categories = [cat1, cat2, cat3] catlist.save() catlist.reload() @@ -549,57 +597,59 @@ class FieldTest(MongoDBTestCase): def test_list_field(self): """Ensure that list types work as expected.""" + class BlogPost(Document): info = ListField() BlogPost.drop_collection() post = BlogPost() - post.info = 'my post' + post.info = "my post" self.assertRaises(ValidationError, post.validate) - post.info = {'title': 'test'} + post.info = {"title": "test"} self.assertRaises(ValidationError, 
post.validate) - post.info = ['test'] + post.info = ["test"] post.save() post = BlogPost() - post.info = [{'test': 'test'}] + post.info = [{"test": "test"}] post.save() post = BlogPost() - post.info = [{'test': 3}] + post.info = [{"test": 3}] post.save() self.assertEqual(BlogPost.objects.count(), 3) - self.assertEqual( - BlogPost.objects.filter(info__exact='test').count(), 1) - self.assertEqual( - BlogPost.objects.filter(info__0__test='test').count(), 1) + self.assertEqual(BlogPost.objects.filter(info__exact="test").count(), 1) + self.assertEqual(BlogPost.objects.filter(info__0__test="test").count(), 1) # Confirm handles non strings or non existing keys + self.assertEqual(BlogPost.objects.filter(info__0__test__exact="5").count(), 0) self.assertEqual( - BlogPost.objects.filter(info__0__test__exact='5').count(), 0) - self.assertEqual( - BlogPost.objects.filter(info__100__test__exact='test').count(), 0) + BlogPost.objects.filter(info__100__test__exact="test").count(), 0 + ) # test queries by list post = BlogPost() - post.info = ['1', '2'] + post.info = ["1", "2"] post.save() - post = BlogPost.objects(info=['1', '2']).get() - post.info += ['3', '4'] + post = BlogPost.objects(info=["1", "2"]).get() + post.info += ["3", "4"] post.save() - self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4']).count(), 1) - post = BlogPost.objects(info=['1', '2', '3', '4']).get() + self.assertEqual(BlogPost.objects(info=["1", "2", "3", "4"]).count(), 1) + post = BlogPost.objects(info=["1", "2", "3", "4"]).get() post.info *= 2 post.save() - self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4', '1', '2', '3', '4']).count(), 1) + self.assertEqual( + BlogPost.objects(info=["1", "2", "3", "4", "1", "2", "3", "4"]).count(), 1 + ) def test_list_field_manipulative_operators(self): """Ensure that ListField works with standard list operators that manipulate the list. 
""" + class BlogPost(Document): ref = StringField() info = ListField(StringField()) @@ -608,162 +658,178 @@ class FieldTest(MongoDBTestCase): post = BlogPost() post.ref = "1234" - post.info = ['0', '1', '2', '3', '4', '5'] + post.info = ["0", "1", "2", "3", "4", "5"] post.save() def reset_post(): - post.info = ['0', '1', '2', '3', '4', '5'] + post.info = ["0", "1", "2", "3", "4", "5"] post.save() # '__add__(listB)' # listA+listB # operator.add(listA, listB) reset_post() - temp = ['a', 'b'] + temp = ["a", "b"] post.info = post.info + temp - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) # '__delitem__(index)' # aka 'del list[index]' # aka 'operator.delitem(list, index)' reset_post() del post.info[2] # del from middle ('2') - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) # '__delitem__(slice(i, j))' # aka 'del list[i:j]' # aka 'operator.delitem(list, slice(i,j))' reset_post() del post.info[1:3] # removes '1', '2' - self.assertEqual(post.info, ['0', '3', '4', '5']) + self.assertEqual(post.info, ["0", "3", "4", "5"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '3', '4', '5']) + self.assertEqual(post.info, ["0", "3", "4", "5"]) # '__iadd__' # aka 'list += list' reset_post() - temp = ['a', 'b'] + temp = ["a", "b"] post.info += temp - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) # '__imul__' # aka 'list *= number' reset_post() post.info *= 2 - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] + ) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] + ) # '__mul__' # aka 'listA*listB' reset_post() post.info = post.info * 2 - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] + ) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] + ) # '__rmul__' # aka 'listB*listA' reset_post() post.info = 2 * post.info - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] + ) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] + ) # '__setitem__(index, value)' # aka 'list[index]=value' # aka 'setitem(list, 
value)' reset_post() - post.info[4] = 'a' - self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + post.info[4] = "a" + self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) # __setitem__(index, value) with a negative index reset_post() - post.info[-2] = 'a' - self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + post.info[-2] = "a" + self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) # '__setitem__(slice(i, j), listB)' # aka 'listA[i:j] = listB' # aka 'setitem(listA, slice(i, j), listB)' reset_post() - post.info[1:3] = ['h', 'e', 'l', 'l', 'o'] - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + post.info[1:3] = ["h", "e", "l", "l", "o"] + self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) post.save() post.reload() - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) # '__setitem__(slice(i, j), listB)' with negative i and j reset_post() - post.info[-5:-3] = ['h', 'e', 'l', 'l', 'o'] - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + post.info[-5:-3] = ["h", "e", "l", "l", "o"] + self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) post.save() post.reload() - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) # negative # 'append' reset_post() - post.info.append('h') - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + post.info.append("h") + self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "h"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "h"]) # 'extend' reset_post() - post.info.extend(['h', 'e', 'l', 'l', 'o']) - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + post.info.extend(["h", "e", "l", "l", "o"]) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] + ) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + self.assertEqual( + post.info, ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] + ) # 'insert' # 'pop' reset_post() x = post.info.pop(2) y = post.info.pop() - self.assertEqual(post.info, ['0', '1', '3', '4']) - self.assertEqual(x, '2') - self.assertEqual(y, '5') + self.assertEqual(post.info, ["0", "1", "3", "4"]) + self.assertEqual(x, "2") + self.assertEqual(y, "5") post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '3', '4']) + self.assertEqual(post.info, ["0", "1", "3", "4"]) # 'remove' reset_post() - post.info.remove('2') - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + post.info.remove("2") + self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) # 'reverse' reset_post() post.info.reverse() - self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + self.assertEqual(post.info, ["5", "4", "3", "2", "1", "0"]) 
post.save() post.reload() - self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + self.assertEqual(post.info, ["5", "4", "3", "2", "1", "0"]) # 'sort': though this operator method does manipulate the list, it is # tested in the 'test_list_field_lexicograpic_operators' function @@ -775,7 +841,7 @@ class FieldTest(MongoDBTestCase): post = BlogPost() post.ref = "1234" - post.info = ['0', '1', '2', '3', '4', '5'] + post.info = ["0", "1", "2", "3", "4", "5"] # '__hash__' # aka 'hash(list)' @@ -785,6 +851,7 @@ class FieldTest(MongoDBTestCase): """Ensure that ListField works with standard list operators that do lexigraphic ordering. """ + class BlogPost(Document): ref = StringField() text_info = ListField(StringField()) @@ -810,7 +877,7 @@ class FieldTest(MongoDBTestCase): blogLargeB.oid_info = [ "54495ad94c934721ede76f90", "54495ad94c934721ede76d23", - "54495ad94c934721ede76d00" + "54495ad94c934721ede76d00", ] blogLargeB.bool_info = [False, True] blogLargeB.save() @@ -852,7 +919,7 @@ class FieldTest(MongoDBTestCase): sorted_target_list = [ ObjectId("54495ad94c934721ede76d00"), ObjectId("54495ad94c934721ede76d23"), - ObjectId("54495ad94c934721ede76f90") + ObjectId("54495ad94c934721ede76f90"), ] self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) self.assertEqual(blogLargeB.oid_info, sorted_target_list) @@ -865,13 +932,14 @@ class FieldTest(MongoDBTestCase): def test_list_assignment(self): """Ensure that list field element assignment and slicing work.""" + class BlogPost(Document): info = ListField() BlogPost.drop_collection() post = BlogPost() - post.info = ['e1', 'e2', 3, '4', 5] + post.info = ["e1", "e2", 3, "4", 5] post.save() post.info[0] = 1 @@ -879,35 +947,35 @@ class FieldTest(MongoDBTestCase): post.reload() self.assertEqual(post.info[0], 1) - post.info[1:3] = ['n2', 'n3'] + post.info[1:3] = ["n2", "n3"] post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', '4', 5]) + self.assertEqual(post.info, [1, "n2", "n3", "4", 5]) - post.info[-1] = 'n5' + post.info[-1] = "n5" post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', '4', 'n5']) + self.assertEqual(post.info, [1, "n2", "n3", "4", "n5"]) post.info[-2] = 4 post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', 4, 'n5']) + self.assertEqual(post.info, [1, "n2", "n3", 4, "n5"]) post.info[1:-1] = [2] post.save() post.reload() - self.assertEqual(post.info, [1, 2, 'n5']) + self.assertEqual(post.info, [1, 2, "n5"]) - post.info[:-1] = [1, 'n2', 'n3', 4] + post.info[:-1] = [1, "n2", "n3", 4] post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', 4, 'n5']) + self.assertEqual(post.info, [1, "n2", "n3", 4, "n5"]) post.info[-4:3] = [2, 3] post.save() post.reload() - self.assertEqual(post.info, [1, 2, 3, 4, 'n5']) + self.assertEqual(post.info, [1, 2, 3, 4, "n5"]) def test_list_field_passed_in_value(self): class Foo(Document): @@ -921,12 +989,13 @@ class FieldTest(MongoDBTestCase): foo = Foo(bars=[]) foo.bars.append(bar) - self.assertEqual(repr(foo.bars), '[]') + self.assertEqual(repr(foo.bars), "[]") def test_list_field_strict(self): """Ensure that list field handles validation if provided a strict field type. 
""" + class Simple(Document): mapping = ListField(field=IntField()) @@ -943,17 +1012,19 @@ class FieldTest(MongoDBTestCase): def test_list_field_rejects_strings(self): """Strings aren't valid list field data types.""" + class Simple(Document): mapping = ListField() Simple.drop_collection() e = Simple() - e.mapping = 'hello world' + e.mapping = "hello world" self.assertRaises(ValidationError, e.save) def test_complex_field_required(self): """Ensure required cant be None / Empty.""" + class Simple(Document): mapping = ListField(required=True) @@ -975,6 +1046,7 @@ class FieldTest(MongoDBTestCase): """If a complex field is set to the same value, it should not be marked as changed. """ + class Simple(Document): mapping = ListField() @@ -999,7 +1071,7 @@ class FieldTest(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() simple.widgets[:3] = [] - self.assertEqual(['widgets'], simple._changed_fields) + self.assertEqual(["widgets"], simple._changed_fields) simple.save() simple = simple.reload() @@ -1011,7 +1083,7 @@ class FieldTest(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() del simple.widgets[:3] - self.assertEqual(['widgets'], simple._changed_fields) + self.assertEqual(["widgets"], simple._changed_fields) simple.save() simple = simple.reload() @@ -1023,7 +1095,7 @@ class FieldTest(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() simple.widgets[-1] = 5 - self.assertEqual(['widgets.3'], simple._changed_fields) + self.assertEqual(["widgets.3"], simple._changed_fields) simple.save() simple = simple.reload() @@ -1031,8 +1103,9 @@ class FieldTest(MongoDBTestCase): def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" + class SettingBase(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class StringSetting(SettingBase): value = StringField() @@ -1046,12 +1119,17 @@ class FieldTest(MongoDBTestCase): Simple.drop_collection() e = Simple() - e.mapping.append(StringSetting(value='foo')) + e.mapping.append(StringSetting(value="foo")) e.mapping.append(IntegerSetting(value=42)) - e.mapping.append({'number': 1, 'string': 'Hi!', 'float': 1.001, - 'complex': IntegerSetting(value=42), - 'list': [IntegerSetting(value=42), - StringSetting(value='foo')]}) + e.mapping.append( + { + "number": 1, + "string": "Hi!", + "float": 1.001, + "complex": IntegerSetting(value=42), + "list": [IntegerSetting(value=42), StringSetting(value="foo")], + } + ) e.save() e2 = Simple.objects.get(id=e.id) @@ -1059,35 +1137,36 @@ class FieldTest(MongoDBTestCase): self.assertIsInstance(e2.mapping[1], IntegerSetting) # Test querying + self.assertEqual(Simple.objects.filter(mapping__1__value=42).count(), 1) + self.assertEqual(Simple.objects.filter(mapping__2__number=1).count(), 1) self.assertEqual( - Simple.objects.filter(mapping__1__value=42).count(), 1) + Simple.objects.filter(mapping__2__complex__value=42).count(), 1 + ) self.assertEqual( - Simple.objects.filter(mapping__2__number=1).count(), 1) + Simple.objects.filter(mapping__2__list__0__value=42).count(), 1 + ) self.assertEqual( - Simple.objects.filter(mapping__2__complex__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__2__list__0__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1) + Simple.objects.filter(mapping__2__list__1__value="foo").count(), 1 + ) # Confirm can update Simple.objects().update(set__mapping__1=IntegerSetting(value=10)) - self.assertEqual( - 
Simple.objects.filter(mapping__1__value=10).count(), 1) + self.assertEqual(Simple.objects.filter(mapping__1__value=10).count(), 1) - Simple.objects().update( - set__mapping__2__list__1=StringSetting(value='Boo')) + Simple.objects().update(set__mapping__2__list__1=StringSetting(value="Boo")) self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0) + Simple.objects.filter(mapping__2__list__1__value="foo").count(), 0 + ) self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1) + Simple.objects.filter(mapping__2__list__1__value="Boo").count(), 1 + ) def test_embedded_db_field(self): class Embedded(EmbeddedDocument): - number = IntField(default=0, db_field='i') + number = IntField(default=0, db_field="i") class Test(Document): - embedded = EmbeddedDocumentField(Embedded, db_field='x') + embedded = EmbeddedDocumentField(Embedded, db_field="x") Test.drop_collection() @@ -1100,58 +1179,54 @@ class FieldTest(MongoDBTestCase): test = Test.objects.get() self.assertEqual(test.embedded.number, 2) doc = self.db.test.find_one() - self.assertEqual(doc['x']['i'], 2) + self.assertEqual(doc["x"]["i"], 2) def test_double_embedded_db_field(self): """Make sure multiple layers of embedded docs resolve db fields properly and can be initialized using dicts. """ + class C(EmbeddedDocument): txt = StringField() class B(EmbeddedDocument): - c = EmbeddedDocumentField(C, db_field='fc') + c = EmbeddedDocumentField(C, db_field="fc") class A(Document): - b = EmbeddedDocumentField(B, db_field='fb') + b = EmbeddedDocumentField(B, db_field="fb") - a = A( - b=B( - c=C(txt='hi') - ) - ) + a = A(b=B(c=C(txt="hi"))) a.validate() - a = A(b={'c': {'txt': 'hi'}}) + a = A(b={"c": {"txt": "hi"}}) a.validate() def test_double_embedded_db_field_from_son(self): """Make sure multiple layers of embedded docs resolve db fields from SON properly. """ + class C(EmbeddedDocument): txt = StringField() class B(EmbeddedDocument): - c = EmbeddedDocumentField(C, db_field='fc') + c = EmbeddedDocumentField(C, db_field="fc") class A(Document): - b = EmbeddedDocumentField(B, db_field='fb') + b = EmbeddedDocumentField(B, db_field="fb") - a = A._from_son(SON([ - ('fb', SON([ - ('fc', SON([ - ('txt', 'hi') - ])) - ])) - ])) - self.assertEqual(a.b.c.txt, 'hi') + a = A._from_son(SON([("fb", SON([("fc", SON([("txt", "hi")]))]))])) + self.assertEqual(a.b.c.txt, "hi") - def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet(self): - raise SkipTest("Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet") + def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet( + self + ): + raise SkipTest( + "Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet" + ) class MyDoc2(Document): - emb = EmbeddedDocumentField('MyDoc') + emb = EmbeddedDocumentField("MyDoc") class MyDoc(EmbeddedDocument): name = StringField() @@ -1160,6 +1235,7 @@ class FieldTest(MongoDBTestCase): """Ensure that invalid embedded documents cannot be assigned to embedded document fields. 
""" + class Comment(EmbeddedDocument): content = StringField() @@ -1173,30 +1249,31 @@ class FieldTest(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.preferences = 'My Preferences' + person = Person(name="Test User") + person.preferences = "My Preferences" self.assertRaises(ValidationError, person.validate) # Check that only the right embedded doc works - person.preferences = Comment(content='Nice blog post...') + person.preferences = Comment(content="Nice blog post...") self.assertRaises(ValidationError, person.validate) # Check that the embedded doc is valid person.preferences = PersonPreferences() self.assertRaises(ValidationError, person.validate) - person.preferences = PersonPreferences(food='Cheese', number=47) - self.assertEqual(person.preferences.food, 'Cheese') + person.preferences = PersonPreferences(food="Cheese", number=47) + self.assertEqual(person.preferences.food, "Cheese") person.validate() def test_embedded_document_inheritance(self): """Ensure that subclasses of embedded documents may be provided to EmbeddedDocumentFields of the superclass' type. """ + class User(EmbeddedDocument): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class PowerUser(User): power = IntField() @@ -1207,8 +1284,8 @@ class FieldTest(MongoDBTestCase): BlogPost.drop_collection() - post = BlogPost(content='What I did today...') - post.author = PowerUser(name='Test User', power=47) + post = BlogPost(content="What I did today...") + post.author = PowerUser(name="Test User", power=47) post.save() self.assertEqual(47, BlogPost.objects.first().author.power) @@ -1217,21 +1294,22 @@ class FieldTest(MongoDBTestCase): """Ensure that nested list of subclassed embedded documents is handled correctly. """ + class Group(EmbeddedDocument): name = StringField() content = ListField(StringField()) class Basedoc(Document): groups = ListField(EmbeddedDocumentField(Group)) - meta = {'abstract': True} + meta = {"abstract": True} class User(Basedoc): - doctype = StringField(require=True, default='userdata') + doctype = StringField(require=True, default="userdata") User.drop_collection() - content = ['la', 'le', 'lu'] - group = Group(name='foo', content=content) + content = ["la", "le", "lu"] + group = Group(name="foo", content=content) foobar = User(groups=[group]) foobar.save() @@ -1241,6 +1319,7 @@ class FieldTest(MongoDBTestCase): """Ensure an exception is raised when dereferencing an unknown document. """ + class Foo(Document): pass @@ -1257,20 +1336,21 @@ class FieldTest(MongoDBTestCase): # Reference is no longer valid foo.delete() bar = Bar.objects.get() - self.assertRaises(DoesNotExist, getattr, bar, 'ref') - self.assertRaises(DoesNotExist, getattr, bar, 'generic_ref') + self.assertRaises(DoesNotExist, getattr, bar, "ref") + self.assertRaises(DoesNotExist, getattr, bar, "generic_ref") # When auto_dereference is disabled, there is no trouble returning DBRef bar = Bar.objects.get() expected = foo.to_dbref() - bar._fields['ref']._auto_dereference = False + bar._fields["ref"]._auto_dereference = False self.assertEqual(bar.ref, expected) - bar._fields['generic_ref']._auto_dereference = False - self.assertEqual(bar.generic_ref, {'_ref': expected, '_cls': 'Foo'}) + bar._fields["generic_ref"]._auto_dereference = False + self.assertEqual(bar.generic_ref, {"_ref": expected, "_cls": "Foo"}) def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. 
""" + class User(Document): name = StringField() @@ -1280,9 +1360,9 @@ class FieldTest(MongoDBTestCase): User.drop_collection() Group.drop_collection() - user1 = User(name='user1') + user1 = User(name="user1") user1.save() - user2 = User(name='user2') + user2 = User(name="user2") user2.save() group = Group(members=[user1, user2]) @@ -1296,24 +1376,25 @@ class FieldTest(MongoDBTestCase): def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. """ + class Employee(Document): name = StringField() - boss = ReferenceField('self') - friends = ListField(ReferenceField('self')) + boss = ReferenceField("self") + friends = ListField(ReferenceField("self")) Employee.drop_collection() - bill = Employee(name='Bill Lumbergh') + bill = Employee(name="Bill Lumbergh") bill.save() - michael = Employee(name='Michael Bolton') + michael = Employee(name="Michael Bolton") michael.save() - samir = Employee(name='Samir Nagheenanajar') + samir = Employee(name="Samir Nagheenanajar") samir.save() friends = [michael, samir] - peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) + peter = Employee(name="Peter Gibbons", boss=bill, friends=friends) peter.save() peter = Employee.objects.with_id(peter.id) @@ -1323,13 +1404,14 @@ class FieldTest(MongoDBTestCase): def test_recursive_embedding(self): """Ensure that EmbeddedDocumentFields can contain their own documents. """ + class TreeNode(EmbeddedDocument): name = StringField() - children = ListField(EmbeddedDocumentField('self')) + children = ListField(EmbeddedDocumentField("self")) class Tree(Document): name = StringField() - children = ListField(EmbeddedDocumentField('TreeNode')) + children = ListField(EmbeddedDocumentField("TreeNode")) Tree.drop_collection() @@ -1356,18 +1438,18 @@ class FieldTest(MongoDBTestCase): self.assertEqual(tree.children[0].children[1].name, third_child.name) # Test updating - tree.children[0].name = 'I am Child 1' - tree.children[0].children[0].name = 'I am Child 2' - tree.children[0].children[1].name = 'I am Child 3' + tree.children[0].name = "I am Child 1" + tree.children[0].children[0].name = "I am Child 2" + tree.children[0].children[1].name = "I am Child 3" tree.save() - self.assertEqual(tree.children[0].name, 'I am Child 1') - self.assertEqual(tree.children[0].children[0].name, 'I am Child 2') - self.assertEqual(tree.children[0].children[1].name, 'I am Child 3') + self.assertEqual(tree.children[0].name, "I am Child 1") + self.assertEqual(tree.children[0].children[0].name, "I am Child 2") + self.assertEqual(tree.children[0].children[1].name, "I am Child 3") # Test removal self.assertEqual(len(tree.children[0].children), 2) - del(tree.children[0].children[1]) + del tree.children[0].children[1] tree.save() self.assertEqual(len(tree.children[0].children), 1) @@ -1388,6 +1470,7 @@ class FieldTest(MongoDBTestCase): """Ensure that an abstract document cannot be dropped given it has no underlying collection. """ + class AbstractDoc(Document): name = StringField() meta = {"abstract": True} @@ -1397,6 +1480,7 @@ class FieldTest(MongoDBTestCase): def test_reference_class_with_abstract_parent(self): """Ensure that a class with an abstract parent can be referenced. """ + class Sibling(Document): name = StringField() meta = {"abstract": True} @@ -1421,6 +1505,7 @@ class FieldTest(MongoDBTestCase): """Ensure that an abstract class instance cannot be used in the reference of that abstract class. 
""" + class Sibling(Document): name = StringField() meta = {"abstract": True} @@ -1442,6 +1527,7 @@ class FieldTest(MongoDBTestCase): """Ensure that an an abstract reference fails validation when given a Document that does not inherit from the abstract type. """ + class Sibling(Document): name = StringField() meta = {"abstract": True} @@ -1463,9 +1549,10 @@ class FieldTest(MongoDBTestCase): def test_generic_reference(self): """Ensure that a GenericReferenceField properly dereferences items. """ + class Link(Document): title = StringField() - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Post(Document): title = StringField() @@ -1502,6 +1589,7 @@ class FieldTest(MongoDBTestCase): def test_generic_reference_list(self): """Ensure that a ListField properly dereferences generic references. """ + class Link(Document): title = StringField() @@ -1533,6 +1621,7 @@ class FieldTest(MongoDBTestCase): """Ensure dereferencing out of the document registry throws a `NotRegistered` error. """ + class Link(Document): title = StringField() @@ -1550,7 +1639,7 @@ class FieldTest(MongoDBTestCase): # Mimic User and Link definitions being in a different file # and the Link model not being imported in the User file. - del(_document_registry["Link"]) + del _document_registry["Link"] user = User.objects.first() try: @@ -1560,7 +1649,6 @@ class FieldTest(MongoDBTestCase): pass def test_generic_reference_is_none(self): - class Person(Document): name = StringField() city = GenericReferenceField() @@ -1568,11 +1656,11 @@ class FieldTest(MongoDBTestCase): Person.drop_collection() Person(name="Wilson Jr").save() - self.assertEqual(repr(Person.objects(city=None)), - "[]") + self.assertEqual(repr(Person.objects(city=None)), "[]") def test_generic_reference_choices(self): """Ensure that a GenericReferenceField can handle choices.""" + class Link(Document): title = StringField() @@ -1604,6 +1692,7 @@ class FieldTest(MongoDBTestCase): def test_generic_reference_string_choices(self): """Ensure that a GenericReferenceField can handle choices as strings """ + class Link(Document): title = StringField() @@ -1611,7 +1700,7 @@ class FieldTest(MongoDBTestCase): title = StringField() class Bookmark(Document): - bookmark_object = GenericReferenceField(choices=('Post', Link)) + bookmark_object = GenericReferenceField(choices=("Post", Link)) Link.drop_collection() Post.drop_collection() @@ -1636,11 +1725,12 @@ class FieldTest(MongoDBTestCase): """Ensure that a GenericReferenceField can handle choices on non-derefenreced (i.e. DBRef) elements """ + class Post(Document): title = StringField() class Bookmark(Document): - bookmark_object = GenericReferenceField(choices=(Post, )) + bookmark_object = GenericReferenceField(choices=(Post,)) other_field = StringField() Post.drop_collection() @@ -1654,13 +1744,14 @@ class FieldTest(MongoDBTestCase): bm = Bookmark.objects.get(id=bm.id) # bookmark_object is now a DBRef - bm.other_field = 'dummy_change' + bm.other_field = "dummy_change" bm.save() def test_generic_reference_list_choices(self): """Ensure that a ListField properly dereferences generic references and respects choices. 
""" + class Link(Document): title = StringField() @@ -1692,6 +1783,7 @@ class FieldTest(MongoDBTestCase): def test_generic_reference_list_item_modification(self): """Ensure that modifications of related documents (through generic reference) don't influence on querying """ + class Post(Document): title = StringField() @@ -1721,6 +1813,7 @@ class FieldTest(MongoDBTestCase): """Ensure we can search for a specific generic reference by providing its ObjectId. """ + class Doc(Document): ref = GenericReferenceField() @@ -1729,13 +1822,14 @@ class FieldTest(MongoDBTestCase): doc1 = Doc.objects.create() doc2 = Doc.objects.create(ref=doc1) - doc = Doc.objects.get(ref=DBRef('doc', doc1.pk)) + doc = Doc.objects.get(ref=DBRef("doc", doc1.pk)) self.assertEqual(doc, doc2) def test_generic_reference_is_not_tracked_in_parent_doc(self): """Ensure that modifications of related documents (through generic reference) don't influence the owner changed fields (#1934) """ + class Doc1(Document): name = StringField() @@ -1746,14 +1840,14 @@ class FieldTest(MongoDBTestCase): Doc1.drop_collection() Doc2.drop_collection() - doc1 = Doc1(name='garbage1').save() - doc11 = Doc1(name='garbage11').save() + doc1 = Doc1(name="garbage1").save() + doc11 = Doc1(name="garbage11").save() doc2 = Doc2(ref=doc1, refs=[doc11]).save() - doc2.ref.name = 'garbage2' + doc2.ref.name = "garbage2" self.assertEqual(doc2._get_changed_fields(), []) - doc2.refs[0].name = 'garbage3' + doc2.refs[0].name = "garbage3" self.assertEqual(doc2._get_changed_fields(), []) self.assertEqual(doc2._delta(), ({}, {})) @@ -1761,6 +1855,7 @@ class FieldTest(MongoDBTestCase): """Ensure we can search for a specific generic reference by providing its DBRef. """ + class Doc(Document): ref = GenericReferenceField() @@ -1777,17 +1872,19 @@ class FieldTest(MongoDBTestCase): def test_choices_allow_using_sets_as_choices(self): """Ensure that sets can be used when setting choices """ - class Shirt(Document): - size = StringField(choices={'M', 'L'}) - Shirt(size='M').validate() + class Shirt(Document): + size = StringField(choices={"M", "L"}) + + Shirt(size="M").validate() def test_choices_validation_allow_no_value(self): """Ensure that .validate passes and no value was provided for a field setup with choices """ + class Shirt(Document): - size = StringField(choices=('S', 'M')) + size = StringField(choices=("S", "M")) shirt = Shirt() shirt.validate() @@ -1795,17 +1892,19 @@ class FieldTest(MongoDBTestCase): def test_choices_validation_accept_possible_value(self): """Ensure that value is in a container of allowed values. """ - class Shirt(Document): - size = StringField(choices=('S', 'M')) - shirt = Shirt(size='S') + class Shirt(Document): + size = StringField(choices=("S", "M")) + + shirt = Shirt(size="S") shirt.validate() def test_choices_validation_reject_unknown_value(self): """Ensure that unallowed value are rejected upon validation """ + class Shirt(Document): - size = StringField(choices=('S', 'M')) + size = StringField(choices=("S", "M")) shirt = Shirt(size="XS") with self.assertRaises(ValidationError): @@ -1815,12 +1914,23 @@ class FieldTest(MongoDBTestCase): """Test dynamic helper for returning the display value of a choices field. 
""" + class Shirt(Document): - size = StringField(max_length=3, choices=( - ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), - ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) - style = StringField(max_length=3, choices=( - ('S', 'Small'), ('B', 'Baggy'), ('W', 'Wide')), default='W') + size = StringField( + max_length=3, + choices=( + ("S", "Small"), + ("M", "Medium"), + ("L", "Large"), + ("XL", "Extra Large"), + ("XXL", "Extra Extra Large"), + ), + ) + style = StringField( + max_length=3, + choices=(("S", "Small"), ("B", "Baggy"), ("W", "Wide")), + default="W", + ) Shirt.drop_collection() @@ -1829,30 +1939,30 @@ class FieldTest(MongoDBTestCase): # Make sure get__display returns the default value (or None) self.assertEqual(shirt1.get_size_display(), None) - self.assertEqual(shirt1.get_style_display(), 'Wide') + self.assertEqual(shirt1.get_style_display(), "Wide") - shirt1.size = 'XXL' - shirt1.style = 'B' - shirt2.size = 'M' - shirt2.style = 'S' - self.assertEqual(shirt1.get_size_display(), 'Extra Extra Large') - self.assertEqual(shirt1.get_style_display(), 'Baggy') - self.assertEqual(shirt2.get_size_display(), 'Medium') - self.assertEqual(shirt2.get_style_display(), 'Small') + shirt1.size = "XXL" + shirt1.style = "B" + shirt2.size = "M" + shirt2.style = "S" + self.assertEqual(shirt1.get_size_display(), "Extra Extra Large") + self.assertEqual(shirt1.get_style_display(), "Baggy") + self.assertEqual(shirt2.get_size_display(), "Medium") + self.assertEqual(shirt2.get_style_display(), "Small") # Set as Z - an invalid choice - shirt1.size = 'Z' - shirt1.style = 'Z' - self.assertEqual(shirt1.get_size_display(), 'Z') - self.assertEqual(shirt1.get_style_display(), 'Z') + shirt1.size = "Z" + shirt1.style = "Z" + self.assertEqual(shirt1.get_size_display(), "Z") + self.assertEqual(shirt1.get_style_display(), "Z") self.assertRaises(ValidationError, shirt1.validate) def test_simple_choices_validation(self): """Ensure that value is in a container of allowed values. """ + class Shirt(Document): - size = StringField(max_length=3, - choices=('S', 'M', 'L', 'XL', 'XXL')) + size = StringField(max_length=3, choices=("S", "M", "L", "XL", "XXL")) Shirt.drop_collection() @@ -1869,37 +1979,37 @@ class FieldTest(MongoDBTestCase): """Test dynamic helper for returning the display value of a choices field. 
""" + class Shirt(Document): - size = StringField(max_length=3, - choices=('S', 'M', 'L', 'XL', 'XXL')) - style = StringField(max_length=3, - choices=('Small', 'Baggy', 'wide'), - default='Small') + size = StringField(max_length=3, choices=("S", "M", "L", "XL", "XXL")) + style = StringField( + max_length=3, choices=("Small", "Baggy", "wide"), default="Small" + ) Shirt.drop_collection() shirt = Shirt() self.assertEqual(shirt.get_size_display(), None) - self.assertEqual(shirt.get_style_display(), 'Small') + self.assertEqual(shirt.get_style_display(), "Small") shirt.size = "XXL" shirt.style = "Baggy" - self.assertEqual(shirt.get_size_display(), 'XXL') - self.assertEqual(shirt.get_style_display(), 'Baggy') + self.assertEqual(shirt.get_size_display(), "XXL") + self.assertEqual(shirt.get_style_display(), "Baggy") # Set as Z - an invalid choice shirt.size = "Z" shirt.style = "Z" - self.assertEqual(shirt.get_size_display(), 'Z') - self.assertEqual(shirt.get_style_display(), 'Z') + self.assertEqual(shirt.get_size_display(), "Z") + self.assertEqual(shirt.get_style_display(), "Z") self.assertRaises(ValidationError, shirt.validate) def test_simple_choices_validation_invalid_value(self): """Ensure that error messages are correct. """ - SIZES = ('S', 'M', 'L', 'XL', 'XXL') - COLORS = (('R', 'Red'), ('B', 'Blue')) + SIZES = ("S", "M", "L", "XL", "XXL") + COLORS = (("R", "Red"), ("B", "Blue")) SIZE_MESSAGE = u"Value must be one of ('S', 'M', 'L', 'XL', 'XXL')" COLOR_MESSAGE = u"Value must be one of ['R', 'B']" @@ -1924,11 +2034,12 @@ class FieldTest(MongoDBTestCase): except ValidationError as error: # get the validation rules error_dict = error.to_dict() - self.assertEqual(error_dict['size'], SIZE_MESSAGE) - self.assertEqual(error_dict['color'], COLOR_MESSAGE) + self.assertEqual(error_dict["size"], SIZE_MESSAGE) + self.assertEqual(error_dict["color"], COLOR_MESSAGE) def test_recursive_validation(self): """Ensure that a validation result to_dict is available.""" + class Author(EmbeddedDocument): name = StringField(required=True) @@ -1940,9 +2051,9 @@ class FieldTest(MongoDBTestCase): title = StringField(required=True) comments = ListField(EmbeddedDocumentField(Comment)) - bob = Author(name='Bob') - post = Post(title='hello world') - post.comments.append(Comment(content='hello', author=bob)) + bob = Author(name="Bob") + post = Post(title="hello world") + post.comments.append(Comment(content="hello", author=bob)) post.comments.append(Comment(author=bob)) self.assertRaises(ValidationError, post.validate) @@ -1950,30 +2061,31 @@ class FieldTest(MongoDBTestCase): post.validate() except ValidationError as error: # ValidationError.errors property - self.assertTrue(hasattr(error, 'errors')) + self.assertTrue(hasattr(error, "errors")) self.assertIsInstance(error.errors, dict) - self.assertIn('comments', error.errors) - self.assertIn(1, error.errors['comments']) - self.assertIsInstance(error.errors['comments'][1]['content'], ValidationError) + self.assertIn("comments", error.errors) + self.assertIn(1, error.errors["comments"]) + self.assertIsInstance( + error.errors["comments"][1]["content"], ValidationError + ) # ValidationError.schema property error_dict = error.to_dict() self.assertIsInstance(error_dict, dict) - self.assertIn('comments', error_dict) - self.assertIn(1, error_dict['comments']) - self.assertIn('content', error_dict['comments'][1]) - self.assertEqual(error_dict['comments'][1]['content'], - u'Field is required') + self.assertIn("comments", error_dict) + self.assertIn(1, error_dict["comments"]) + 
self.assertIn("content", error_dict["comments"][1]) + self.assertEqual(error_dict["comments"][1]["content"], u"Field is required") - post.comments[1].content = 'here we go' + post.comments[1].content = "here we go" post.validate() def test_tuples_as_tuples(self): """Ensure that tuples remain tuples when they are inside a ComplexBaseField. """ - class EnumField(BaseField): + class EnumField(BaseField): def __init__(self, **kwargs): super(EnumField, self).__init__(**kwargs) @@ -1988,7 +2100,7 @@ class FieldTest(MongoDBTestCase): TestDoc.drop_collection() - tuples = [(100, 'Testing')] + tuples = [(100, "Testing")] doc = TestDoc() doc.items = tuples doc.save() @@ -2000,12 +2112,12 @@ class FieldTest(MongoDBTestCase): def test_dynamic_fields_class(self): class Doc2(Document): - field_1 = StringField(db_field='f') + field_1 = StringField(db_field="f") class Doc(Document): my_id = IntField(primary_key=True) - embed_me = DynamicField(db_field='e') - field_x = StringField(db_field='x') + embed_me = DynamicField(db_field="e") + field_x = StringField(db_field="x") Doc.drop_collection() Doc2.drop_collection() @@ -2022,12 +2134,12 @@ class FieldTest(MongoDBTestCase): def test_dynamic_fields_embedded_class(self): class Embed(EmbeddedDocument): - field_1 = StringField(db_field='f') + field_1 = StringField(db_field="f") class Doc(Document): my_id = IntField(primary_key=True) - embed_me = DynamicField(db_field='e') - field_x = StringField(db_field='x') + embed_me = DynamicField(db_field="e") + field_x = StringField(db_field="x") Doc.drop_collection() @@ -2038,6 +2150,7 @@ class FieldTest(MongoDBTestCase): def test_dynamicfield_dump_document(self): """Ensure a DynamicField can handle another document's dump.""" + class Doc(Document): field = DynamicField() @@ -2049,7 +2162,7 @@ class FieldTest(MongoDBTestCase): id = IntField(primary_key=True, default=1) recursive = DynamicField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class ToEmbedChild(ToEmbedParent): pass @@ -2070,7 +2183,7 @@ class FieldTest(MongoDBTestCase): def test_cls_field(self): class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Fish(Animal): pass @@ -2088,7 +2201,9 @@ class FieldTest(MongoDBTestCase): Dog().save() Fish().save() Human().save() - self.assertEqual(Animal.objects(_cls__in=["Animal.Mammal.Dog", "Animal.Fish"]).count(), 2) + self.assertEqual( + Animal.objects(_cls__in=["Animal.Mammal.Dog", "Animal.Fish"]).count(), 2 + ) self.assertEqual(Animal.objects(_cls__in=["Animal.Fish.Guppy"]).count(), 0) def test_sparse_field(self): @@ -2104,32 +2219,34 @@ class FieldTest(MongoDBTestCase): trying to instantiate a document with a field that's not defined. """ + class Doc(Document): foo = StringField() with self.assertRaises(FieldDoesNotExist): - Doc(bar='test') + Doc(bar="test") def test_undefined_field_exception_with_strict(self): """Tests if a `FieldDoesNotExist` exception is raised when trying to instantiate a document with a field that's not defined, even when strict is set to False. """ + class Doc(Document): foo = StringField() - meta = {'strict': False} + meta = {"strict": False} with self.assertRaises(FieldDoesNotExist): - Doc(bar='test') + Doc(bar="test") class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): - def setUp(self): """ Create two BlogPost entries in the database, each with several EmbeddedDocuments. 
""" + class Comments(EmbeddedDocument): author = StringField() message = StringField() @@ -2142,20 +2259,24 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): self.Comments = Comments self.BlogPost = BlogPost - self.post1 = self.BlogPost(comments=[ - self.Comments(author='user1', message='message1'), - self.Comments(author='user2', message='message1') - ]).save() + self.post1 = self.BlogPost( + comments=[ + self.Comments(author="user1", message="message1"), + self.Comments(author="user2", message="message1"), + ] + ).save() - self.post2 = self.BlogPost(comments=[ - self.Comments(author='user2', message='message2'), - self.Comments(author='user2', message='message3'), - self.Comments(author='user3', message='message1') - ]).save() + self.post2 = self.BlogPost( + comments=[ + self.Comments(author="user2", message="message2"), + self.Comments(author="user2", message="message3"), + self.Comments(author="user3", message="message1"), + ] + ).save() def test_fails_upon_validate_if_provide_a_doc_instead_of_a_list_of_doc(self): # Relates to Issue #1464 - comment = self.Comments(author='John') + comment = self.Comments(author="John") class Title(Document): content = StringField() @@ -2166,14 +2287,18 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): with self.assertRaises(ValidationError) as ctx_err: post.validate() self.assertIn("'comments'", str(ctx_err.exception)) - self.assertIn('Only lists and tuples may be used in a list field', str(ctx_err.exception)) + self.assertIn( + "Only lists and tuples may be used in a list field", str(ctx_err.exception) + ) # Test with a Document - post = self.BlogPost(comments=Title(content='garbage')) + post = self.BlogPost(comments=Title(content="garbage")) with self.assertRaises(ValidationError) as e: post.validate() self.assertIn("'comments'", str(ctx_err.exception)) - self.assertIn('Only lists and tuples may be used in a list field', str(ctx_err.exception)) + self.assertIn( + "Only lists and tuples may be used in a list field", str(ctx_err.exception) + ) def test_no_keyword_filter(self): """ @@ -2190,44 +2315,40 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the filter method of a List of Embedded Documents with a single keyword. """ - filtered = self.post1.comments.filter(author='user1') + filtered = self.post1.comments.filter(author="user1") # Ensure only 1 entry was returned. self.assertEqual(len(filtered), 1) # Ensure the entry returned is the correct entry. - self.assertEqual(filtered[0].author, 'user1') + self.assertEqual(filtered[0].author, "user1") def test_multi_keyword_filter(self): """ Tests the filter method of a List of Embedded Documents with multiple keywords. """ - filtered = self.post2.comments.filter( - author='user2', message='message2' - ) + filtered = self.post2.comments.filter(author="user2", message="message2") # Ensure only 1 entry was returned. self.assertEqual(len(filtered), 1) # Ensure the entry returned is the correct entry. - self.assertEqual(filtered[0].author, 'user2') - self.assertEqual(filtered[0].message, 'message2') + self.assertEqual(filtered[0].author, "user2") + self.assertEqual(filtered[0].message, "message2") def test_chained_filter(self): """ Tests chained filter methods of a List of Embedded Documents """ - filtered = self.post2.comments.filter(author='user2').filter( - message='message2' - ) + filtered = self.post2.comments.filter(author="user2").filter(message="message2") # Ensure only 1 entry was returned. 
self.assertEqual(len(filtered), 1) # Ensure the entry returned is the correct entry. - self.assertEqual(filtered[0].author, 'user2') - self.assertEqual(filtered[0].message, 'message2') + self.assertEqual(filtered[0].author, "user2") + self.assertEqual(filtered[0].message, "message2") def test_unknown_keyword_filter(self): """ @@ -2252,36 +2373,34 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the exclude method of a List of Embedded Documents with a single keyword. """ - excluded = self.post1.comments.exclude(author='user1') + excluded = self.post1.comments.exclude(author="user1") # Ensure only 1 entry was returned. self.assertEqual(len(excluded), 1) # Ensure the entry returned is the correct entry. - self.assertEqual(excluded[0].author, 'user2') + self.assertEqual(excluded[0].author, "user2") def test_multi_keyword_exclude(self): """ Tests the exclude method of a List of Embedded Documents with multiple keywords. """ - excluded = self.post2.comments.exclude( - author='user3', message='message1' - ) + excluded = self.post2.comments.exclude(author="user3", message="message1") # Ensure only 2 entries were returned. self.assertEqual(len(excluded), 2) # Ensure the entries returned are the correct entries. - self.assertEqual(excluded[0].author, 'user2') - self.assertEqual(excluded[1].author, 'user2') + self.assertEqual(excluded[0].author, "user2") + self.assertEqual(excluded[1].author, "user2") def test_non_matching_exclude(self): """ Tests the exclude method of a List of Embedded Documents when the keyword does not match any entries. """ - excluded = self.post2.comments.exclude(author='user4') + excluded = self.post2.comments.exclude(author="user4") # Ensure the 3 entries still exist. self.assertEqual(len(excluded), 3) @@ -2299,16 +2418,16 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the exclude method after a filter method of a List of Embedded Documents. """ - excluded = self.post2.comments.filter(author='user2').exclude( - message='message2' + excluded = self.post2.comments.filter(author="user2").exclude( + message="message2" ) # Ensure only 1 entry was returned. self.assertEqual(len(excluded), 1) # Ensure the entry returned is the correct entry. - self.assertEqual(excluded[0].author, 'user2') - self.assertEqual(excluded[0].message, 'message3') + self.assertEqual(excluded[0].author, "user2") + self.assertEqual(excluded[0].message, "message3") def test_count(self): """ @@ -2321,7 +2440,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): """ Tests the filter + count method of a List of Embedded Documents. """ - count = self.post1.comments.filter(author='user1').count() + count = self.post1.comments.filter(author="user1").count() self.assertEqual(count, 1) def test_single_keyword_get(self): @@ -2329,19 +2448,19 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the get method of a List of Embedded Documents using a single keyword. """ - comment = self.post1.comments.get(author='user1') + comment = self.post1.comments.get(author="user1") self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user1') + self.assertEqual(comment.author, "user1") def test_multi_keyword_get(self): """ Tests the get method of a List of Embedded Documents using multiple keywords. 
""" - comment = self.post2.comments.get(author='user2', message='message2') + comment = self.post2.comments.get(author="user2", message="message2") self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user2') - self.assertEqual(comment.message, 'message2') + self.assertEqual(comment.author, "user2") + self.assertEqual(comment.message, "message2") def test_no_keyword_multiple_return_get(self): """ @@ -2357,7 +2476,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): to return multiple documents. """ with self.assertRaises(MultipleObjectsReturned): - self.post2.comments.get(author='user2') + self.post2.comments.get(author="user2") def test_unknown_keyword_get(self): """ @@ -2373,7 +2492,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): returns no results. """ with self.assertRaises(DoesNotExist): - self.post1.comments.get(author='user3') + self.post1.comments.get(author="user3") def test_first(self): """ @@ -2390,20 +2509,17 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): """ Test the create method of a List of Embedded Documents. """ - comment = self.post1.comments.create( - author='user4', message='message1' - ) + comment = self.post1.comments.create(author="user4", message="message1") self.post1.save() # Ensure the returned value is the comment object. self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user4') - self.assertEqual(comment.message, 'message1') + self.assertEqual(comment.author, "user4") + self.assertEqual(comment.message, "message1") # Ensure the new comment was actually saved to the database. self.assertIn( - comment, - self.BlogPost.objects(comments__author='user4')[0].comments + comment, self.BlogPost.objects(comments__author="user4")[0].comments ) def test_filtered_create(self): @@ -2412,20 +2528,19 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): to a call to the filter method. Filtering should have no effect on creation. """ - comment = self.post1.comments.filter(author='user1').create( - author='user4', message='message1' + comment = self.post1.comments.filter(author="user1").create( + author="user4", message="message1" ) self.post1.save() # Ensure the returned value is the comment object. self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user4') - self.assertEqual(comment.message, 'message1') + self.assertEqual(comment.author, "user4") + self.assertEqual(comment.message, "message1") # Ensure the new comment was actually saved to the database. self.assertIn( - comment, - self.BlogPost.objects(comments__author='user4')[0].comments + comment, self.BlogPost.objects(comments__author="user4")[0].comments ) def test_no_keyword_update(self): @@ -2438,15 +2553,9 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): self.post1.save() # Ensure that nothing was altered. - self.assertIn( - original[0], - self.BlogPost.objects(id=self.post1.id)[0].comments - ) + self.assertIn(original[0], self.BlogPost.objects(id=self.post1.id)[0].comments) - self.assertIn( - original[1], - self.BlogPost.objects(id=self.post1.id)[0].comments - ) + self.assertIn(original[1], self.BlogPost.objects(id=self.post1.id)[0].comments) # Ensure the method returned 0 as the number of entries # modified @@ -2457,14 +2566,14 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the update method of a List of Embedded Documents with a single keyword. 
""" - number = self.post1.comments.update(author='user4') + number = self.post1.comments.update(author="user4") self.post1.save() comments = self.BlogPost.objects(id=self.post1.id)[0].comments # Ensure that the database was updated properly. - self.assertEqual(comments[0].author, 'user4') - self.assertEqual(comments[1].author, 'user4') + self.assertEqual(comments[0].author, "user4") + self.assertEqual(comments[1].author, "user4") # Ensure the method returned 2 as the number of entries # modified @@ -2474,27 +2583,25 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): """ Tests that unicode strings handled correctly """ - post = self.BlogPost(comments=[ - self.Comments(author='user1', message=u'сообщение'), - self.Comments(author='user2', message=u'хабарлама') - ]).save() - self.assertEqual(post.comments.get(message=u'сообщение').author, - 'user1') + post = self.BlogPost( + comments=[ + self.Comments(author="user1", message=u"сообщение"), + self.Comments(author="user2", message=u"хабарлама"), + ] + ).save() + self.assertEqual(post.comments.get(message=u"сообщение").author, "user1") def test_save(self): """ Tests the save method of a List of Embedded Documents. """ comments = self.post1.comments - new_comment = self.Comments(author='user4') + new_comment = self.Comments(author="user4") comments.append(new_comment) comments.save() # Ensure that the new comment has been added to the database. - self.assertIn( - new_comment, - self.BlogPost.objects(id=self.post1.id)[0].comments - ) + self.assertIn(new_comment, self.BlogPost.objects(id=self.post1.id)[0].comments) def test_delete(self): """ @@ -2505,9 +2612,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): # Ensure that all the comments under post1 were deleted in the # database. - self.assertListEqual( - self.BlogPost.objects(id=self.post1.id)[0].comments, [] - ) + self.assertListEqual(self.BlogPost.objects(id=self.post1.id)[0].comments, []) # Ensure that post1 comments were deleted from the list. self.assertListEqual(self.post1.comments, []) @@ -2525,6 +2630,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): that have a unique field can be saved, but if the unique field is also sparse than multiple documents with an empty list can be saved. """ + class EmbeddedWithUnique(EmbeddedDocument): number = IntField(unique=True) @@ -2553,16 +2659,12 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): after the filter method has been called. """ comment = self.post1.comments[1] - number = self.post1.comments.filter(author='user2').delete() + number = self.post1.comments.filter(author="user2").delete() self.post1.save() # Ensure that only the user2 comment was deleted. - self.assertNotIn( - comment, self.BlogPost.objects(id=self.post1.id)[0].comments - ) - self.assertEqual( - len(self.BlogPost.objects(id=self.post1.id)[0].comments), 1 - ) + self.assertNotIn(comment, self.BlogPost.objects(id=self.post1.id)[0].comments) + self.assertEqual(len(self.BlogPost.objects(id=self.post1.id)[0].comments), 1) # Ensure that the user2 comment no longer exists in the list. self.assertNotIn(comment, self.post1.comments) @@ -2577,7 +2679,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests that custom data is saved in the field object and doesn't interfere with the rest of field functionalities. 
""" - custom_data = {'a': 'a_value', 'b': [1, 2]} + custom_data = {"a": "a_value", "b": [1, 2]} class CustomData(Document): a_field = IntField() @@ -2587,10 +2689,10 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): a1 = CustomData(a_field=1, c_field=2).save() self.assertEqual(2, a1.c_field) - self.assertFalse(hasattr(a1.c_field, 'custom_data')) - self.assertTrue(hasattr(CustomData.c_field, 'custom_data')) - self.assertEqual(custom_data['a'], CustomData.c_field.custom_data['a']) + self.assertFalse(hasattr(a1.c_field, "custom_data")) + self.assertTrue(hasattr(CustomData.c_field, "custom_data")) + self.assertEqual(custom_data["a"], CustomData.c_field.custom_data["a"]) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index a7722458..dd2fe609 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -14,36 +14,37 @@ from mongoengine.python_support import StringIO try: from PIL import Image + HAS_PIL = True except ImportError: HAS_PIL = False from tests.utils import MongoDBTestCase -TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') -TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png") +TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") def get_file(path): """Use a BytesIO instead of a file to allow to have a one-liner and avoid that the file remains opened""" bytes_io = StringIO() - with open(path, 'rb') as f: + with open(path, "rb") as f: bytes_io.write(f.read()) bytes_io.seek(0) return bytes_io class FileTest(MongoDBTestCase): - def tearDown(self): - self.db.drop_collection('fs.files') - self.db.drop_collection('fs.chunks') + self.db.drop_collection("fs.files") + self.db.drop_collection("fs.chunks") def test_file_field_optional(self): # Make sure FileField is optional and not required class DemoFile(Document): the_file = FileField() + DemoFile.objects.create() def test_file_fields(self): @@ -55,8 +56,8 @@ class FileTest(MongoDBTestCase): PutFile.drop_collection() - text = six.b('Hello, World!') - content_type = 'text/plain' + text = six.b("Hello, World!") + content_type = "text/plain" putfile = PutFile() putfile.the_file.put(text, content_type=content_type, filename="hello") @@ -64,7 +65,10 @@ class FileTest(MongoDBTestCase): result = PutFile.objects.first() self.assertEqual(putfile, result) - self.assertEqual("%s" % result.the_file, "" % result.the_file.grid_id) + self.assertEqual( + "%s" % result.the_file, + "" % result.the_file.grid_id, + ) self.assertEqual(result.the_file.read(), text) self.assertEqual(result.the_file.content_type, content_type) result.the_file.delete() # Remove file from GridFS @@ -89,14 +93,15 @@ class FileTest(MongoDBTestCase): def test_file_fields_stream(self): """Ensure that file fields can be written to and their data retrieved """ + class StreamFile(Document): the_file = FileField() StreamFile.drop_collection() - text = six.b('Hello, World!') - more_text = six.b('Foo Bar') - content_type = 'text/plain' + text = six.b("Hello, World!") + more_text = six.b("Foo Bar") + content_type = "text/plain" streamfile = StreamFile() streamfile.the_file.new_file(content_type=content_type) @@ -124,14 +129,15 @@ class FileTest(MongoDBTestCase): """Ensure that a file field can be written to after it has been saved as None """ + class StreamFile(Document): the_file = FileField() StreamFile.drop_collection() - 
text = six.b('Hello, World!') - more_text = six.b('Foo Bar') - content_type = 'text/plain' + text = six.b("Hello, World!") + more_text = six.b("Foo Bar") + content_type = "text/plain" streamfile = StreamFile() streamfile.save() @@ -157,12 +163,11 @@ class FileTest(MongoDBTestCase): self.assertTrue(result.the_file.read() is None) def test_file_fields_set(self): - class SetFile(Document): the_file = FileField() - text = six.b('Hello, World!') - more_text = six.b('Foo Bar') + text = six.b("Hello, World!") + more_text = six.b("Foo Bar") SetFile.drop_collection() @@ -184,7 +189,6 @@ class FileTest(MongoDBTestCase): result.the_file.delete() def test_file_field_no_default(self): - class GridDocument(Document): the_file = FileField() @@ -199,7 +203,7 @@ class FileTest(MongoDBTestCase): doc_a.save() doc_b = GridDocument.objects.with_id(doc_a.id) - doc_b.the_file.replace(f, filename='doc_b') + doc_b.the_file.replace(f, filename="doc_b") doc_b.save() self.assertNotEqual(doc_b.the_file.grid_id, None) @@ -208,13 +212,13 @@ class FileTest(MongoDBTestCase): self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) # Test with default - doc_d = GridDocument(the_file=six.b('')) + doc_d = GridDocument(the_file=six.b("")) doc_d.save() doc_e = GridDocument.objects.with_id(doc_d.id) self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) - doc_e.the_file.replace(f, filename='doc_e') + doc_e.the_file.replace(f, filename="doc_e") doc_e.save() doc_f = GridDocument.objects.with_id(doc_e.id) @@ -222,11 +226,12 @@ class FileTest(MongoDBTestCase): db = GridDocument._get_db() grid_fs = gridfs.GridFS(db) - self.assertEqual(['doc_b', 'doc_e'], grid_fs.list()) + self.assertEqual(["doc_b", "doc_e"], grid_fs.list()) def test_file_uniqueness(self): """Ensure that each instance of a FileField is unique """ + class TestFile(Document): name = StringField() the_file = FileField() @@ -234,7 +239,7 @@ class FileTest(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" 
- test_file.the_file.put(six.b('Hello, World!')) + test_file.the_file.put(six.b("Hello, World!")) test_file.save() # Second instance @@ -255,20 +260,21 @@ class FileTest(MongoDBTestCase): photo = FileField() Animal.drop_collection() - marmot = Animal(genus='Marmota', family='Sciuridae') + marmot = Animal(genus="Marmota", family="Sciuridae") marmot_photo_content = get_file(TEST_IMAGE_PATH) # Retrieve a photo from disk - marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar') + marmot.photo.put(marmot_photo_content, content_type="image/jpeg", foo="bar") marmot.photo.close() marmot.save() marmot = Animal.objects.get() - self.assertEqual(marmot.photo.content_type, 'image/jpeg') - self.assertEqual(marmot.photo.foo, 'bar') + self.assertEqual(marmot.photo.content_type, "image/jpeg") + self.assertEqual(marmot.photo.foo, "bar") def test_file_reassigning(self): class TestFile(Document): the_file = FileField() + TestFile.drop_collection() test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() @@ -282,13 +288,15 @@ class FileTest(MongoDBTestCase): def test_file_boolean(self): """Ensure that a boolean test of a FileField indicates its presence """ + class TestFile(Document): the_file = FileField() + TestFile.drop_collection() test_file = TestFile() self.assertFalse(bool(test_file.the_file)) - test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain') + test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain") test_file.save() self.assertTrue(bool(test_file.the_file)) @@ -297,6 +305,7 @@ class FileTest(MongoDBTestCase): def test_file_cmp(self): """Test comparing against other types""" + class TestFile(Document): the_file = FileField() @@ -305,11 +314,12 @@ class FileTest(MongoDBTestCase): def test_file_disk_space(self): """ Test disk space usage when we delete/replace a file """ + class TestFile(Document): the_file = FileField() - text = six.b('Hello, World!') - content_type = 'text/plain' + text = six.b("Hello, World!") + content_type = "text/plain" testfile = TestFile() testfile.the_file.put(text, content_type=content_type, filename="hello") @@ -352,7 +362,7 @@ class FileTest(MongoDBTestCase): testfile.the_file.put(text, content_type=content_type, filename="hello") testfile.save() - text = six.b('Bonjour, World!') + text = six.b("Bonjour, World!") testfile.the_file.replace(text, content_type=content_type, filename="hello") testfile.save() @@ -370,7 +380,7 @@ class FileTest(MongoDBTestCase): def test_image_field(self): if not HAS_PIL: - raise SkipTest('PIL not installed') + raise SkipTest("PIL not installed") class TestImage(Document): image = ImageField() @@ -386,7 +396,9 @@ class FileTest(MongoDBTestCase): t.image.put(f) self.fail("Should have raised an invalidation error") except ValidationError as e: - self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) + self.assertEqual( + "%s" % e, "Invalid image: cannot identify image file %s" % f + ) t = TestImage() t.image.put(get_file(TEST_IMAGE_PATH)) @@ -394,7 +406,7 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, 'PNG') + self.assertEqual(t.image.format, "PNG") w, h = t.image.size self.assertEqual(w, 371) @@ -404,10 +416,11 @@ class FileTest(MongoDBTestCase): def test_image_field_reassigning(self): if not HAS_PIL: - raise SkipTest('PIL not installed') + raise SkipTest("PIL not installed") class TestFile(Document): the_file = ImageField() + TestFile.drop_collection() test_file = 
TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() @@ -420,7 +433,7 @@ class FileTest(MongoDBTestCase): def test_image_field_resize(self): if not HAS_PIL: - raise SkipTest('PIL not installed') + raise SkipTest("PIL not installed") class TestImage(Document): image = ImageField(size=(185, 37)) @@ -433,7 +446,7 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, 'PNG') + self.assertEqual(t.image.format, "PNG") w, h = t.image.size self.assertEqual(w, 185) @@ -443,7 +456,7 @@ class FileTest(MongoDBTestCase): def test_image_field_resize_force(self): if not HAS_PIL: - raise SkipTest('PIL not installed') + raise SkipTest("PIL not installed") class TestImage(Document): image = ImageField(size=(185, 37, True)) @@ -456,7 +469,7 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, 'PNG') + self.assertEqual(t.image.format, "PNG") w, h = t.image.size self.assertEqual(w, 185) @@ -466,7 +479,7 @@ class FileTest(MongoDBTestCase): def test_image_field_thumbnail(self): if not HAS_PIL: - raise SkipTest('PIL not installed') + raise SkipTest("PIL not installed") class TestImage(Document): image = ImageField(thumbnail_size=(92, 18)) @@ -479,19 +492,18 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.thumbnail.format, 'PNG') + self.assertEqual(t.image.thumbnail.format, "PNG") self.assertEqual(t.image.thumbnail.width, 92) self.assertEqual(t.image.thumbnail.height, 18) t.image.delete() def test_file_multidb(self): - register_connection('test_files', 'test_files') + register_connection("test_files", "test_files") class TestFile(Document): name = StringField() - the_file = FileField(db_alias="test_files", - collection_name="macumba") + the_file = FileField(db_alias="test_files", collection_name="macumba") TestFile.drop_collection() @@ -502,23 +514,21 @@ class FileTest(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" 
- test_file.the_file.put(six.b('Hello, World!'), - name="hello.txt") + test_file.the_file.put(six.b("Hello, World!"), name="hello.txt") test_file.save() data = get_db("test_files").macumba.files.find_one() - self.assertEqual(data.get('name'), 'hello.txt') + self.assertEqual(data.get("name"), "hello.txt") test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), six.b('Hello, World!')) + self.assertEqual(test_file.the_file.read(), six.b("Hello, World!")) test_file = TestFile.objects.first() - test_file.the_file = six.b('HELLO, WORLD!') + test_file.the_file = six.b("HELLO, WORLD!") test_file.save() test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), - six.b('HELLO, WORLD!')) + self.assertEqual(test_file.the_file.read(), six.b("HELLO, WORLD!")) def test_copyable(self): class PutFile(Document): @@ -526,8 +536,8 @@ class FileTest(MongoDBTestCase): PutFile.drop_collection() - text = six.b('Hello, World!') - content_type = 'text/plain' + text = six.b("Hello, World!") + content_type = "text/plain" putfile = PutFile() putfile.the_file.put(text, content_type=content_type) @@ -542,7 +552,7 @@ class FileTest(MongoDBTestCase): def test_get_image_by_grid_id(self): if not HAS_PIL: - raise SkipTest('PIL not installed') + raise SkipTest("PIL not installed") class TestImage(Document): @@ -559,8 +569,9 @@ class FileTest(MongoDBTestCase): test = TestImage.objects.first() grid_id = test.image1.grid_id - self.assertEqual(1, TestImage.objects(Q(image1=grid_id) - or Q(image2=grid_id)).count()) + self.assertEqual( + 1, TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count() + ) def test_complex_field_filefield(self): """Ensure you can add meta data to file""" @@ -571,21 +582,21 @@ class FileTest(MongoDBTestCase): photos = ListField(FileField()) Animal.drop_collection() - marmot = Animal(genus='Marmota', family='Sciuridae') + marmot = Animal(genus="Marmota", family="Sciuridae") - with open(TEST_IMAGE_PATH, 'rb') as marmot_photo: # Retrieve a photo from disk - photos_field = marmot._fields['photos'].field - new_proxy = photos_field.get_proxy_obj('photos', marmot) - new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') + with open(TEST_IMAGE_PATH, "rb") as marmot_photo: # Retrieve a photo from disk + photos_field = marmot._fields["photos"].field + new_proxy = photos_field.get_proxy_obj("photos", marmot) + new_proxy.put(marmot_photo, content_type="image/jpeg", foo="bar") marmot.photos.append(new_proxy) marmot.save() marmot = Animal.objects.get() - self.assertEqual(marmot.photos[0].content_type, 'image/jpeg') - self.assertEqual(marmot.photos[0].foo, 'bar') + self.assertEqual(marmot.photos[0].content_type, "image/jpeg") + self.assertEqual(marmot.photos[0].foo, "bar") self.assertEqual(marmot.photos[0].get().length, 8313) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/geo.py b/tests/fields/geo.py index 37ed97f5..446d7171 100644 --- a/tests/fields/geo.py +++ b/tests/fields/geo.py @@ -4,28 +4,27 @@ import unittest from mongoengine import * from mongoengine.connection import get_db -__all__ = ("GeoFieldTest", ) +__all__ = ("GeoFieldTest",) class GeoFieldTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") self.db = get_db() def _test_for_expected_error(self, Cls, loc, expected): try: Cls(loc=loc).validate() - self.fail('Should not validate the location {0}'.format(loc)) + self.fail("Should not validate the location {0}".format(loc)) except 
ValidationError as e: - self.assertEqual(expected, e.to_dict()['loc']) + self.assertEqual(expected, e.to_dict()["loc"]) def test_geopoint_validation(self): class Location(Document): loc = GeoPointField() invalid_coords = [{"x": 1, "y": 2}, 5, "a"] - expected = 'GeoPointField can only accept tuples or lists of (x, y)' + expected = "GeoPointField can only accept tuples or lists of (x, y)" for coord in invalid_coords: self._test_for_expected_error(Location, coord, expected) @@ -40,7 +39,7 @@ class GeoFieldTest(unittest.TestCase): expected = "Both values (%s) in point must be float or int" % repr(coord) self._test_for_expected_error(Location, coord, expected) - invalid_coords = [21, 4, 'a'] + invalid_coords = [21, 4, "a"] for coord in invalid_coords: expected = "GeoPointField can only accept tuples or lists of (x, y)" self._test_for_expected_error(Location, coord, expected) @@ -50,7 +49,9 @@ class GeoFieldTest(unittest.TestCase): loc = PointField() invalid_coords = {"x": 1, "y": 2} - expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = ( + "PointField can only accept a valid GeoJson dictionary or lists of (x, y)" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": []} @@ -77,19 +78,16 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, coord, expected) Location(loc=[1, 2]).validate() - Location(loc={ - "type": "Point", - "coordinates": [ - 81.4471435546875, - 23.61432859499169 - ]}).validate() + Location( + loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]} + ).validate() def test_linestring_validation(self): class Location(Document): loc = LineStringField() invalid_coords = {"x": 1, "y": 2} - expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -97,7 +95,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} - expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" + expected = ( + "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [5, "a"] @@ -105,16 +105,25 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[1]] - expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) + expected = ( + "Invalid LineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[1, 2, 3]] - expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) + expected = ( + "Invalid LineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[{}, {}]], [("a", "b")]] for coord in invalid_coords: - expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) + expected = ( + "Invalid LineString:\nBoth values (%s) in point must be 
float or int" + % repr(coord[0]) + ) self._test_for_expected_error(Location, coord, expected) Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate() @@ -124,7 +133,9 @@ class GeoFieldTest(unittest.TestCase): loc = PolygonField() invalid_coords = {"x": 1, "y": 2} - expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = ( + "PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -136,7 +147,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[5, "a"]]] - expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" + expected = ( + "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[]]] @@ -162,7 +175,7 @@ class GeoFieldTest(unittest.TestCase): loc = MultiPointField() invalid_coords = {"x": 1, "y": 2} - expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -188,19 +201,19 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, coord, expected) Location(loc=[[1, 2]]).validate() - Location(loc={ - "type": "MultiPoint", - "coordinates": [ - [1, 2], - [81.4471435546875, 23.61432859499169] - ]}).validate() + Location( + loc={ + "type": "MultiPoint", + "coordinates": [[1, 2], [81.4471435546875, 23.61432859499169]], + } + ).validate() def test_multilinestring_validation(self): class Location(Document): loc = MultiLineStringField() invalid_coords = {"x": 1, "y": 2} - expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -216,16 +229,25 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[1]]] - expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) + expected = ( + "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0][0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[1, 2, 3]]] - expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) + expected = ( + "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0][0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] for coord in invalid_coords: - expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0]) + expected = ( + "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" + % repr(coord[0][0]) + ) self._test_for_expected_error(Location, coord, expected) Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() @@ -235,7 +257,7 @@ class GeoFieldTest(unittest.TestCase): 
loc = MultiPolygonField() invalid_coords = {"x": 1, "y": 2} - expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -243,7 +265,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]} - expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + expected = ( + "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[5, "a"]]]] @@ -255,7 +279,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[1, 2, 3]]]] - expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + expected = ( + "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] @@ -263,7 +289,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[1, 2], [3, 4]]]] - expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point" + expected = ( + "Invalid MultiPolygon:\nLineStrings must start and end at the same point" + ) self._test_for_expected_error(Location, invalid_coords, expected) Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate() @@ -271,17 +299,19 @@ class GeoFieldTest(unittest.TestCase): def test_indexes_geopoint(self): """Ensure that indexes are created automatically for GeoPointFields. """ + class Event(Document): title = StringField() location = GeoPointField() geo_indicies = Event._geo_indices() - self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}]) + self.assertEqual(geo_indicies, [{"fields": [("location", "2d")]}]) def test_geopoint_embedded_indexes(self): """Ensure that indexes are created automatically for GeoPointFields on embedded documents. """ + class Venue(EmbeddedDocument): location = GeoPointField() name = StringField() @@ -291,11 +321,12 @@ class GeoFieldTest(unittest.TestCase): venue = EmbeddedDocumentField(Venue) geo_indicies = Event._geo_indices() - self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}]) + self.assertEqual(geo_indicies, [{"fields": [("venue.location", "2d")]}]) def test_indexes_2dsphere(self): """Ensure that indexes are created automatically for GeoPointFields. """ + class Event(Document): title = StringField() point = PointField() @@ -303,13 +334,14 @@ class GeoFieldTest(unittest.TestCase): polygon = PolygonField() geo_indicies = Event._geo_indices() - self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies) + self.assertIn({"fields": [("line", "2dsphere")]}, geo_indicies) + self.assertIn({"fields": [("polygon", "2dsphere")]}, geo_indicies) + self.assertIn({"fields": [("point", "2dsphere")]}, geo_indicies) def test_indexes_2dsphere_embedded(self): """Ensure that indexes are created automatically for GeoPointFields. 
""" + class Venue(EmbeddedDocument): name = StringField() point = PointField() @@ -321,12 +353,11 @@ class GeoFieldTest(unittest.TestCase): venue = EmbeddedDocumentField(Venue) geo_indicies = Event._geo_indices() - self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies) + self.assertIn({"fields": [("venue.line", "2dsphere")]}, geo_indicies) + self.assertIn({"fields": [("venue.polygon", "2dsphere")]}, geo_indicies) + self.assertIn({"fields": [("venue.point", "2dsphere")]}, geo_indicies) def test_geo_indexes_recursion(self): - class Location(Document): name = StringField() location = GeoPointField() @@ -338,11 +369,11 @@ class GeoFieldTest(unittest.TestCase): Location.drop_collection() Parent.drop_collection() - Parent(name='Berlin').save() + Parent(name="Berlin").save() info = Parent._get_collection().index_information() - self.assertNotIn('location_2d', info) + self.assertNotIn("location_2d", info) info = Location._get_collection().index_information() - self.assertIn('location_2d', info) + self.assertIn("location_2d", info) self.assertEqual(len(Parent._geo_indices()), 0) self.assertEqual(len(Location._geo_indices()), 1) @@ -354,9 +385,7 @@ class GeoFieldTest(unittest.TestCase): location = PointField(auto_index=False) datetime = DateTimeField() - meta = { - 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] - } + meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} self.assertEqual([], Log._geo_indices()) @@ -364,8 +393,10 @@ class GeoFieldTest(unittest.TestCase): Log.ensure_indexes() info = Log._get_collection().index_information() - self.assertEqual(info["location_2dsphere_datetime_1"]["key"], - [('location', '2dsphere'), ('datetime', 1)]) + self.assertEqual( + info["location_2dsphere_datetime_1"]["key"], + [("location", "2dsphere"), ("datetime", 1)], + ) # Test listing explicitly class Log(Document): @@ -373,9 +404,7 @@ class GeoFieldTest(unittest.TestCase): datetime = DateTimeField() meta = { - 'indexes': [ - {'fields': [("location", "2dsphere"), ("datetime", 1)]} - ] + "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] } self.assertEqual([], Log._geo_indices()) @@ -384,9 +413,11 @@ class GeoFieldTest(unittest.TestCase): Log.ensure_indexes() info = Log._get_collection().index_information() - self.assertEqual(info["location_2dsphere_datetime_1"]["key"], - [('location', '2dsphere'), ('datetime', 1)]) + self.assertEqual( + info["location_2dsphere_datetime_1"]["key"], + [("location", "2dsphere"), ("datetime", 1)], + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index 8af75d4e..df4bf2de 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -9,19 +9,22 @@ from bson import Binary from mongoengine import * from tests.utils import MongoDBTestCase -BIN_VALUE = six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5') +BIN_VALUE = six.b( + "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" +) class TestBinaryField(MongoDBTestCase): def test_binary_fields(self): """Ensure that binary fields can be stored and retrieved. 
""" + class Attachment(Document): content_type = StringField() blob = BinaryField() - BLOB = six.b('\xe6\x00\xc4\xff\x07') - MIME_TYPE = 'application/octet-stream' + BLOB = six.b("\xe6\x00\xc4\xff\x07") + MIME_TYPE = "application/octet-stream" Attachment.drop_collection() @@ -35,6 +38,7 @@ class TestBinaryField(MongoDBTestCase): def test_validation_succeeds(self): """Ensure that valid values can be assigned to binary fields. """ + class AttachmentRequired(Document): blob = BinaryField(required=True) @@ -43,11 +47,11 @@ class TestBinaryField(MongoDBTestCase): attachment_required = AttachmentRequired() self.assertRaises(ValidationError, attachment_required.validate) - attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07')) + attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07")) attachment_required.validate() - _5_BYTES = six.b('\xe6\x00\xc4\xff\x07') - _4_BYTES = six.b('\xe6\x00\xc4\xff') + _5_BYTES = six.b("\xe6\x00\xc4\xff\x07") + _4_BYTES = six.b("\xe6\x00\xc4\xff") self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate) AttachmentSizeLimit(blob=_4_BYTES).validate() @@ -57,7 +61,7 @@ class TestBinaryField(MongoDBTestCase): class Attachment(Document): blob = BinaryField() - for invalid_data in (2, u'Im_a_unicode', ['some_str']): + for invalid_data in (2, u"Im_a_unicode", ["some_str"]): self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate) def test__primary(self): @@ -108,17 +112,17 @@ class TestBinaryField(MongoDBTestCase): def test_modify_operation__set(self): """Ensures no regression of bug #1127""" + class MyDocument(Document): some_field = StringField() bin_field = BinaryField() MyDocument.drop_collection() - doc = MyDocument.objects(some_field='test').modify( - upsert=True, new=True, - set__bin_field=BIN_VALUE + doc = MyDocument.objects(some_field="test").modify( + upsert=True, new=True, set__bin_field=BIN_VALUE ) - self.assertEqual(doc.some_field, 'test') + self.assertEqual(doc.some_field, "test") if six.PY3: self.assertEqual(doc.bin_field, BIN_VALUE) else: @@ -126,15 +130,18 @@ class TestBinaryField(MongoDBTestCase): def test_update_one(self): """Ensures no regression of bug #1127""" + class MyDocument(Document): bin_field = BinaryField() MyDocument.drop_collection() - bin_data = six.b('\xe6\x00\xc4\xff\x07') + bin_data = six.b("\xe6\x00\xc4\xff\x07") doc = MyDocument(bin_field=bin_data).save() - n_updated = MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE) + n_updated = MyDocument.objects(bin_field=bin_data).update_one( + bin_field=BIN_VALUE + ) self.assertEqual(n_updated, 1) fetched = MyDocument.objects.with_id(doc.id) if six.PY3: diff --git a/tests/fields/test_boolean_field.py b/tests/fields/test_boolean_field.py index 7a2a3db6..22ebb6f7 100644 --- a/tests/fields/test_boolean_field.py +++ b/tests/fields/test_boolean_field.py @@ -11,15 +11,13 @@ class TestBooleanField(MongoDBTestCase): person = Person(admin=True) person.save() - self.assertEqual( - get_as_pymongo(person), - {'_id': person.id, - 'admin': True}) + self.assertEqual(get_as_pymongo(person), {"_id": person.id, "admin": True}) def test_validation(self): """Ensure that invalid values cannot be assigned to boolean fields. 
""" + class Person(Document): admin = BooleanField() @@ -29,9 +27,9 @@ class TestBooleanField(MongoDBTestCase): person.admin = 2 self.assertRaises(ValidationError, person.validate) - person.admin = 'Yes' + person.admin = "Yes" self.assertRaises(ValidationError, person.validate) - person.admin = 'False' + person.admin = "False" self.assertRaises(ValidationError, person.validate) def test_weirdness_constructor(self): @@ -39,11 +37,12 @@ class TestBooleanField(MongoDBTestCase): which causes some weird behavior. We dont necessarily want to maintain this behavior but its a known issue """ + class Person(Document): admin = BooleanField() - new_person = Person(admin='False') + new_person = Person(admin="False") self.assertTrue(new_person.admin) - new_person = Person(admin='0') + new_person = Person(admin="0") self.assertTrue(new_person.admin) diff --git a/tests/fields/test_cached_reference_field.py b/tests/fields/test_cached_reference_field.py index 470ecc5d..4e467587 100644 --- a/tests/fields/test_cached_reference_field.py +++ b/tests/fields/test_cached_reference_field.py @@ -7,12 +7,12 @@ from tests.utils import MongoDBTestCase class TestCachedReferenceField(MongoDBTestCase): - def test_get_and_save(self): """ Tests #1047: CachedReferenceField creates DBRefs on to_python, but can't save them on to_mongo. """ + class Animal(Document): name = StringField() tag = StringField() @@ -24,10 +24,11 @@ class TestCachedReferenceField(MongoDBTestCase): Animal.drop_collection() Ocorrence.drop_collection() - Ocorrence(person="testte", - animal=Animal(name="Leopard", tag="heavy").save()).save() + Ocorrence( + person="testte", animal=Animal(name="Leopard", tag="heavy").save() + ).save() p = Ocorrence.objects.get() - p.person = 'new_testte' + p.person = "new_testte" p.save() def test_general_things(self): @@ -37,8 +38,7 @@ class TestCachedReferenceField(MongoDBTestCase): class Ocorrence(Document): person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag']) + animal = CachedReferenceField(Animal, fields=["tag"]) Animal.drop_collection() Ocorrence.drop_collection() @@ -55,19 +55,18 @@ class TestCachedReferenceField(MongoDBTestCase): self.assertEqual(Ocorrence.objects(animal=None).count(), 1) - self.assertEqual( - a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk}) + self.assertEqual(a.to_mongo(fields=["tag"]), {"tag": "heavy", "_id": a.pk}) - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') + self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy") # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() - count = Ocorrence.objects(animal__tag='heavy').count() + count = Ocorrence.objects(animal__tag="heavy").count() self.assertEqual(count, 1) - ocorrence = Ocorrence.objects(animal__tag='heavy').first() + ocorrence = Ocorrence.objects(animal__tag="heavy").first() self.assertEqual(ocorrence.person, "teste") self.assertIsInstance(ocorrence.animal, Animal) @@ -78,28 +77,21 @@ class TestCachedReferenceField(MongoDBTestCase): class SocialTest(Document): group = StringField() - person = CachedReferenceField( - PersonAuto, - fields=('salary',)) + person = CachedReferenceField(PersonAuto, fields=("salary",)) PersonAuto.drop_collection() SocialTest.drop_collection() - p = PersonAuto(name="Alberto", salary=Decimal('7000.00')) + p = PersonAuto(name="Alberto", salary=Decimal("7000.00")) p.save() s = SocialTest(group="dev", person=p) s.save() self.assertEqual( - SocialTest.objects._collection.find_one({'person.salary': 7000.00}), { - '_id': s.pk, - 'group': s.group, - 
'person': { - '_id': p.pk, - 'salary': 7000.00 - } - }) + SocialTest.objects._collection.find_one({"person.salary": 7000.00}), + {"_id": s.pk, "group": s.group, "person": {"_id": p.pk, "salary": 7000.00}}, + ) def test_cached_reference_field_reference(self): class Group(Document): @@ -111,17 +103,14 @@ class TestCachedReferenceField(MongoDBTestCase): class SocialData(Document): obs = StringField() - tags = ListField( - StringField()) - person = CachedReferenceField( - Person, - fields=('group',)) + tags = ListField(StringField()) + person = CachedReferenceField(Person, fields=("group",)) Group.drop_collection() Person.drop_collection() SocialData.drop_collection() - g1 = Group(name='dev') + g1 = Group(name="dev") g1.save() g2 = Group(name="designers") @@ -136,22 +125,21 @@ class TestCachedReferenceField(MongoDBTestCase): p3 = Person(name="Afro design", group=g2) p3.save() - s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2']) + s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"]) s1.save() - s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4']) + s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"]) s2.save() - self.assertEqual(SocialData.objects._collection.find_one( - {'tags': 'tag2'}), { - '_id': s1.pk, - 'obs': 'testing 123', - 'tags': ['tag1', 'tag2'], - 'person': { - '_id': p1.pk, - 'group': g1.pk - } - }) + self.assertEqual( + SocialData.objects._collection.find_one({"tags": "tag2"}), + { + "_id": s1.pk, + "obs": "testing 123", + "tags": ["tag1", "tag2"], + "person": {"_id": p1.pk, "group": g1.pk}, + }, + ) self.assertEqual(SocialData.objects(person__group=g2).count(), 1) self.assertEqual(SocialData.objects(person__group=g2).first(), s2) @@ -163,23 +151,18 @@ class TestCachedReferenceField(MongoDBTestCase): Product.drop_collection() class Basket(Document): - products = ListField(CachedReferenceField(Product, fields=['name'])) + products = ListField(CachedReferenceField(Product, fields=["name"])) Basket.drop_collection() - product1 = Product(name='abc').save() - product2 = Product(name='def').save() + product1 = Product(name="abc").save() + product2 = Product(name="def").save() basket = Basket(products=[product1]).save() self.assertEqual( Basket.objects._collection.find_one(), { - '_id': basket.pk, - 'products': [ - { - '_id': product1.pk, - 'name': product1.name - } - ] - } + "_id": basket.pk, + "products": [{"_id": product1.pk, "name": product1.name}], + }, ) # push to list basket.update(push__products=product2) @@ -187,161 +170,135 @@ class TestCachedReferenceField(MongoDBTestCase): self.assertEqual( Basket.objects._collection.find_one(), { - '_id': basket.pk, - 'products': [ - { - '_id': product1.pk, - 'name': product1.name - }, - { - '_id': product2.pk, - 'name': product2.name - } - ] - } + "_id": basket.pk, + "products": [ + {"_id": product1.pk, "name": product1.name}, + {"_id": product2.pk, "name": product2.name}, + ], + }, ) def test_cached_reference_field_update_all(self): class Person(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) + TYPES = (("pf", "PF"), ("pj", "PJ")) name = StringField() tp = StringField(choices=TYPES) - father = CachedReferenceField('self', fields=('tp',)) + father = CachedReferenceField("self", fields=("tp",)) Person.drop_collection() a1 = Person(name="Wilson Father", tp="pj") a1.save() - a2 = Person(name='Wilson Junior', tp='pf', father=a1) + a2 = Person(name="Wilson Junior", tp="pf", father=a1) a2.save() a2 = Person.objects.with_id(a2.id) self.assertEqual(a2.father.tp, a1.tp) - 
self.assertEqual(dict(a2.to_mongo()), { - "_id": a2.pk, - "name": u"Wilson Junior", - "tp": u"pf", - "father": { - "_id": a1.pk, - "tp": u"pj" - } - }) + self.assertEqual( + dict(a2.to_mongo()), + { + "_id": a2.pk, + "name": u"Wilson Junior", + "tp": u"pf", + "father": {"_id": a1.pk, "tp": u"pj"}, + }, + ) - self.assertEqual(Person.objects(father=a1)._query, { - 'father._id': a1.pk - }) + self.assertEqual(Person.objects(father=a1)._query, {"father._id": a1.pk}) self.assertEqual(Person.objects(father=a1).count(), 1) Person.objects.update(set__tp="pf") Person.father.sync_all() a2.reload() - self.assertEqual(dict(a2.to_mongo()), { - "_id": a2.pk, - "name": u"Wilson Junior", - "tp": u"pf", - "father": { - "_id": a1.pk, - "tp": u"pf" - } - }) + self.assertEqual( + dict(a2.to_mongo()), + { + "_id": a2.pk, + "name": u"Wilson Junior", + "tp": u"pf", + "father": {"_id": a1.pk, "tp": u"pf"}, + }, + ) def test_cached_reference_fields_on_embedded_documents(self): with self.assertRaises(InvalidDocumentError): + class Test(Document): name = StringField() - type('WrongEmbeddedDocument', ( - EmbeddedDocument,), { - 'test': CachedReferenceField(Test) - }) + type( + "WrongEmbeddedDocument", + (EmbeddedDocument,), + {"test": CachedReferenceField(Test)}, + ) def test_cached_reference_auto_sync(self): class Person(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) + TYPES = (("pf", "PF"), ("pj", "PJ")) name = StringField() - tp = StringField( - choices=TYPES - ) + tp = StringField(choices=TYPES) - father = CachedReferenceField('self', fields=('tp',)) + father = CachedReferenceField("self", fields=("tp",)) Person.drop_collection() a1 = Person(name="Wilson Father", tp="pj") a1.save() - a2 = Person(name='Wilson Junior', tp='pf', father=a1) + a2 = Person(name="Wilson Junior", tp="pf", father=a1) a2.save() - a1.tp = 'pf' + a1.tp = "pf" a1.save() a2.reload() - self.assertEqual(dict(a2.to_mongo()), { - '_id': a2.pk, - 'name': 'Wilson Junior', - 'tp': 'pf', - 'father': { - '_id': a1.pk, - 'tp': 'pf' - } - }) + self.assertEqual( + dict(a2.to_mongo()), + { + "_id": a2.pk, + "name": "Wilson Junior", + "tp": "pf", + "father": {"_id": a1.pk, "tp": "pf"}, + }, + ) def test_cached_reference_auto_sync_disabled(self): class Persone(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) + TYPES = (("pf", "PF"), ("pj", "PJ")) name = StringField() - tp = StringField( - choices=TYPES - ) + tp = StringField(choices=TYPES) - father = CachedReferenceField( - 'self', fields=('tp',), auto_sync=False) + father = CachedReferenceField("self", fields=("tp",), auto_sync=False) Persone.drop_collection() a1 = Persone(name="Wilson Father", tp="pj") a1.save() - a2 = Persone(name='Wilson Junior', tp='pf', father=a1) + a2 = Persone(name="Wilson Junior", tp="pf", father=a1) a2.save() - a1.tp = 'pf' + a1.tp = "pf" a1.save() - self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), { - '_id': a2.pk, - 'name': 'Wilson Junior', - 'tp': 'pf', - 'father': { - '_id': a1.pk, - 'tp': 'pj' - } - }) + self.assertEqual( + Persone.objects._collection.find_one({"_id": a2.pk}), + { + "_id": a2.pk, + "name": "Wilson Junior", + "tp": "pf", + "father": {"_id": a1.pk, "tp": "pj"}, + }, + ) def test_cached_reference_embedded_fields(self): class Owner(EmbeddedDocument): - TPS = ( - ('n', "Normal"), - ('u', "Urgent") - ) + TPS = (("n", "Normal"), ("u", "Urgent")) name = StringField() - tp = StringField( - verbose_name="Type", - db_field="t", - choices=TPS) + tp = StringField(verbose_name="Type", db_field="t", choices=TPS) class 
Animal(Document): name = StringField() @@ -351,43 +308,38 @@ class TestCachedReferenceField(MongoDBTestCase): class Ocorrence(Document): person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag', 'owner.tp']) + animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"]) Animal.drop_collection() Ocorrence.drop_collection() - a = Animal(name="Leopard", tag="heavy", - owner=Owner(tp='u', name="Wilson Júnior") - ) + a = Animal( + name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior") + ) a.save() o = Ocorrence(person="teste", animal=a) o.save() - self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), { - '_id': a.pk, - 'tag': 'heavy', - 'owner': { - 't': 'u' - } - }) - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') - self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u') + self.assertEqual( + dict(a.to_mongo(fields=["tag", "owner.tp"])), + {"_id": a.pk, "tag": "heavy", "owner": {"t": "u"}}, + ) + self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy") + self.assertEqual(o.to_mongo()["animal"]["owner"]["t"], "u") # Check to_mongo with fields - self.assertNotIn('animal', o.to_mongo(fields=['person'])) + self.assertNotIn("animal", o.to_mongo(fields=["person"])) # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() - count = Ocorrence.objects( - animal__tag='heavy', animal__owner__tp='u').count() + count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count() self.assertEqual(count, 1) ocorrence = Ocorrence.objects( - animal__tag='heavy', - animal__owner__tp='u').first() + animal__tag="heavy", animal__owner__tp="u" + ).first() self.assertEqual(ocorrence.person, "teste") self.assertIsInstance(ocorrence.animal, Animal) @@ -404,43 +356,39 @@ class TestCachedReferenceField(MongoDBTestCase): class Ocorrence(Document): person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag', 'owner.tags']) + animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"]) Animal.drop_collection() Ocorrence.drop_collection() - a = Animal(name="Leopard", tag="heavy", - owner=Owner(tags=['cool', 'funny'], - name="Wilson Júnior") - ) + a = Animal( + name="Leopard", + tag="heavy", + owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"), + ) a.save() o = Ocorrence(person="teste 2", animal=a) o.save() - self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), { - '_id': a.pk, - 'tag': 'heavy', - 'owner': { - 'tags': ['cool', 'funny'] - } - }) + self.assertEqual( + dict(a.to_mongo(fields=["tag", "owner.tags"])), + {"_id": a.pk, "tag": "heavy", "owner": {"tags": ["cool", "funny"]}}, + ) - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') - self.assertEqual(o.to_mongo()['animal']['owner']['tags'], - ['cool', 'funny']) + self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy") + self.assertEqual(o.to_mongo()["animal"]["owner"]["tags"], ["cool", "funny"]) # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() query = Ocorrence.objects( - animal__tag='heavy', animal__owner__tags='cool')._query - self.assertEqual( - query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'}) + animal__tag="heavy", animal__owner__tags="cool" + )._query + self.assertEqual(query, {"animal.owner.tags": "cool", "animal.tag": "heavy"}) ocorrence = Ocorrence.objects( - animal__tag='heavy', - animal__owner__tags='cool').first() + animal__tag="heavy", animal__owner__tags="cool" + ).first() self.assertEqual(ocorrence.person, "teste 2") self.assertIsInstance(ocorrence.animal, Animal) 
diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index 58dc4b43..4eea5bdc 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -14,9 +14,10 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): """Tests for complex datetime fields - which can handle microseconds without rounding. """ + class LogEntry(Document): date = ComplexDateTimeField() - date_with_dots = ComplexDateTimeField(separator='.') + date_with_dots = ComplexDateTimeField(separator=".") LogEntry.drop_collection() @@ -62,17 +63,25 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): mm = dd = hh = ii = ss = [1, 10] for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): - stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date'] - self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None) + stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"] + self.assertTrue( + re.match("^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored) + is not None + ) # Test separator - stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots'] - self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None) + stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[ + "date_with_dots" + ] + self.assertTrue( + re.match("^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None + ) def test_complexdatetime_usage(self): """Tests for complex datetime fields - which can handle microseconds without rounding. """ + class LogEntry(Document): date = ComplexDateTimeField() @@ -123,22 +132,21 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): # Test microsecond-level ordering/filtering for microsecond in (99, 999, 9999, 10000): - LogEntry( - date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond) - ).save() + LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save() - logs = list(LogEntry.objects.order_by('date')) + logs = list(LogEntry.objects.order_by("date")) for next_idx, log in enumerate(logs[:-1], start=1): next_log = logs[next_idx] self.assertTrue(log.date < next_log.date) - logs = list(LogEntry.objects.order_by('-date')) + logs = list(LogEntry.objects.order_by("-date")) for next_idx, log in enumerate(logs[:-1], start=1): next_log = logs[next_idx] self.assertTrue(log.date > next_log.date) logs = LogEntry.objects.filter( - date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)) + date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000) + ) self.assertEqual(logs.count(), 4) def test_no_default_value(self): @@ -156,6 +164,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): def test_default_static_value(self): NOW = datetime.datetime.utcnow() + class Log(Document): timestamp = ComplexDateTimeField(default=NOW) diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index 82adb514..da572134 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -18,10 +18,11 @@ class TestDateField(MongoDBTestCase): Ensure an exception is raised when trying to cast an empty string to datetime. """ + class MyDoc(Document): dt = DateField() - md = MyDoc(dt='') + md = MyDoc(dt="") self.assertRaises(ValidationError, md.save) def test_date_from_whitespace_string(self): @@ -29,16 +30,18 @@ class TestDateField(MongoDBTestCase): Ensure an exception is raised when trying to cast a whitespace-only string to datetime. 
""" + class MyDoc(Document): dt = DateField() - md = MyDoc(dt=' ') + md = MyDoc(dt=" ") self.assertRaises(ValidationError, md.save) def test_default_values_today(self): """Ensure that default field values are used when creating a document. """ + class Person(Document): day = DateField(default=datetime.date.today) @@ -46,13 +49,14 @@ class TestDateField(MongoDBTestCase): person.validate() self.assertEqual(person.day, person.day) self.assertEqual(person.day, datetime.date.today()) - self.assertEqual(person._data['day'], person.day) + self.assertEqual(person._data["day"], person.day) def test_date(self): """Tests showing pymongo date fields See: http://api.mongodb.org/python/current/api/bson/son.html#dt """ + class LogEntry(Document): date = DateField() @@ -95,6 +99,7 @@ class TestDateField(MongoDBTestCase): def test_regular_usage(self): """Tests for regular datetime fields""" + class LogEntry(Document): date = DateField() @@ -106,12 +111,12 @@ class TestDateField(MongoDBTestCase): log.validate() log.save() - for query in (d1, d1.isoformat(' ')): + for query in (d1, d1.isoformat(" ")): log1 = LogEntry.objects.get(date=query) self.assertEqual(log, log1) if dateutil: - log1 = LogEntry.objects.get(date=d1.isoformat('T')) + log1 = LogEntry.objects.get(date=d1.isoformat("T")) self.assertEqual(log, log1) # create additional 19 log entries for a total of 20 @@ -142,6 +147,7 @@ class TestDateField(MongoDBTestCase): """Ensure that invalid values cannot be assigned to datetime fields. """ + class LogEntry(Document): time = DateField() @@ -152,14 +158,14 @@ class TestDateField(MongoDBTestCase): log.time = datetime.date.today() log.validate() - log.time = datetime.datetime.now().isoformat(' ') + log.time = datetime.datetime.now().isoformat(" ") log.validate() if dateutil: - log.time = datetime.datetime.now().isoformat('T') + log.time = datetime.datetime.now().isoformat("T") log.validate() log.time = -1 self.assertRaises(ValidationError, log.validate) - log.time = 'ABC' + log.time = "ABC" self.assertRaises(ValidationError, log.validate) diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 92f0668a..c911390a 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -19,10 +19,11 @@ class TestDateTimeField(MongoDBTestCase): Ensure an exception is raised when trying to cast an empty string to datetime. """ + class MyDoc(Document): dt = DateTimeField() - md = MyDoc(dt='') + md = MyDoc(dt="") self.assertRaises(ValidationError, md.save) def test_datetime_from_whitespace_string(self): @@ -30,16 +31,18 @@ class TestDateTimeField(MongoDBTestCase): Ensure an exception is raised when trying to cast a whitespace-only string to datetime. """ + class MyDoc(Document): dt = DateTimeField() - md = MyDoc(dt=' ') + md = MyDoc(dt=" ") self.assertRaises(ValidationError, md.save) def test_default_value_utcnow(self): """Ensure that default field values are used when creating a document. 
""" + class Person(Document): created = DateTimeField(default=dt.datetime.utcnow) @@ -48,8 +51,10 @@ class TestDateTimeField(MongoDBTestCase): person.validate() person_created_t0 = person.created self.assertLess(person.created - utcnow, dt.timedelta(seconds=1)) - self.assertEqual(person_created_t0, person.created) # make sure it does not change - self.assertEqual(person._data['created'], person.created) + self.assertEqual( + person_created_t0, person.created + ) # make sure it does not change + self.assertEqual(person._data["created"], person.created) def test_handling_microseconds(self): """Tests showing pymongo datetime fields handling of microseconds. @@ -58,6 +63,7 @@ class TestDateTimeField(MongoDBTestCase): See: http://api.mongodb.org/python/current/api/bson/son.html#dt """ + class LogEntry(Document): date = DateTimeField() @@ -103,6 +109,7 @@ class TestDateTimeField(MongoDBTestCase): def test_regular_usage(self): """Tests for regular datetime fields""" + class LogEntry(Document): date = DateTimeField() @@ -114,12 +121,12 @@ class TestDateTimeField(MongoDBTestCase): log.validate() log.save() - for query in (d1, d1.isoformat(' ')): + for query in (d1, d1.isoformat(" ")): log1 = LogEntry.objects.get(date=query) self.assertEqual(log, log1) if dateutil: - log1 = LogEntry.objects.get(date=d1.isoformat('T')) + log1 = LogEntry.objects.get(date=d1.isoformat("T")) self.assertEqual(log, log1) # create additional 19 log entries for a total of 20 @@ -150,8 +157,7 @@ class TestDateTimeField(MongoDBTestCase): self.assertEqual(logs.count(), 10) logs = LogEntry.objects.filter( - date__lte=dt.datetime(1980, 1, 1), - date__gte=dt.datetime(1975, 1, 1), + date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1) ) self.assertEqual(logs.count(), 5) @@ -159,6 +165,7 @@ class TestDateTimeField(MongoDBTestCase): """Ensure that invalid values cannot be assigned to datetime fields. 
""" + class LogEntry(Document): time = DateTimeField() @@ -169,32 +176,32 @@ class TestDateTimeField(MongoDBTestCase): log.time = dt.date.today() log.validate() - log.time = dt.datetime.now().isoformat(' ') + log.time = dt.datetime.now().isoformat(" ") log.validate() - log.time = '2019-05-16 21:42:57.897847' + log.time = "2019-05-16 21:42:57.897847" log.validate() if dateutil: - log.time = dt.datetime.now().isoformat('T') + log.time = dt.datetime.now().isoformat("T") log.validate() log.time = -1 self.assertRaises(ValidationError, log.validate) - log.time = 'ABC' + log.time = "ABC" self.assertRaises(ValidationError, log.validate) - log.time = '2019-05-16 21:GARBAGE:12' + log.time = "2019-05-16 21:GARBAGE:12" self.assertRaises(ValidationError, log.validate) - log.time = '2019-05-16 21:42:57.GARBAGE' + log.time = "2019-05-16 21:42:57.GARBAGE" self.assertRaises(ValidationError, log.validate) - log.time = '2019-05-16 21:42:57.123.456' + log.time = "2019-05-16 21:42:57.123.456" self.assertRaises(ValidationError, log.validate) def test_parse_datetime_as_str(self): class DTDoc(Document): date = DateTimeField() - date_str = '2019-03-02 22:26:01' + date_str = "2019-03-02 22:26:01" # make sure that passing a parsable datetime works dtd = DTDoc() @@ -206,7 +213,7 @@ class TestDateTimeField(MongoDBTestCase): self.assertIsInstance(dtd.date, dt.datetime) self.assertEqual(str(dtd.date), date_str) - dtd.date = 'January 1st, 9999999999' + dtd.date = "January 1st, 9999999999" self.assertRaises(ValidationError, dtd.validate) @@ -217,7 +224,7 @@ class TestDateTimeTzAware(MongoDBTestCase): connection._connections = {} connection._dbs = {} - connect(db='mongoenginetest', tz_aware=True) + connect(db="mongoenginetest", tz_aware=True) class LogEntry(Document): time = DateTimeField() @@ -228,4 +235,4 @@ class TestDateTimeTzAware(MongoDBTestCase): log = LogEntry.objects.first() log.time = dt.datetime(2013, 1, 1, 0, 0, 0) - self.assertEqual(['time'], log._changed_fields) + self.assertEqual(["time"], log._changed_fields) diff --git a/tests/fields/test_decimal_field.py b/tests/fields/test_decimal_field.py index 0213b880..30b7e5ea 100644 --- a/tests/fields/test_decimal_field.py +++ b/tests/fields/test_decimal_field.py @@ -7,32 +7,31 @@ from tests.utils import MongoDBTestCase class TestDecimalField(MongoDBTestCase): - def test_validation(self): """Ensure that invalid values cannot be assigned to decimal fields. 
""" + class Person(Document): - height = DecimalField(min_value=Decimal('0.1'), - max_value=Decimal('3.5')) + height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5")) Person.drop_collection() - Person(height=Decimal('1.89')).save() + Person(height=Decimal("1.89")).save() person = Person.objects.first() - self.assertEqual(person.height, Decimal('1.89')) + self.assertEqual(person.height, Decimal("1.89")) - person.height = '2.0' + person.height = "2.0" person.save() person.height = 0.01 self.assertRaises(ValidationError, person.validate) - person.height = Decimal('0.01') + person.height = Decimal("0.01") self.assertRaises(ValidationError, person.validate) - person.height = Decimal('4.0') + person.height = Decimal("4.0") self.assertRaises(ValidationError, person.validate) - person.height = 'something invalid' + person.height = "something invalid" self.assertRaises(ValidationError, person.validate) - person_2 = Person(height='something invalid') + person_2 = Person(height="something invalid") self.assertRaises(ValidationError, person_2.validate) def test_comparison(self): @@ -58,7 +57,14 @@ class TestDecimalField(MongoDBTestCase): string_value = DecimalField(precision=4, force_string=True) Person.drop_collection() - values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")] + values_to_store = [ + 10, + 10.1, + 10.11, + "10.111", + Decimal("10.1111"), + Decimal("10.11111"), + ] for store_at_creation in [True, False]: for value in values_to_store: # to_python is called explicitly if values were sent in the kwargs of __init__ @@ -72,20 +78,27 @@ class TestDecimalField(MongoDBTestCase): # How its stored expected = [ - {'float_value': 10.0, 'string_value': '10.0000'}, - {'float_value': 10.1, 'string_value': '10.1000'}, - {'float_value': 10.11, 'string_value': '10.1100'}, - {'float_value': 10.111, 'string_value': '10.1110'}, - {'float_value': 10.1111, 'string_value': '10.1111'}, - {'float_value': 10.1111, 'string_value': '10.1111'}] + {"float_value": 10.0, "string_value": "10.0000"}, + {"float_value": 10.1, "string_value": "10.1000"}, + {"float_value": 10.11, "string_value": "10.1100"}, + {"float_value": 10.111, "string_value": "10.1110"}, + {"float_value": 10.1111, "string_value": "10.1111"}, + {"float_value": 10.1111, "string_value": "10.1111"}, + ] expected.extend(expected) - actual = list(Person.objects.exclude('id').as_pymongo()) + actual = list(Person.objects.exclude("id").as_pymongo()) self.assertEqual(expected, actual) # How it comes out locally - expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'), - Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')] + expected = [ + Decimal("10.0000"), + Decimal("10.1000"), + Decimal("10.1100"), + Decimal("10.1110"), + Decimal("10.1111"), + Decimal("10.1111"), + ] expected.extend(expected) - for field_name in ['float_value', 'string_value']: + for field_name in ["float_value", "string_value"]: actual = list(Person.objects().scalar(field_name)) self.assertEqual(expected, actual) diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index ade02ccf..07bab85b 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -6,95 +6,92 @@ from tests.utils import MongoDBTestCase, get_as_pymongo class TestDictField(MongoDBTestCase): - def test_storage(self): class BlogPost(Document): info = DictField() BlogPost.drop_collection() - info = {'testkey': 'testvalue'} + info = {"testkey": "testvalue"} post = BlogPost(info=info).save() - 
self.assertEqual( - get_as_pymongo(post), - { - '_id': post.id, - 'info': info - } - ) + self.assertEqual(get_as_pymongo(post), {"_id": post.id, "info": info}) def test_general_things(self): """Ensure that dict types work as expected.""" + class BlogPost(Document): info = DictField() BlogPost.drop_collection() post = BlogPost() - post.info = 'my post' + post.info = "my post" self.assertRaises(ValidationError, post.validate) - post.info = ['test', 'test'] + post.info = ["test", "test"] self.assertRaises(ValidationError, post.validate) - post.info = {'$title': 'test'} + post.info = {"$title": "test"} self.assertRaises(ValidationError, post.validate) - post.info = {'nested': {'$title': 'test'}} + post.info = {"nested": {"$title": "test"}} self.assertRaises(ValidationError, post.validate) - post.info = {'the.title': 'test'} + post.info = {"the.title": "test"} self.assertRaises(ValidationError, post.validate) - post.info = {'nested': {'the.title': 'test'}} + post.info = {"nested": {"the.title": "test"}} self.assertRaises(ValidationError, post.validate) - post.info = {1: 'test'} + post.info = {1: "test"} self.assertRaises(ValidationError, post.validate) - post.info = {'title': 'test'} + post.info = {"title": "test"} post.save() post = BlogPost() - post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}} + post.info = {"title": "dollar_sign", "details": {"te$t": "test"}} post.save() post = BlogPost() - post.info = {'details': {'test': 'test'}} + post.info = {"details": {"test": "test"}} post.save() post = BlogPost() - post.info = {'details': {'test': 3}} + post.info = {"details": {"test": 3}} post.save() self.assertEqual(BlogPost.objects.count(), 4) + self.assertEqual(BlogPost.objects.filter(info__title__exact="test").count(), 1) self.assertEqual( - BlogPost.objects.filter(info__title__exact='test').count(), 1) - self.assertEqual( - BlogPost.objects.filter(info__details__test__exact='test').count(), 1) + BlogPost.objects.filter(info__details__test__exact="test").count(), 1 + ) - post = BlogPost.objects.filter(info__title__exact='dollar_sign').first() - self.assertIn('te$t', post['info']['details']) + post = BlogPost.objects.filter(info__title__exact="dollar_sign").first() + self.assertIn("te$t", post["info"]["details"]) # Confirm handles non strings or non existing keys self.assertEqual( - BlogPost.objects.filter(info__details__test__exact=5).count(), 0) + BlogPost.objects.filter(info__details__test__exact=5).count(), 0 + ) self.assertEqual( - BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) + BlogPost.objects.filter(info__made_up__test__exact="test").count(), 0 + ) - post = BlogPost.objects.create(info={'title': 'original'}) - post.info.update({'title': 'updated'}) + post = BlogPost.objects.create(info={"title": "original"}) + post.info.update({"title": "updated"}) post.save() post.reload() - self.assertEqual('updated', post.info['title']) + self.assertEqual("updated", post.info["title"]) - post.info.setdefault('authors', []) + post.info.setdefault("authors", []) post.save() post.reload() - self.assertEqual([], post.info['authors']) + self.assertEqual([], post.info["authors"]) def test_dictfield_dump_document(self): """Ensure a DictField can handle another document's dump.""" + class Doc(Document): field = DictField() @@ -106,51 +103,62 @@ class TestDictField(MongoDBTestCase): id = IntField(primary_key=True, default=1) recursive = DictField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class ToEmbedChild(ToEmbedParent): pass 
to_embed_recursive = ToEmbed(id=1).save() to_embed = ToEmbed( - id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() + id=2, recursive=to_embed_recursive.to_mongo().to_dict() + ).save() doc = Doc(field=to_embed.to_mongo().to_dict()) doc.save() self.assertIsInstance(doc.field, dict) - self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}}) + self.assertEqual( + doc.field, {"_id": 2, "recursive": {"_id": 1, "recursive": {}}} + ) # Same thing with a Document with a _cls field to_embed_recursive = ToEmbedChild(id=1).save() to_embed_child = ToEmbedChild( - id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() + id=2, recursive=to_embed_recursive.to_mongo().to_dict() + ).save() doc = Doc(field=to_embed_child.to_mongo().to_dict()) doc.save() self.assertIsInstance(doc.field, dict) expected = { - '_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild', - 'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}} + "_id": 2, + "_cls": "ToEmbedParent.ToEmbedChild", + "recursive": { + "_id": 1, + "_cls": "ToEmbedParent.ToEmbedChild", + "recursive": {}, + }, } self.assertEqual(doc.field, expected) def test_dictfield_strict(self): """Ensure that dict field handles validation if provided a strict field type.""" + class Simple(Document): mapping = DictField(field=IntField()) Simple.drop_collection() e = Simple() - e.mapping['someint'] = 1 + e.mapping["someint"] = 1 e.save() # try creating an invalid mapping with self.assertRaises(ValidationError): - e.mapping['somestring'] = "abc" + e.mapping["somestring"] = "abc" e.save() def test_dictfield_complex(self): """Ensure that the dict field can handle the complex types.""" + class SettingBase(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class StringSetting(SettingBase): value = StringField() @@ -164,70 +172,72 @@ class TestDictField(MongoDBTestCase): Simple.drop_collection() e = Simple() - e.mapping['somestring'] = StringSetting(value='foo') - e.mapping['someint'] = IntegerSetting(value=42) - e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', - 'float': 1.001, - 'complex': IntegerSetting(value=42), - 'list': [IntegerSetting(value=42), - StringSetting(value='foo')]} + e.mapping["somestring"] = StringSetting(value="foo") + e.mapping["someint"] = IntegerSetting(value=42) + e.mapping["nested_dict"] = { + "number": 1, + "string": "Hi!", + "float": 1.001, + "complex": IntegerSetting(value=42), + "list": [IntegerSetting(value=42), StringSetting(value="foo")], + } e.save() e2 = Simple.objects.get(id=e.id) - self.assertIsInstance(e2.mapping['somestring'], StringSetting) - self.assertIsInstance(e2.mapping['someint'], IntegerSetting) + self.assertIsInstance(e2.mapping["somestring"], StringSetting) + self.assertIsInstance(e2.mapping["someint"], IntegerSetting) # Test querying + self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1) self.assertEqual( - Simple.objects.filter(mapping__someint__value=42).count(), 1) + Simple.objects.filter(mapping__nested_dict__number=1).count(), 1 + ) self.assertEqual( - Simple.objects.filter(mapping__nested_dict__number=1).count(), 1) + Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1 + ) self.assertEqual( - Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1) + Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1 + ) self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1) - self.assertEqual( - 
Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1) + Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 1 + ) # Confirm can update + Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)}) Simple.objects().update( - set__mapping={"someint": IntegerSetting(value=10)}) - Simple.objects().update( - set__mapping__nested_dict__list__1=StringSetting(value='Boo')) + set__mapping__nested_dict__list__1=StringSetting(value="Boo") + ) self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0) + Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 0 + ) self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) + Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count(), 1 + ) def test_push_dict(self): class MyModel(Document): events = ListField(DictField()) - doc = MyModel(events=[{'a': 1}]).save() + doc = MyModel(events=[{"a": 1}]).save() raw_doc = get_as_pymongo(doc) - expected_raw_doc = { - '_id': doc.id, - 'events': [{'a': 1}] - } + expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]} self.assertEqual(raw_doc, expected_raw_doc) MyModel.objects(id=doc.id).update(push__events={}) raw_doc = get_as_pymongo(doc) - expected_raw_doc = { - '_id': doc.id, - 'events': [{'a': 1}, {}] - } + expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]} self.assertEqual(raw_doc, expected_raw_doc) def test_ensure_unique_default_instances(self): """Ensure that every field has it's own unique default instance.""" + class D(Document): data = DictField() data2 = DictField(default=lambda: {}) d1 = D() - d1.data['foo'] = 'bar' - d1.data2['foo'] = 'bar' + d1.data["foo"] = "bar" + d1.data2["foo"] = "bar" d2 = D() self.assertEqual(d2.data, {}) self.assertEqual(d2.data2, {}) @@ -255,22 +265,25 @@ class TestDictField(MongoDBTestCase): class Embedded(EmbeddedDocument): name = StringField() - embed = Embedded(name='garbage') + embed = Embedded(name="garbage") doc = DictFieldTest(dictionary=embed) with self.assertRaises(ValidationError) as ctx_err: doc.validate() self.assertIn("'dictionary'", str(ctx_err.exception)) - self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception)) + self.assertIn( + "Only dictionaries may be used in a DictField", str(ctx_err.exception) + ) def test_atomic_update_dict_field(self): """Ensure that the entire DictField can be atomically updated.""" + class Simple(Document): mapping = DictField(field=ListField(IntField(required=True))) Simple.drop_collection() e = Simple() - e.mapping['someints'] = [1, 2] + e.mapping["someints"] = [1, 2] e.save() e.update(set__mapping={"ints": [3, 4]}) e.reload() @@ -279,7 +292,7 @@ class TestDictField(MongoDBTestCase): # try creating an invalid mapping with self.assertRaises(ValueError): - e.update(set__mapping={"somestrings": ["foo", "bar", ]}) + e.update(set__mapping={"somestrings": ["foo", "bar"]}) def test_dictfield_with_referencefield_complex_nesting_cases(self): """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)""" @@ -296,29 +309,33 @@ class TestDictField(MongoDBTestCase): mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False))) mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True)))) mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False)))) - mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))) - mapping9 = 
DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))) + mapping8 = DictField( + ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))) + ) + mapping9 = DictField( + ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))) + ) Doc.drop_collection() Simple.drop_collection() - d = Doc(s='aa').save() + d = Doc(s="aa").save() e = Simple() - e.mapping0['someint'] = e.mapping1['someint'] = d - e.mapping2['someint'] = e.mapping3['someint'] = [d] - e.mapping4['someint'] = e.mapping5['someint'] = {'d': d} - e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}] - e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}] + e.mapping0["someint"] = e.mapping1["someint"] = d + e.mapping2["someint"] = e.mapping3["someint"] = [d] + e.mapping4["someint"] = e.mapping5["someint"] = {"d": d} + e.mapping6["someint"] = e.mapping7["someint"] = [{"d": d}] + e.mapping8["someint"] = e.mapping9["someint"] = [{"d": [d]}] e.save() s = Simple.objects.first() - self.assertIsInstance(s.mapping0['someint'], Doc) - self.assertIsInstance(s.mapping1['someint'], Doc) - self.assertIsInstance(s.mapping2['someint'][0], Doc) - self.assertIsInstance(s.mapping3['someint'][0], Doc) - self.assertIsInstance(s.mapping4['someint']['d'], Doc) - self.assertIsInstance(s.mapping5['someint']['d'], Doc) - self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc) - self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc) - self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc) - self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc) + self.assertIsInstance(s.mapping0["someint"], Doc) + self.assertIsInstance(s.mapping1["someint"], Doc) + self.assertIsInstance(s.mapping2["someint"][0], Doc) + self.assertIsInstance(s.mapping3["someint"][0], Doc) + self.assertIsInstance(s.mapping4["someint"]["d"], Doc) + self.assertIsInstance(s.mapping5["someint"]["d"], Doc) + self.assertIsInstance(s.mapping6["someint"][0]["d"], Doc) + self.assertIsInstance(s.mapping7["someint"][0]["d"], Doc) + self.assertIsInstance(s.mapping8["someint"][0]["d"][0], Doc) + self.assertIsInstance(s.mapping9["someint"][0]["d"][0], Doc) diff --git a/tests/fields/test_email_field.py b/tests/fields/test_email_field.py index 3ce49d62..06ec5151 100644 --- a/tests/fields/test_email_field.py +++ b/tests/fields/test_email_field.py @@ -12,28 +12,29 @@ class TestEmailField(MongoDBTestCase): class User(Document): email = EmailField() - user = User(email='ross@example.com') + user = User(email="ross@example.com") user.validate() - user = User(email='ross@example.co.uk') + user = User(email="ross@example.co.uk") user.validate() - user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S' - 'aJIazqqWkm7.net')) + user = User( + email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net") + ) user.validate() - user = User(email='new-tld@example.technology') + user = User(email="new-tld@example.technology") user.validate() - user = User(email='ross@example.com.') + user = User(email="ross@example.com.") self.assertRaises(ValidationError, user.validate) # unicode domain - user = User(email=u'user@пример.рф') + user = User(email=u"user@пример.рф") user.validate() # invalid unicode domain - user = User(email=u'user@пример') + user = User(email=u"user@пример") self.assertRaises(ValidationError, user.validate) # invalid data type @@ -44,20 +45,20 @@ class TestEmailField(MongoDBTestCase): # Don't run this test on pypy3, which doesn't support unicode regex: # 
https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode if sys.version_info[:2] == (3, 2): - raise SkipTest('unicode email addresses are not supported on PyPy 3') + raise SkipTest("unicode email addresses are not supported on PyPy 3") class User(Document): email = EmailField() # unicode user shouldn't validate by default... - user = User(email=u'Dörte@Sörensen.example.com') + user = User(email=u"Dörte@Sörensen.example.com") self.assertRaises(ValidationError, user.validate) # ...but it should be fine with allow_utf8_user set to True class User(Document): email = EmailField(allow_utf8_user=True) - user = User(email=u'Dörte@Sörensen.example.com') + user = User(email=u"Dörte@Sörensen.example.com") user.validate() def test_email_field_domain_whitelist(self): @@ -65,22 +66,22 @@ class TestEmailField(MongoDBTestCase): email = EmailField() # localhost domain shouldn't validate by default... - user = User(email='me@localhost') + user = User(email="me@localhost") self.assertRaises(ValidationError, user.validate) # ...but it should be fine if it's whitelisted class User(Document): - email = EmailField(domain_whitelist=['localhost']) + email = EmailField(domain_whitelist=["localhost"]) - user = User(email='me@localhost') + user = User(email="me@localhost") user.validate() def test_email_domain_validation_fails_if_invalid_idn(self): class User(Document): email = EmailField() - invalid_idn = '.google.com' - user = User(email='me@%s' % invalid_idn) + invalid_idn = ".google.com" + user = User(email="me@%s" % invalid_idn) with self.assertRaises(ValidationError) as ctx_err: user.validate() self.assertIn("domain failed IDN encoding", str(ctx_err.exception)) @@ -89,9 +90,9 @@ class TestEmailField(MongoDBTestCase): class User(Document): email = EmailField() - valid_ipv4 = 'email@[127.0.0.1]' - valid_ipv6 = 'email@[2001:dB8::1]' - invalid_ip = 'email@[324.0.0.1]' + valid_ipv4 = "email@[127.0.0.1]" + valid_ipv6 = "email@[2001:dB8::1]" + invalid_ip = "email@[324.0.0.1]" # IP address as a domain shouldn't validate by default... 
user = User(email=valid_ipv4) @@ -119,12 +120,12 @@ class TestEmailField(MongoDBTestCase): def test_email_field_honors_regex(self): class User(Document): - email = EmailField(regex=r'\w+@example.com') + email = EmailField(regex=r"\w+@example.com") # Fails regex validation - user = User(email='me@foo.com') + user = User(email="me@foo.com") self.assertRaises(ValidationError, user.validate) # Passes regex validation - user = User(email='me@example.com') + user = User(email="me@example.com") self.assertIsNone(user.validate()) diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index a262d054..6b420781 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -1,7 +1,18 @@ # -*- coding: utf-8 -*- -from mongoengine import Document, StringField, ValidationError, EmbeddedDocument, EmbeddedDocumentField, \ - InvalidQueryError, LookUpError, IntField, GenericEmbeddedDocumentField, ListField, EmbeddedDocumentListField, \ - ReferenceField +from mongoengine import ( + Document, + StringField, + ValidationError, + EmbeddedDocument, + EmbeddedDocumentField, + InvalidQueryError, + LookUpError, + IntField, + GenericEmbeddedDocumentField, + ListField, + EmbeddedDocumentListField, + ReferenceField, +) from tests.utils import MongoDBTestCase @@ -14,22 +25,24 @@ class TestEmbeddedDocumentField(MongoDBTestCase): field = EmbeddedDocumentField(MyDoc) self.assertEqual(field.document_type_obj, MyDoc) - field2 = EmbeddedDocumentField('MyDoc') - self.assertEqual(field2.document_type_obj, 'MyDoc') + field2 = EmbeddedDocumentField("MyDoc") + self.assertEqual(field2.document_type_obj, "MyDoc") def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self): with self.assertRaises(ValidationError): EmbeddedDocumentField(dict) def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self): - class MyDoc(Document): name = StringField() - emb = EmbeddedDocumentField('MyDoc') + emb = EmbeddedDocumentField("MyDoc") with self.assertRaises(ValidationError) as ctx: emb.document_type - self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception)) + self.assertIn( + "Invalid embedded document class provided to an EmbeddedDocumentField", + str(ctx.exception), + ) def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self): # Relates to #1661 @@ -37,12 +50,14 @@ class TestEmbeddedDocumentField(MongoDBTestCase): name = StringField() with self.assertRaises(ValidationError): + class MyFailingDoc(Document): emb = EmbeddedDocumentField(MyDoc) with self.assertRaises(ValidationError): + class MyFailingdoc2(Document): - emb = EmbeddedDocumentField('MyDoc') + emb = EmbeddedDocumentField("MyDoc") def test_query_embedded_document_attribute(self): class AdminSettings(EmbeddedDocument): @@ -55,34 +70,31 @@ class TestEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - p = Person( - settings=AdminSettings(foo1='bar1', foo2='bar2'), - name='John', - ).save() + p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save() # Test non exiting attribute with self.assertRaises(InvalidQueryError) as ctx_err: - Person.objects(settings__notexist='bar').first() + Person.objects(settings__notexist="bar").first() self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') with self.assertRaises(LookUpError): - Person.objects.only('settings.notexist') + Person.objects.only("settings.notexist") # Test existing 
attribute - self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p.id) - only_p = Person.objects.only('settings.foo1').first() + self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p.id) + only_p = Person.objects.only("settings.foo1").first() self.assertEqual(only_p.settings.foo1, p.settings.foo1) self.assertIsNone(only_p.settings.foo2) self.assertIsNone(only_p.name) - exclude_p = Person.objects.exclude('settings.foo1').first() + exclude_p = Person.objects.exclude("settings.foo1").first() self.assertIsNone(exclude_p.settings.foo1) self.assertEqual(exclude_p.settings.foo2, p.settings.foo2) self.assertEqual(exclude_p.name, p.name) def test_query_embedded_document_attribute_with_inheritance(self): class BaseSettings(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} base_foo = StringField() class AdminSettings(BaseSettings): @@ -93,26 +105,26 @@ class TestEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo')) + p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo")) p.save() # Test non exiting attribute with self.assertRaises(InvalidQueryError) as ctx_err: - self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id) + self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id) self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') # Test existing attribute - self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id) - self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id) + self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id) + self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id) - only_p = Person.objects.only('settings.base_foo', 'settings._cls').first() - self.assertEqual(only_p.settings.base_foo, 'basefoo') + only_p = Person.objects.only("settings.base_foo", "settings._cls").first() + self.assertEqual(only_p.settings.base_foo, "basefoo") self.assertIsNone(only_p.settings.sub_foo) def test_query_list_embedded_document_with_inheritance(self): class Post(EmbeddedDocument): title = StringField(max_length=120, required=True) - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class TextPost(Post): content = StringField() @@ -123,8 +135,8 @@ class TestEmbeddedDocumentField(MongoDBTestCase): class Record(Document): posts = ListField(EmbeddedDocumentField(Post)) - record_movie = Record(posts=[MoviePost(author='John', title='foo')]).save() - record_text = Record(posts=[TextPost(content='a', title='foo')]).save() + record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save() + record_text = Record(posts=[TextPost(content="a", title="foo")]).save() records = list(Record.objects(posts__author=record_movie.posts[0].author)) self.assertEqual(len(records), 1) @@ -134,11 +146,10 @@ class TestEmbeddedDocumentField(MongoDBTestCase): self.assertEqual(len(records), 1) self.assertEqual(records[0].id, record_text.id) - self.assertEqual(Record.objects(posts__title='foo').count(), 2) + self.assertEqual(Record.objects(posts__title="foo").count(), 2) class TestGenericEmbeddedDocumentField(MongoDBTestCase): - def test_generic_embedded_document(self): class Car(EmbeddedDocument): name = StringField() @@ -153,8 +164,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.like = 
Car(name='Fiat') + person = Person(name="Test User") + person.like = Car(name="Fiat") person.save() person = Person.objects.first() @@ -168,6 +179,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): def test_generic_embedded_document_choices(self): """Ensure you can limit GenericEmbeddedDocument choices.""" + class Car(EmbeddedDocument): name = StringField() @@ -181,8 +193,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.like = Car(name='Fiat') + person = Person(name="Test User") + person.like = Car(name="Fiat") self.assertRaises(ValidationError, person.validate) person.like = Dish(food="arroz", number=15) @@ -195,6 +207,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): """Ensure you can limit GenericEmbeddedDocument choices inside a list field. """ + class Car(EmbeddedDocument): name = StringField() @@ -208,8 +221,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.likes = [Car(name='Fiat')] + person = Person(name="Test User") + person.likes = [Car(name="Fiat")] self.assertRaises(ValidationError, person.validate) person.likes = [Dish(food="arroz", number=15)] @@ -222,25 +235,23 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): """ Ensure fields with document choices validate given a valid choice. """ + class UserComments(EmbeddedDocument): author = StringField() message = StringField() class BlogPost(Document): - comments = ListField( - GenericEmbeddedDocumentField(choices=(UserComments,)) - ) + comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,))) # Ensure Validation Passes - BlogPost(comments=[ - UserComments(author='user2', message='message2'), - ]).save() + BlogPost(comments=[UserComments(author="user2", message="message2")]).save() def test_choices_validation_documents_invalid(self): """ Ensure fields with document choices validate given an invalid choice. This should throw a ValidationError exception. """ + class UserComments(EmbeddedDocument): author = StringField() message = StringField() @@ -250,31 +261,28 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): message = StringField() class BlogPost(Document): - comments = ListField( - GenericEmbeddedDocumentField(choices=(UserComments,)) - ) + comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,))) # Single Entry Failure - post = BlogPost(comments=[ - ModeratorComments(author='mod1', message='message1'), - ]) + post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")]) self.assertRaises(ValidationError, post.save) # Mixed Entry Failure - post = BlogPost(comments=[ - ModeratorComments(author='mod1', message='message1'), - UserComments(author='user2', message='message2'), - ]) + post = BlogPost( + comments=[ + ModeratorComments(author="mod1", message="message1"), + UserComments(author="user2", message="message2"), + ] + ) self.assertRaises(ValidationError, post.save) def test_choices_validation_documents_inheritance(self): """ Ensure fields with document choices validate given subclass of choice. 
""" + class Comments(EmbeddedDocument): - meta = { - 'abstract': True - } + meta = {"abstract": True} author = StringField() message = StringField() @@ -282,14 +290,10 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): pass class BlogPost(Document): - comments = ListField( - GenericEmbeddedDocumentField(choices=(Comments,)) - ) + comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,))) # Save Valid EmbeddedDocument Type - BlogPost(comments=[ - UserComments(author='user2', message='message2'), - ]).save() + BlogPost(comments=[UserComments(author="user2", message="message2")]).save() def test_query_generic_embedded_document_attribute(self): class AdminSettings(EmbeddedDocument): @@ -299,28 +303,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): foo2 = StringField() class Person(Document): - settings = GenericEmbeddedDocumentField(choices=(AdminSettings, NonAdminSettings)) + settings = GenericEmbeddedDocumentField( + choices=(AdminSettings, NonAdminSettings) + ) Person.drop_collection() - p1 = Person(settings=AdminSettings(foo1='bar1')).save() - p2 = Person(settings=NonAdminSettings(foo2='bar2')).save() + p1 = Person(settings=AdminSettings(foo1="bar1")).save() + p2 = Person(settings=NonAdminSettings(foo2="bar2")).save() # Test non exiting attribute with self.assertRaises(InvalidQueryError) as ctx_err: - Person.objects(settings__notexist='bar').first() + Person.objects(settings__notexist="bar").first() self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') with self.assertRaises(LookUpError): - Person.objects.only('settings.notexist') + Person.objects.only("settings.notexist") # Test existing attribute - self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p1.id) - self.assertEqual(Person.objects(settings__foo2='bar2').first().id, p2.id) + self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p1.id) + self.assertEqual(Person.objects(settings__foo2="bar2").first().id, p2.id) def test_query_generic_embedded_document_attribute_with_inheritance(self): class BaseSettings(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} base_foo = StringField() class AdminSettings(BaseSettings): @@ -331,14 +337,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo')) + p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo")) p.save() # Test non exiting attribute with self.assertRaises(InvalidQueryError) as ctx_err: - self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id) + self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id) self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') # Test existing attribute - self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id) - self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id) + self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id) + self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id) diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py index fa92cf20..9f357ce5 100644 --- a/tests/fields/test_float_field.py +++ b/tests/fields/test_float_field.py @@ -7,7 +7,6 @@ from tests.utils import MongoDBTestCase class TestFloatField(MongoDBTestCase): - def test_float_ne_operator(self): class TestDocument(Document): float_fld = FloatField() @@ 
-23,6 +22,7 @@ class TestFloatField(MongoDBTestCase): def test_validation(self): """Ensure that invalid values cannot be assigned to float fields. """ + class Person(Document): height = FloatField(min_value=0.1, max_value=3.5) @@ -33,7 +33,7 @@ class TestFloatField(MongoDBTestCase): person.height = 1.89 person.validate() - person.height = '2.0' + person.height = "2.0" self.assertRaises(ValidationError, person.validate) person.height = 0.01 @@ -42,7 +42,7 @@ class TestFloatField(MongoDBTestCase): person.height = 4.0 self.assertRaises(ValidationError, person.validate) - person_2 = Person(height='something invalid') + person_2 = Person(height="something invalid") self.assertRaises(ValidationError, person_2.validate) big_person = BigPerson() diff --git a/tests/fields/test_int_field.py b/tests/fields/test_int_field.py index 1b1f7ad9..b7db0416 100644 --- a/tests/fields/test_int_field.py +++ b/tests/fields/test_int_field.py @@ -5,10 +5,10 @@ from tests.utils import MongoDBTestCase class TestIntField(MongoDBTestCase): - def test_int_validation(self): """Ensure that invalid values cannot be assigned to int fields. """ + class Person(Document): age = IntField(min_value=0, max_value=110) @@ -26,7 +26,7 @@ class TestIntField(MongoDBTestCase): self.assertRaises(ValidationError, person.validate) person.age = 120 self.assertRaises(ValidationError, person.validate) - person.age = 'ten' + person.age = "ten" self.assertRaises(ValidationError, person.validate) def test_ne_operator(self): diff --git a/tests/fields/test_lazy_reference_field.py b/tests/fields/test_lazy_reference_field.py index 1d6e6e79..2a686d7f 100644 --- a/tests/fields/test_lazy_reference_field.py +++ b/tests/fields/test_lazy_reference_field.py @@ -25,7 +25,7 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal() oc = Ocurrence(animal=animal) - self.assertIn('LazyReference', repr(oc.animal)) + self.assertIn("LazyReference", repr(oc.animal)) def test___getattr___unknown_attr_raises_attribute_error(self): class Animal(Document): @@ -93,7 +93,7 @@ class TestLazyReferenceField(MongoDBTestCase): def test_lazy_reference_set(self): class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} name = StringField() tag = StringField() @@ -109,18 +109,17 @@ class TestLazyReferenceField(MongoDBTestCase): nick = StringField() animal = Animal(name="Leopard", tag="heavy").save() - sub_animal = SubAnimal(nick='doggo', name='dog').save() + sub_animal = SubAnimal(nick="doggo", name="dog").save() for ref in ( - animal, - animal.pk, - DBRef(animal._get_collection_name(), animal.pk), - LazyReference(Animal, animal.pk), - - sub_animal, - sub_animal.pk, - DBRef(sub_animal._get_collection_name(), sub_animal.pk), - LazyReference(SubAnimal, sub_animal.pk), - ): + animal, + animal.pk, + DBRef(animal._get_collection_name(), animal.pk), + LazyReference(Animal, animal.pk), + sub_animal, + sub_animal.pk, + DBRef(sub_animal._get_collection_name(), sub_animal.pk), + LazyReference(SubAnimal, sub_animal.pk), + ): p = Ocurrence(person="test", animal=ref).save() p.reload() self.assertIsInstance(p.animal, LazyReference) @@ -144,12 +143,12 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() baddoc = BadDoc().save() for bad in ( - 42, - 'foo', - baddoc, - DBRef(baddoc._get_collection_name(), animal.pk), - LazyReference(BadDoc, animal.pk) - ): + 42, + "foo", + baddoc, + DBRef(baddoc._get_collection_name(), animal.pk), + LazyReference(BadDoc, animal.pk), + ): with 
self.assertRaises(ValidationError): p = Ocurrence(person="test", animal=bad).save() @@ -157,6 +156,7 @@ class TestLazyReferenceField(MongoDBTestCase): """Ensure that LazyReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. """ + class Member(Document): user_num = IntField(primary_key=True) @@ -172,10 +172,10 @@ class TestLazyReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() @@ -192,6 +192,7 @@ class TestLazyReferenceField(MongoDBTestCase): """Ensure that LazyReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. """ + class Member(Document): user_num = IntField(primary_key=True) @@ -207,10 +208,10 @@ class TestLazyReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() @@ -240,19 +241,19 @@ class TestLazyReferenceField(MongoDBTestCase): p = Ocurrence.objects.get() self.assertIsInstance(p.animal, LazyReference) with self.assertRaises(KeyError): - p.animal['name'] + p.animal["name"] with self.assertRaises(AttributeError): p.animal.name self.assertEqual(p.animal.pk, animal.pk) self.assertEqual(p.animal_passthrough.name, "Leopard") - self.assertEqual(p.animal_passthrough['name'], "Leopard") + self.assertEqual(p.animal_passthrough["name"], "Leopard") # Should not be able to access referenced document's methods with self.assertRaises(AttributeError): p.animal.save with self.assertRaises(KeyError): - p.animal['save'] + p.animal["save"] def test_lazy_reference_not_set(self): class Animal(Document): @@ -266,7 +267,7 @@ class TestLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - Ocurrence(person='foo').save() + Ocurrence(person="foo").save() p = Ocurrence.objects.get() self.assertIs(p.animal, None) @@ -303,8 +304,8 @@ class TestLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - animal1 = Animal(name='doggo').save() - animal2 = Animal(name='cheeta').save() + animal1 = Animal(name="doggo").save() + animal2 = Animal(name="cheeta").save() def check_fields_type(occ): self.assertIsInstance(occ.direct, LazyReference) @@ -316,8 +317,8 @@ class TestLazyReferenceField(MongoDBTestCase): occ = Ocurrence( in_list=[animal1, animal2], - in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, - direct=animal1 + in_embedded={"in_list": [animal1, animal2], "direct": animal1}, + direct=animal1, ).save() check_fields_type(occ) occ.reload() @@ -403,7 +404,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): def test_generic_lazy_reference_set(self): class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} name = StringField() tag = StringField() @@ -419,16 +420,18 @@ class TestGenericLazyReferenceField(MongoDBTestCase): nick = StringField() animal = Animal(name="Leopard", tag="heavy").save() - sub_animal = SubAnimal(nick='doggo', name='dog').save() + sub_animal = SubAnimal(nick="doggo", name="dog").save() for ref in ( - animal, - 
LazyReference(Animal, animal.pk), - {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)}, - - sub_animal, - LazyReference(SubAnimal, sub_animal.pk), - {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)}, - ): + animal, + LazyReference(Animal, animal.pk), + {"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)}, + sub_animal, + LazyReference(SubAnimal, sub_animal.pk), + { + "_cls": "SubAnimal", + "_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk), + }, + ): p = Ocurrence(person="test", animal=ref).save() p.reload() self.assertIsInstance(p.animal, (LazyReference, Document)) @@ -441,7 +444,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): class Ocurrence(Document): person = StringField() - animal = GenericLazyReferenceField(choices=['Animal']) + animal = GenericLazyReferenceField(choices=["Animal"]) Animal.drop_collection() Ocurrence.drop_collection() @@ -451,12 +454,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() baddoc = BadDoc().save() - for bad in ( - 42, - 'foo', - baddoc, - LazyReference(BadDoc, animal.pk) - ): + for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)): with self.assertRaises(ValidationError): p = Ocurrence(person="test", animal=bad).save() @@ -476,10 +474,10 @@ class TestGenericLazyReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() @@ -504,7 +502,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - Ocurrence(person='foo').save() + Ocurrence(person="foo").save() p = Ocurrence.objects.get() self.assertIs(p.animal, None) @@ -515,7 +513,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): class Ocurrence(Document): person = StringField() - animal = GenericLazyReferenceField('Animal') + animal = GenericLazyReferenceField("Animal") Animal.drop_collection() Ocurrence.drop_collection() @@ -542,8 +540,8 @@ class TestGenericLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - animal1 = Animal(name='doggo').save() - animal2 = Animal(name='cheeta').save() + animal1 = Animal(name="doggo").save() + animal2 = Animal(name="cheeta").save() def check_fields_type(occ): self.assertIsInstance(occ.direct, LazyReference) @@ -555,14 +553,20 @@ class TestGenericLazyReferenceField(MongoDBTestCase): occ = Ocurrence( in_list=[animal1, animal2], - in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, - direct=animal1 + in_embedded={"in_list": [animal1, animal2], "direct": animal1}, + direct=animal1, ).save() check_fields_type(occ) occ.reload() check_fields_type(occ) - animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)} - animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)} + animal1_ref = { + "_cls": "Animal", + "_ref": DBRef(animal1._get_collection_name(), animal1.pk), + } + animal2_ref = { + "_cls": "Animal", + "_ref": DBRef(animal2._get_collection_name(), animal2.pk), + } occ.direct = animal1_ref occ.in_list = [animal1_ref, animal2_ref] occ.in_embedded.direct = animal1_ref diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py index 
3f307809..ab86eccd 100644 --- a/tests/fields/test_long_field.py +++ b/tests/fields/test_long_field.py @@ -13,23 +13,26 @@ from tests.utils import MongoDBTestCase class TestLongField(MongoDBTestCase): - def test_long_field_is_considered_as_int64(self): """ Tests that long fields are stored as long in mongo, even if long value is small enough to be an int. """ + class TestLongFieldConsideredAsInt64(Document): some_long = LongField() doc = TestLongFieldConsideredAsInt64(some_long=42).save() db = get_db() - self.assertIsInstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64) + self.assertIsInstance( + db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64 + ) self.assertIsInstance(doc.some_long, six.integer_types) def test_long_validation(self): """Ensure that invalid values cannot be assigned to long fields. """ + class TestDocument(Document): value = LongField(min_value=0, max_value=110) @@ -41,7 +44,7 @@ class TestLongField(MongoDBTestCase): self.assertRaises(ValidationError, doc.validate) doc.value = 120 self.assertRaises(ValidationError, doc.validate) - doc.value = 'ten' + doc.value = "ten" self.assertRaises(ValidationError, doc.validate) def test_long_ne_operator(self): diff --git a/tests/fields/test_map_field.py b/tests/fields/test_map_field.py index cb27cfff..54f70aa1 100644 --- a/tests/fields/test_map_field.py +++ b/tests/fields/test_map_field.py @@ -7,23 +7,24 @@ from tests.utils import MongoDBTestCase class TestMapField(MongoDBTestCase): - def test_mapfield(self): """Ensure that the MapField handles the declared type.""" + class Simple(Document): mapping = MapField(IntField()) Simple.drop_collection() e = Simple() - e.mapping['someint'] = 1 + e.mapping["someint"] = 1 e.save() with self.assertRaises(ValidationError): - e.mapping['somestring'] = "abc" + e.mapping["somestring"] = "abc" e.save() with self.assertRaises(ValidationError): + class NoDeclaredType(Document): mapping = MapField() @@ -45,38 +46,37 @@ class TestMapField(MongoDBTestCase): Extensible.drop_collection() e = Extensible() - e.mapping['somestring'] = StringSetting(value='foo') - e.mapping['someint'] = IntegerSetting(value=42) + e.mapping["somestring"] = StringSetting(value="foo") + e.mapping["someint"] = IntegerSetting(value=42) e.save() e2 = Extensible.objects.get(id=e.id) - self.assertIsInstance(e2.mapping['somestring'], StringSetting) - self.assertIsInstance(e2.mapping['someint'], IntegerSetting) + self.assertIsInstance(e2.mapping["somestring"], StringSetting) + self.assertIsInstance(e2.mapping["someint"], IntegerSetting) with self.assertRaises(ValidationError): - e.mapping['someint'] = 123 + e.mapping["someint"] = 123 e.save() def test_embedded_mapfield_db_field(self): class Embedded(EmbeddedDocument): - number = IntField(default=0, db_field='i') + number = IntField(default=0, db_field="i") class Test(Document): - my_map = MapField(field=EmbeddedDocumentField(Embedded), - db_field='x') + my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x") Test.drop_collection() test = Test() - test.my_map['DICTIONARY_KEY'] = Embedded(number=1) + test.my_map["DICTIONARY_KEY"] = Embedded(number=1) test.save() Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) test = Test.objects.get() - self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2) + self.assertEqual(test.my_map["DICTIONARY_KEY"].number, 2) doc = self.db.test.find_one() - self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) + self.assertEqual(doc["x"]["DICTIONARY_KEY"]["i"], 2) def 
test_mapfield_numerical_index(self): """Ensure that MapField accept numeric strings as indexes.""" @@ -90,9 +90,9 @@ class TestMapField(MongoDBTestCase): Test.drop_collection() test = Test() - test.my_map['1'] = Embedded(name='test') + test.my_map["1"] = Embedded(name="test") test.save() - test.my_map['1'].name = 'test updated' + test.my_map["1"].name = "test updated" test.save() def test_map_field_lookup(self): @@ -110,15 +110,20 @@ class TestMapField(MongoDBTestCase): actions = MapField(EmbeddedDocumentField(Action)) Log.drop_collection() - Log(name="wilson", visited={'friends': datetime.datetime.now()}, - actions={'friends': Action(operation='drink', object='beer')}).save() + Log( + name="wilson", + visited={"friends": datetime.datetime.now()}, + actions={"friends": Action(operation="drink", object="beer")}, + ).save() - self.assertEqual(1, Log.objects( - visited__friends__exists=True).count()) + self.assertEqual(1, Log.objects(visited__friends__exists=True).count()) - self.assertEqual(1, Log.objects( - actions__friends__operation='drink', - actions__friends__object='beer').count()) + self.assertEqual( + 1, + Log.objects( + actions__friends__operation="drink", actions__friends__object="beer" + ).count(), + ) def test_map_field_unicode(self): class Info(EmbeddedDocument): @@ -130,15 +135,11 @@ class TestMapField(MongoDBTestCase): BlogPost.drop_collection() - tree = BlogPost(info_dict={ - u"éééé": { - 'description': u"VALUE: éééé" - } - }) + tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}}) tree.save() self.assertEqual( BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description, - u"VALUE: éééé" + u"VALUE: éééé", ) diff --git a/tests/fields/test_reference_field.py b/tests/fields/test_reference_field.py index 5e1fc605..5fd053fe 100644 --- a/tests/fields/test_reference_field.py +++ b/tests/fields/test_reference_field.py @@ -26,15 +26,15 @@ class TestReferenceField(MongoDBTestCase): # with a document class name. 
self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument) - user = User(name='Test User') + user = User(name="Test User") # Ensure that the referenced object must have been saved - post1 = BlogPost(content='Chips and gravy taste good.') + post1 = BlogPost(content="Chips and gravy taste good.") post1.author = user self.assertRaises(ValidationError, post1.save) # Check that an invalid object type cannot be used - post2 = BlogPost(content='Chips and chilli taste good.') + post2 = BlogPost(content="Chips and chilli taste good.") post1.author = post2 self.assertRaises(ValidationError, post1.validate) @@ -59,7 +59,7 @@ class TestReferenceField(MongoDBTestCase): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") Person.drop_collection() @@ -74,7 +74,7 @@ class TestReferenceField(MongoDBTestCase): class Person(Document): name = StringField() - parent = ReferenceField('self', dbref=True) + parent = ReferenceField("self", dbref=True) Person.drop_collection() @@ -82,8 +82,8 @@ class TestReferenceField(MongoDBTestCase): Person(name="Ross", parent=p1).save() self.assertEqual( - Person._get_collection().find_one({'name': 'Ross'})['parent'], - DBRef('person', p1.pk) + Person._get_collection().find_one({"name": "Ross"})["parent"], + DBRef("person", p1.pk), ) p = Person.objects.get(name="Ross") @@ -97,21 +97,17 @@ class TestReferenceField(MongoDBTestCase): class Person(Document): name = StringField() - parent = ReferenceField('self', dbref=False) + parent = ReferenceField("self", dbref=False) - p = Person( - name='Steve', - parent=DBRef('person', 'abcdefghijklmnop') + p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop")) + self.assertEqual( + p.to_mongo(), SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")]) ) - self.assertEqual(p.to_mongo(), SON([ - ('name', u'Steve'), - ('parent', 'abcdefghijklmnop') - ])) def test_objectid_reference_fields(self): class Person(Document): name = StringField() - parent = ReferenceField('self', dbref=False) + parent = ReferenceField("self", dbref=False) Person.drop_collection() @@ -119,8 +115,8 @@ class TestReferenceField(MongoDBTestCase): Person(name="Ross", parent=p1).save() col = Person._get_collection() - data = col.find_one({'name': 'Ross'}) - self.assertEqual(data['parent'], p1.pk) + data = col.find_one({"name": "Ross"}) + self.assertEqual(data["parent"], p1.pk) p = Person.objects.get(name="Ross") self.assertEqual(p.parent, p1) @@ -128,9 +124,10 @@ class TestReferenceField(MongoDBTestCase): def test_undefined_reference(self): """Ensure that ReferenceFields may reference undefined Documents. """ + class Product(Document): name = StringField() - company = ReferenceField('Company') + company = ReferenceField("Company") class Company(Document): name = StringField() @@ -138,12 +135,12 @@ class TestReferenceField(MongoDBTestCase): Product.drop_collection() Company.drop_collection() - ten_gen = Company(name='10gen') + ten_gen = Company(name="10gen") ten_gen.save() - mongodb = Product(name='MongoDB', company=ten_gen) + mongodb = Product(name="MongoDB", company=ten_gen) mongodb.save() - me = Product(name='MongoEngine') + me = Product(name="MongoEngine") me.save() obj = Product.objects(company=ten_gen).first() @@ -160,6 +157,7 @@ class TestReferenceField(MongoDBTestCase): """Ensure that ReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. 
""" + class Member(Document): user_num = IntField(primary_key=True) @@ -175,10 +173,10 @@ class TestReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() @@ -191,6 +189,7 @@ class TestReferenceField(MongoDBTestCase): """Ensure that ReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. """ + class Member(Document): user_num = IntField(primary_key=True) @@ -206,10 +205,10 @@ class TestReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py index 6124c65e..f2c8388b 100644 --- a/tests/fields/test_sequence_field.py +++ b/tests/fields/test_sequence_field.py @@ -11,38 +11,38 @@ class TestSequenceField(MongoDBTestCase): id = SequenceField(primary_key=True) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) Person.id.set_next_value(1000) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 1000) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 1000) def test_sequence_field_get_next_value(self): class Person(Document): id = SequenceField(primary_key=True) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() self.assertEqual(Person.id.get_next_value(), 11) - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() self.assertEqual(Person.id.get_next_value(), 1) @@ -50,40 +50,40 @@ class TestSequenceField(MongoDBTestCase): id = SequenceField(primary_key=True, value_decorator=str) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - self.assertEqual(Person.id.get_next_value(), '11') - self.db['mongoengine.counters'].drop() + self.assertEqual(Person.id.get_next_value(), "11") + self.db["mongoengine.counters"].drop() - self.assertEqual(Person.id.get_next_value(), '1') + self.assertEqual(Person.id.get_next_value(), "1") def test_sequence_field_sequence_name(self): class Person(Document): - id = SequenceField(primary_key=True, sequence_name='jelly') + id = SequenceField(primary_key=True, sequence_name="jelly") name = StringField() - 
self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) + self.assertEqual(c["next"], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) + self.assertEqual(c["next"], 10) Person.id.set_next_value(1000) - c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) - self.assertEqual(c['next'], 1000) + c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) + self.assertEqual(c["next"], 1000) def test_multiple_sequence_fields(self): class Person(Document): @@ -91,14 +91,14 @@ class TestSequenceField(MongoDBTestCase): counter = SequenceField() name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) @@ -106,23 +106,23 @@ class TestSequenceField(MongoDBTestCase): counters = [i.counter for i in Person.objects] self.assertEqual(counters, range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) Person.id.set_next_value(1000) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 1000) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 1000) Person.counter.set_next_value(999) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'}) - self.assertEqual(c['next'], 999) + c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"}) + self.assertEqual(c["next"], 999) def test_sequence_fields_reload(self): class Animal(Document): counter = SequenceField() name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Animal.drop_collection() a = Animal(name="Boi").save() @@ -151,7 +151,7 @@ class TestSequenceField(MongoDBTestCase): id = SequenceField(primary_key=True) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Animal.drop_collection() Person.drop_collection() @@ -159,11 +159,11 @@ class TestSequenceField(MongoDBTestCase): Animal(name="Animal %s" % x).save() Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) - c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) + self.assertEqual(c["next"], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) @@ -171,32 +171,32 @@ class TestSequenceField(MongoDBTestCase): id = [i.id for i in 
Animal.objects] self.assertEqual(id, range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) - c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) + self.assertEqual(c["next"], 10) def test_sequence_field_value_decorator(self): class Person(Document): id = SequenceField(primary_key=True, value_decorator=str) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): p = Person(name="Person %s" % x) p.save() - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, map(str, range(1, 11))) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + self.assertEqual(c["next"], 10) def test_embedded_sequence_field(self): class Comment(EmbeddedDocument): @@ -207,14 +207,18 @@ class TestSequenceField(MongoDBTestCase): title = StringField(required=True) comments = ListField(EmbeddedDocumentField(Comment)) - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Post.drop_collection() - Post(title="MongoEngine", - comments=[Comment(content="NoSQL Rocks"), - Comment(content="MongoEngine Rocks")]).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'}) - self.assertEqual(c['next'], 2) + Post( + title="MongoEngine", + comments=[ + Comment(content="NoSQL Rocks"), + Comment(content="MongoEngine Rocks"), + ], + ).save() + c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"}) + self.assertEqual(c["next"], 2) post = Post.objects.first() self.assertEqual(1, post.comments[0].id) self.assertEqual(2, post.comments[1].id) @@ -223,7 +227,7 @@ class TestSequenceField(MongoDBTestCase): class Base(Document): name = StringField() counter = SequenceField() - meta = {'abstract': True} + meta = {"abstract": True} class Foo(Base): pass @@ -231,24 +235,27 @@ class TestSequenceField(MongoDBTestCase): class Bar(Base): pass - bar = Bar(name='Bar') + bar = Bar(name="Bar") bar.save() - foo = Foo(name='Foo') + foo = Foo(name="Foo") foo.save() - self.assertTrue('base.counter' in - self.db['mongoengine.counters'].find().distinct('_id')) - self.assertFalse(('foo.counter' or 'bar.counter') in - self.db['mongoengine.counters'].find().distinct('_id')) + self.assertTrue( + "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") + ) + self.assertFalse( + ("foo.counter" or "bar.counter") + in self.db["mongoengine.counters"].find().distinct("_id") + ) self.assertNotEqual(foo.counter, bar.counter) - self.assertEqual(foo._fields['counter'].owner_document, Base) - self.assertEqual(bar._fields['counter'].owner_document, Base) + self.assertEqual(foo._fields["counter"].owner_document, Base) + self.assertEqual(bar._fields["counter"].owner_document, Base) def test_no_inherited_sequencefield(self): class Base(Document): name = StringField() - meta = {'abstract': True} + meta = {"abstract": True} class Foo(Base): counter = SequenceField() @@ -256,16 +263,19 @@ class 
TestSequenceField(MongoDBTestCase): class Bar(Base): counter = SequenceField() - bar = Bar(name='Bar') + bar = Bar(name="Bar") bar.save() - foo = Foo(name='Foo') + foo = Foo(name="Foo") foo.save() - self.assertFalse('base.counter' in - self.db['mongoengine.counters'].find().distinct('_id')) - self.assertTrue(('foo.counter' and 'bar.counter') in - self.db['mongoengine.counters'].find().distinct('_id')) + self.assertFalse( + "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") + ) + self.assertTrue( + ("foo.counter" and "bar.counter") + in self.db["mongoengine.counters"].find().distinct("_id") + ) self.assertEqual(foo.counter, bar.counter) - self.assertEqual(foo._fields['counter'].owner_document, Foo) - self.assertEqual(bar._fields['counter'].owner_document, Bar) + self.assertEqual(foo._fields["counter"].owner_document, Foo) + self.assertEqual(bar._fields["counter"].owner_document, Bar) diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index ddbf707e..81baf8d0 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -5,49 +5,53 @@ from tests.utils import MongoDBTestCase class TestURLField(MongoDBTestCase): - def test_validation(self): """Ensure that URLFields validate urls properly.""" + class Link(Document): url = URLField() link = Link() - link.url = 'google' + link.url = "google" self.assertRaises(ValidationError, link.validate) - link.url = 'http://www.google.com:8080' + link.url = "http://www.google.com:8080" link.validate() def test_unicode_url_validation(self): """Ensure unicode URLs are validated properly.""" + class Link(Document): url = URLField() link = Link() - link.url = u'http://привет.com' + link.url = u"http://привет.com" # TODO fix URL validation - this *IS* a valid URL # For now we just want to make sure that the error message is correct with self.assertRaises(ValidationError) as ctx_err: link.validate() - self.assertEqual(unicode(ctx_err.exception), - u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])") + self.assertEqual( + unicode(ctx_err.exception), + u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])", + ) def test_url_scheme_validation(self): """Ensure that URLFields validate urls with specific schemes properly. 
""" + class Link(Document): url = URLField() class SchemeLink(Document): - url = URLField(schemes=['ws', 'irc']) + url = URLField(schemes=["ws", "irc"]) link = Link() - link.url = 'ws://google.com' + link.url = "ws://google.com" self.assertRaises(ValidationError, link.validate) scheme_link = SchemeLink() - scheme_link.url = 'ws://google.com' + scheme_link.url = "ws://google.com" scheme_link.validate() def test_underscore_allowed_in_domains_names(self): @@ -55,5 +59,5 @@ class TestURLField(MongoDBTestCase): url = URLField() link = Link() - link.url = 'https://san_leandro-ca.geebo.com' + link.url = "https://san_leandro-ca.geebo.com" link.validate() diff --git a/tests/fields/test_uuid_field.py b/tests/fields/test_uuid_field.py index 7b7faaf2..647dceaf 100644 --- a/tests/fields/test_uuid_field.py +++ b/tests/fields/test_uuid_field.py @@ -15,11 +15,8 @@ class TestUUIDField(MongoDBTestCase): uid = uuid.uuid4() person = Person(api_key=uid).save() self.assertEqual( - get_as_pymongo(person), - {'_id': person.id, - 'api_key': str(uid) - } - ) + get_as_pymongo(person), {"_id": person.id, "api_key": str(uid)} + ) def test_field_string(self): """Test UUID fields storing as String @@ -37,8 +34,10 @@ class TestUUIDField(MongoDBTestCase): person.api_key = api_key person.validate() - invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', - '9d159858-549b-4975-9f98-dd2f987c113') + invalid = ( + "9d159858-549b-4975-9f98-dd2f987c113g", + "9d159858-549b-4975-9f98-dd2f987c113", + ) for api_key in invalid: person.api_key = api_key self.assertRaises(ValidationError, person.validate) @@ -58,8 +57,10 @@ class TestUUIDField(MongoDBTestCase): person.api_key = api_key person.validate() - invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', - '9d159858-549b-4975-9f98-dd2f987c113') + invalid = ( + "9d159858-549b-4975-9f98-dd2f987c113g", + "9d159858-549b-4975-9f98-dd2f987c113", + ) for api_key in invalid: person.api_key = api_key self.assertRaises(ValidationError, person.validate) diff --git a/tests/fixtures.py b/tests/fixtures.py index b8303b99..9f06f1ab 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -11,7 +11,7 @@ class PickleEmbedded(EmbeddedDocument): class PickleTest(Document): number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) + string = StringField(choices=(("One", "1"), ("Two", "2"))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) photo = FileField() @@ -19,7 +19,7 @@ class PickleTest(Document): class NewDocumentPickleTest(Document): number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) + string = StringField(choices=(("One", "1"), ("Two", "2"))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) photo = FileField() @@ -36,7 +36,7 @@ class PickleDynamicTest(DynamicDocument): class PickleSignalsTest(Document): number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) + string = StringField(choices=(("One", "1"), ("Two", "2"))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) @@ -58,4 +58,4 @@ class Mixin(object): class Base(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 250e2601..9f0fe827 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -7,79 +7,78 @@ __all__ = ("QueryFieldListTest", "OnlyExcludeAllTest") class QueryFieldListTest(unittest.TestCase): - def test_empty(self): q = 
QueryFieldList() self.assertFalse(q) - q = QueryFieldList(always_include=['_cls']) + q = QueryFieldList(always_include=["_cls"]) self.assertFalse(q) def test_include_include(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True) - self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1}) + q += QueryFieldList( + fields=["a", "b"], value=QueryFieldList.ONLY, _only_called=True + ) + self.assertEqual(q.as_dict(), {"a": 1, "b": 1}) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {"a": 1, "b": 1, "c": 1}) def test_include_exclude(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 1}) + q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {"a": 1, "b": 1}) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {"a": 1}) def test_exclude_exclude(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0}) + q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {"a": 0, "b": 0}) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {"a": 0, "b": 0, "c": 0}) def test_exclude_include(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'c': 1}) + q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {"a": 0, "b": 0}) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {"c": 1}) def test_always_include(self): - q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) + q = QueryFieldList(always_include=["x", "y"]) + q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "c": 1}) def test_reset(self): - q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) + q = QueryFieldList(always_include=["x", "y"]) + q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "c": 1}) q.reset() self.assertFalse(q) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1}) + q += 
QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "b": 1, "c": 1}) def test_using_a_slice(self): q = QueryFieldList() - q += QueryFieldList(fields=['a'], value={"$slice": 5}) - self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) + q += QueryFieldList(fields=["a"], value={"$slice": 5}) + self.assertEqual(q.as_dict(), {"a": {"$slice": 5}}) class OnlyExcludeAllTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") class Person(Document): name = StringField() age = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} Person.drop_collection() self.Person = Person def test_mixing_only_exclude(self): - class MyDoc(Document): a = StringField() b = StringField() @@ -88,32 +87,32 @@ class OnlyExcludeAllTest(unittest.TestCase): e = StringField() f = StringField() - include = ['a', 'b', 'c', 'd', 'e'] - exclude = ['d', 'e'] - only = ['b', 'c'] + include = ["a", "b", "c", "d", "e"] + exclude = ["d", "e"] + only = ["b", "c"] qs = MyDoc.objects.fields(**{i: 1 for i in include}) - self.assertEqual(qs._loaded_fields.as_dict(), - {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) + self.assertEqual( + qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1, "d": 1, "e": 1} + ) qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) qs = qs.exclude(*exclude) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) qs = MyDoc.objects.fields(**{i: 1 for i in include}) qs = qs.exclude(*exclude) - self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1}) qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) qs = MyDoc.objects.exclude(*exclude) qs = qs.fields(**{i: 1 for i in include}) - self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1}) qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) def test_slicing(self): - class MyDoc(Document): a = ListField() b = ListField() @@ -122,24 +121,23 @@ class OnlyExcludeAllTest(unittest.TestCase): e = ListField() f = ListField() - include = ['a', 'b', 'c', 'd', 'e'] - exclude = ['d', 'e'] - only = ['b', 'c'] + include = ["a", "b", "c", "d", "e"] + exclude = ["d", "e"] + only = ["b", "c"] qs = MyDoc.objects.fields(**{i: 1 for i in include}) qs = qs.exclude(*exclude) qs = qs.only(*only) qs = qs.fields(slice__b=5) - self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}, 'c': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"b": {"$slice": 5}, "c": 1}) qs = qs.fields(slice__c=[5, 1]) - self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}}) + self.assertEqual( + qs._loaded_fields.as_dict(), {"b": {"$slice": 5}, "c": {"$slice": [5, 1]}} + ) - qs = qs.exclude('c') - self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}}) + qs = qs.exclude("c") + self.assertEqual(qs._loaded_fields.as_dict(), {"b": {"$slice": 5}}) def test_mix_slice_with_other_fields(self): class MyDoc(Document): @@ -148,43 +146,42 @@ class 
OnlyExcludeAllTest(unittest.TestCase): c = ListField() qs = MyDoc.objects.fields(a=1, b=0, slice__c=2) - self.assertEqual(qs._loaded_fields.as_dict(), - {'c': {'$slice': 2}, 'a': 1}) + self.assertEqual(qs._loaded_fields.as_dict(), {"c": {"$slice": 2}, "a": 1}) def test_only(self): """Ensure that QuerySet.only only returns the requested fields. """ - person = self.Person(name='test', age=25) + person = self.Person(name="test", age=25) person.save() - obj = self.Person.objects.only('name').get() + obj = self.Person.objects.only("name").get() self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, None) - obj = self.Person.objects.only('age').get() + obj = self.Person.objects.only("age").get() self.assertEqual(obj.name, None) self.assertEqual(obj.age, person.age) - obj = self.Person.objects.only('name', 'age').get() + obj = self.Person.objects.only("name", "age").get() self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, person.age) - obj = self.Person.objects.only(*('id', 'name',)).get() + obj = self.Person.objects.only(*("id", "name")).get() self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, None) # Check polymorphism still works class Employee(self.Person): - salary = IntField(db_field='wage') + salary = IntField(db_field="wage") - employee = Employee(name='test employee', age=40, salary=30000) + employee = Employee(name="test employee", age=40, salary=30000) employee.save() - obj = self.Person.objects(id=employee.id).only('age').get() + obj = self.Person.objects(id=employee.id).only("age").get() self.assertIsInstance(obj, Employee) # Check field names are looked up properly - obj = Employee.objects(id=employee.id).only('salary').get() + obj = Employee.objects(id=employee.id).only("salary").get() self.assertEqual(obj.salary, employee.salary) self.assertEqual(obj.name, None) @@ -208,35 +205,41 @@ class OnlyExcludeAllTest(unittest.TestCase): BlogPost.drop_collection() - post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}}) - post.author = User(name='Test User') - post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] + post = BlogPost( + content="Had a good coffee today...", + various={"test_dynamic": {"some": True}}, + ) + post.author = User(name="Test User") + post.comments = [ + Comment(title="I aggree", text="Great post!"), + Comment(title="Coffee", text="I hate coffee"), + ] post.save() - obj = BlogPost.objects.only('author.name',).get() + obj = BlogPost.objects.only("author.name").get() self.assertEqual(obj.content, None) self.assertEqual(obj.author.email, None) - self.assertEqual(obj.author.name, 'Test User') + self.assertEqual(obj.author.name, "Test User") self.assertEqual(obj.comments, []) - obj = BlogPost.objects.only('various.test_dynamic.some').get() + obj = BlogPost.objects.only("various.test_dynamic.some").get() self.assertEqual(obj.various["test_dynamic"].some, True) - obj = BlogPost.objects.only('content', 'comments.title',).get() - self.assertEqual(obj.content, 'Had a good coffee today...') + obj = BlogPost.objects.only("content", "comments.title").get() + self.assertEqual(obj.content, "Had a good coffee today...") self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, 'I aggree') - self.assertEqual(obj.comments[1].title, 'Coffee') + self.assertEqual(obj.comments[0].title, "I aggree") + self.assertEqual(obj.comments[1].title, "Coffee") self.assertEqual(obj.comments[0].text, None) self.assertEqual(obj.comments[1].text, 
None) - obj = BlogPost.objects.only('comments',).get() + obj = BlogPost.objects.only("comments").get() self.assertEqual(obj.content, None) self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, 'I aggree') - self.assertEqual(obj.comments[1].title, 'Coffee') - self.assertEqual(obj.comments[0].text, 'Great post!') - self.assertEqual(obj.comments[1].text, 'I hate coffee') + self.assertEqual(obj.comments[0].title, "I aggree") + self.assertEqual(obj.comments[1].title, "Coffee") + self.assertEqual(obj.comments[0].text, "Great post!") + self.assertEqual(obj.comments[1].text, "I hate coffee") BlogPost.drop_collection() @@ -256,15 +259,18 @@ class OnlyExcludeAllTest(unittest.TestCase): BlogPost.drop_collection() - post = BlogPost(content='Had a good coffee today...') - post.author = User(name='Test User') - post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] + post = BlogPost(content="Had a good coffee today...") + post.author = User(name="Test User") + post.comments = [ + Comment(title="I aggree", text="Great post!"), + Comment(title="Coffee", text="I hate coffee"), + ] post.save() - obj = BlogPost.objects.exclude('author', 'comments.text').get() + obj = BlogPost.objects.exclude("author", "comments.text").get() self.assertEqual(obj.author, None) - self.assertEqual(obj.content, 'Had a good coffee today...') - self.assertEqual(obj.comments[0].title, 'I aggree') + self.assertEqual(obj.content, "Had a good coffee today...") + self.assertEqual(obj.comments[0].title, "I aggree") self.assertEqual(obj.comments[0].text, None) BlogPost.drop_collection() @@ -283,32 +289,43 @@ class OnlyExcludeAllTest(unittest.TestCase): attachments = ListField(EmbeddedDocumentField(Attachment)) Email.drop_collection() - email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') + email = Email( + sender="me", + to="you", + subject="From Russia with Love", + body="Hello!", + content_type="text/plain", + ) email.attachments = [ - Attachment(name='file1.doc', content='ABC'), - Attachment(name='file2.doc', content='XYZ'), + Attachment(name="file1.doc", content="ABC"), + Attachment(name="file2.doc", content="XYZ"), ] email.save() - obj = Email.objects.exclude('content_type').exclude('body').get() - self.assertEqual(obj.sender, 'me') - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, 'From Russia with Love') + obj = Email.objects.exclude("content_type").exclude("body").get() + self.assertEqual(obj.sender, "me") + self.assertEqual(obj.to, "you") + self.assertEqual(obj.subject, "From Russia with Love") self.assertEqual(obj.body, None) self.assertEqual(obj.content_type, None) - obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get() + obj = Email.objects.only("sender", "to").exclude("body", "sender").get() self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, 'you') + self.assertEqual(obj.to, "you") self.assertEqual(obj.subject, None) self.assertEqual(obj.body, None) self.assertEqual(obj.content_type, None) - obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get() - self.assertEqual(obj.attachments[0].name, 'file1.doc') + obj = ( + Email.objects.exclude("attachments.content") + .exclude("body") + .only("to", "attachments.name") + .get() + ) + self.assertEqual(obj.attachments[0].name, "file1.doc") self.assertEqual(obj.attachments[0].content, None) self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, 'you') + 
self.assertEqual(obj.to, "you") self.assertEqual(obj.subject, None) self.assertEqual(obj.body, None) self.assertEqual(obj.content_type, None) @@ -316,7 +333,6 @@ class OnlyExcludeAllTest(unittest.TestCase): Email.drop_collection() def test_all_fields(self): - class Email(Document): sender = StringField() to = StringField() @@ -326,21 +342,33 @@ class OnlyExcludeAllTest(unittest.TestCase): Email.drop_collection() - email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') + email = Email( + sender="me", + to="you", + subject="From Russia with Love", + body="Hello!", + content_type="text/plain", + ) email.save() - obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get() - self.assertEqual(obj.sender, 'me') - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, 'From Russia with Love') - self.assertEqual(obj.body, 'Hello!') - self.assertEqual(obj.content_type, 'text/plain') + obj = ( + Email.objects.exclude("content_type", "body") + .only("to", "body") + .all_fields() + .get() + ) + self.assertEqual(obj.sender, "me") + self.assertEqual(obj.to, "you") + self.assertEqual(obj.subject, "From Russia with Love") + self.assertEqual(obj.body, "Hello!") + self.assertEqual(obj.content_type, "text/plain") Email.drop_collection() def test_slicing_fields(self): """Ensure that query slicing an array works. """ + class Numbers(Document): n = ListField(IntField()) @@ -414,11 +442,10 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) def test_exclude_from_subclasses_docs(self): - class Base(Document): username = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Anon(Base): anon = BooleanField() @@ -436,5 +463,5 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertRaises(LookUpError, Base.objects.exclude, "made_up") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/geo.py b/tests/queryset/geo.py index 45e6a089..95dc913d 100644 --- a/tests/queryset/geo.py +++ b/tests/queryset/geo.py @@ -10,9 +10,9 @@ __all__ = ("GeoQueriesTest",) class GeoQueriesTest(MongoDBTestCase): - def _create_event_data(self, point_field_class=GeoPointField): """Create some sample data re-used in many of the tests below.""" + class Event(Document): title = StringField() date = DateTimeField() @@ -28,15 +28,18 @@ class GeoQueriesTest(MongoDBTestCase): event1 = Event.objects.create( title="Coltrane Motion @ Double Door", date=datetime.datetime.now() - datetime.timedelta(days=1), - location=[-87.677137, 41.909889]) + location=[-87.677137, 41.909889], + ) event2 = Event.objects.create( title="Coltrane Motion @ Bottom of the Hill", date=datetime.datetime.now() - datetime.timedelta(days=10), - location=[-122.4194155, 37.7749295]) + location=[-122.4194155, 37.7749295], + ) event3 = Event.objects.create( title="Coltrane Motion @ Empty Bottle", date=datetime.datetime.now(), - location=[-87.686638, 41.900474]) + location=[-87.686638, 41.900474], + ) return event1, event2, event3 @@ -65,8 +68,7 @@ class GeoQueriesTest(MongoDBTestCase): # find events within 10 degrees of san francisco point = [-122.415579, 37.7566023] - events = self.Event.objects(location__near=point, - location__max_distance=10) + events = self.Event.objects(location__near=point, location__max_distance=10) self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) @@ -78,8 +80,7 @@ class GeoQueriesTest(MongoDBTestCase): 
# find events at least 10 degrees away of san francisco point = [-122.415579, 37.7566023] - events = self.Event.objects(location__near=point, - location__min_distance=10) + events = self.Event.objects(location__near=point, location__min_distance=10) self.assertEqual(events.count(), 2) def test_within_distance(self): @@ -88,8 +89,7 @@ class GeoQueriesTest(MongoDBTestCase): # find events within 5 degrees of pitchfork office, chicago point_and_distance = [[-87.67892, 41.9120459], 5] - events = self.Event.objects( - location__within_distance=point_and_distance) + events = self.Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 2) events = list(events) self.assertNotIn(event2, events) @@ -98,21 +98,18 @@ class GeoQueriesTest(MongoDBTestCase): # find events within 10 degrees of san francisco point_and_distance = [[-122.415579, 37.7566023], 10] - events = self.Event.objects( - location__within_distance=point_and_distance) + events = self.Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) # find events within 1 degree of greenpoint, broolyn, nyc, ny point_and_distance = [[-73.9509714, 40.7237134], 1] - events = self.Event.objects( - location__within_distance=point_and_distance) + events = self.Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 0) # ensure ordering is respected by "within_distance" point_and_distance = [[-87.67892, 41.9120459], 10] - events = self.Event.objects( - location__within_distance=point_and_distance) + events = self.Event.objects(location__within_distance=point_and_distance) events = events.order_by("-date") self.assertEqual(events.count(), 2) self.assertEqual(events[0], event3) @@ -145,7 +142,7 @@ class GeoQueriesTest(MongoDBTestCase): polygon2 = [ (-1.742249, 54.033586), (-1.225891, 52.792797), - (-4.40094, 53.389881) + (-4.40094, 53.389881), ] events = self.Event.objects(location__within_polygon=polygon2) self.assertEqual(events.count(), 0) @@ -154,9 +151,7 @@ class GeoQueriesTest(MongoDBTestCase): """Make sure the "near" operator works with a PointField, which corresponds to a 2dsphere index. """ - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # find all events "near" pitchfork office, chicago. # note that "near" will show the san francisco event, too, @@ -175,26 +170,23 @@ class GeoQueriesTest(MongoDBTestCase): """Ensure the "max_distance" operator works alongside the "near" operator with a 2dsphere index. 
""" - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # find events within 10km of san francisco point = [-122.415579, 37.7566023] - events = self.Event.objects(location__near=point, - location__max_distance=10000) + events = self.Event.objects(location__near=point, location__max_distance=10000) self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) # find events within 1km of greenpoint, broolyn, nyc, ny - events = self.Event.objects(location__near=[-73.9509714, 40.7237134], - location__max_distance=1000) + events = self.Event.objects( + location__near=[-73.9509714, 40.7237134], location__max_distance=1000 + ) self.assertEqual(events.count(), 0) # ensure ordering is respected by "near" events = self.Event.objects( - location__near=[-87.67892, 41.9120459], - location__max_distance=10000 + location__near=[-87.67892, 41.9120459], location__max_distance=10000 ).order_by("-date") self.assertEqual(events.count(), 2) self.assertEqual(events[0], event3) @@ -203,9 +195,7 @@ class GeoQueriesTest(MongoDBTestCase): """Ensure the "geo_within_box" operator works with a 2dsphere index. """ - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # check that within_box works box = [(-125.0, 35.0), (-100.0, 40.0)] @@ -217,9 +207,7 @@ class GeoQueriesTest(MongoDBTestCase): """Ensure the "geo_within_polygon" operator works with a 2dsphere index. """ - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) polygon = [ (-87.694445, 41.912114), @@ -235,7 +223,7 @@ class GeoQueriesTest(MongoDBTestCase): polygon2 = [ (-1.742249, 54.033586), (-1.225891, 52.792797), - (-4.40094, 53.389881) + (-4.40094, 53.389881), ] events = self.Event.objects(location__geo_within_polygon=polygon2) self.assertEqual(events.count(), 0) @@ -244,23 +232,20 @@ class GeoQueriesTest(MongoDBTestCase): """Ensure "min_distace" and "max_distance" operators work well together with the "near" operator in a 2dsphere index. """ - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # ensure min_distance and max_distance combine well events = self.Event.objects( location__near=[-87.67892, 41.9120459], location__min_distance=1000, - location__max_distance=10000 + location__max_distance=10000, ).order_by("-date") self.assertEqual(events.count(), 1) self.assertEqual(events[0], event3) # ensure ordering is respected by "near" with "min_distance" events = self.Event.objects( - location__near=[-87.67892, 41.9120459], - location__min_distance=10000 + location__near=[-87.67892, 41.9120459], location__min_distance=10000 ).order_by("-date") self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) @@ -269,14 +254,11 @@ class GeoQueriesTest(MongoDBTestCase): """Make sure the "geo_within_center" operator works with a 2dsphere index. 
""" - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # find events within 5 degrees of pitchfork office, chicago point_and_distance = [[-87.67892, 41.9120459], 2] - events = self.Event.objects( - location__geo_within_center=point_and_distance) + events = self.Event.objects(location__geo_within_center=point_and_distance) self.assertEqual(events.count(), 2) events = list(events) self.assertNotIn(event2, events) @@ -287,6 +269,7 @@ class GeoQueriesTest(MongoDBTestCase): """Helper test method ensuring given point field class works well in an embedded document. """ + class Venue(EmbeddedDocument): location = point_field_class() name = StringField() @@ -300,12 +283,11 @@ class GeoQueriesTest(MongoDBTestCase): venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889]) venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295]) - event1 = Event(title="Coltrane Motion @ Double Door", - venue=venue1).save() - event2 = Event(title="Coltrane Motion @ Bottom of the Hill", - venue=venue2).save() - event3 = Event(title="Coltrane Motion @ Empty Bottle", - venue=venue1).save() + event1 = Event(title="Coltrane Motion @ Double Door", venue=venue1).save() + event2 = Event( + title="Coltrane Motion @ Bottom of the Hill", venue=venue2 + ).save() + event3 = Event(title="Coltrane Motion @ Empty Bottle", venue=venue1).save() # find all events "near" pitchfork office, chicago. # note that "near" will show the san francisco event, too, @@ -324,6 +306,7 @@ class GeoQueriesTest(MongoDBTestCase): def test_spherical_geospatial_operators(self): """Ensure that spherical geospatial queries are working.""" + class Point(Document): location = GeoPointField() @@ -343,26 +326,26 @@ class GeoQueriesTest(MongoDBTestCase): # Same behavior for _within_spherical_distance points = Point.objects( - location__within_spherical_distance=[ - [-122, 37.5], - 60 / earth_radius - ] + location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius] ) self.assertEqual(points.count(), 2) - points = Point.objects(location__near_sphere=[-122, 37.5], - location__max_distance=60 / earth_radius) + points = Point.objects( + location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius + ) self.assertEqual(points.count(), 2) # Test query works with max_distance, being farer from one point - points = Point.objects(location__near_sphere=[-122, 37.8], - location__max_distance=60 / earth_radius) + points = Point.objects( + location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius + ) close_point = points.first() self.assertEqual(points.count(), 1) # Test query works with min_distance, being farer from one point - points = Point.objects(location__near_sphere=[-122, 37.8], - location__min_distance=60 / earth_radius) + points = Point.objects( + location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius + ) self.assertEqual(points.count(), 1) far_point = points.first() self.assertNotEqual(close_point, far_point) @@ -384,10 +367,7 @@ class GeoQueriesTest(MongoDBTestCase): # Finds only one point because only the first point is within 60km of # the reference point to the south. 
points = Point.objects( - location__within_spherical_distance=[ - [-122, 36.5], - 60 / earth_radius - ] + location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius] ) self.assertEqual(points.count(), 1) self.assertEqual(points[0].id, south_point.id) @@ -413,8 +393,10 @@ class GeoQueriesTest(MongoDBTestCase): self.assertEqual(1, roads) # Within - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count() self.assertEqual(1, roads) @@ -425,8 +407,7 @@ class GeoQueriesTest(MongoDBTestCase): self.assertEqual(1, roads) # Intersects - line = {"type": "LineString", - "coordinates": [[40, 5], [40, 6]]} + line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]} roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count() self.assertEqual(1, roads) @@ -436,8 +417,10 @@ class GeoQueriesTest(MongoDBTestCase): roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count() self.assertEqual(1, roads) - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count() self.assertEqual(1, roads) @@ -468,8 +451,10 @@ class GeoQueriesTest(MongoDBTestCase): self.assertEqual(1, roads) # Within - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count() self.assertEqual(1, roads) @@ -480,8 +465,7 @@ class GeoQueriesTest(MongoDBTestCase): self.assertEqual(1, roads) # Intersects - line = {"type": "LineString", - "coordinates": [[40, 5], [41, 6]]} + line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]} roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count() self.assertEqual(1, roads) @@ -491,8 +475,10 @@ class GeoQueriesTest(MongoDBTestCase): roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count() self.assertEqual(1, roads) - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count() self.assertEqual(1, roads) @@ -504,20 +490,20 @@ class GeoQueriesTest(MongoDBTestCase): def test_aspymongo_with_only(self): """Ensure as_pymongo works with only""" + class Place(Document): location = PointField() Place.drop_collection() p = Place(location=[24.946861267089844, 60.16311983618494]) p.save() - qs = Place.objects().only('location') + qs = Place.objects().only("location") self.assertDictEqual( - qs.as_pymongo()[0]['location'], - {u'type': u'Point', - u'coordinates': [ - 24.946861267089844, - 60.16311983618494] - } + qs.as_pymongo()[0]["location"], + { + u"type": u"Point", + u"coordinates": [24.946861267089844, 60.16311983618494], + }, ) def test_2dsphere_point_sets_correctly(self): @@ -542,11 +528,15 @@ class GeoQueriesTest(MongoDBTestCase): Location(line=[[1, 2], [2, 2]]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["line"], {"type": 
"LineString", "coordinates": [[1, 2], [2, 2]]}) + self.assertEqual( + loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]} + ) Location.objects.update(set__line=[[2, 1], [1, 2]]) loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}) + self.assertEqual( + loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]} + ) def test_geojson_PolygonField(self): class Location(Document): @@ -556,12 +546,18 @@ class GeoQueriesTest(MongoDBTestCase): Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) + self.assertEqual( + loc["poly"], + {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}, + ) Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]]) loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}) + self.assertEqual( + loc["poly"], + {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}, + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/modify.py b/tests/queryset/modify.py index 3c5879ba..e092d11c 100644 --- a/tests/queryset/modify.py +++ b/tests/queryset/modify.py @@ -11,7 +11,6 @@ class Doc(Document): class FindAndModifyTest(unittest.TestCase): - def setUp(self): connect(db="mongoenginetest") Doc.drop_collection() @@ -82,9 +81,14 @@ class FindAndModifyTest(unittest.TestCase): old_doc = Doc.objects().order_by("-id").modify(set__value=-1) self.assertEqual(old_doc.to_json(), doc.to_json()) - self.assertDbEqual([ - {"_id": 0, "value": 3}, {"_id": 1, "value": 2}, - {"_id": 2, "value": 1}, {"_id": 3, "value": -1}]) + self.assertDbEqual( + [ + {"_id": 0, "value": 3}, + {"_id": 1, "value": 2}, + {"_id": 2, "value": 1}, + {"_id": 3, "value": -1}, + ] + ) def test_modify_with_fields(self): Doc(id=0, value=0).save() @@ -103,27 +107,25 @@ class FindAndModifyTest(unittest.TestCase): blog = BlogPost.objects.create() # Push a new tag via modify with new=False (default). - BlogPost(id=blog.id).modify(push__tags='code') + BlogPost(id=blog.id).modify(push__tags="code") self.assertEqual(blog.tags, []) blog.reload() - self.assertEqual(blog.tags, ['code']) + self.assertEqual(blog.tags, ["code"]) # Push a new tag via modify with new=True. - blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True) - self.assertEqual(blog.tags, ['code', 'java']) + blog = BlogPost.objects(id=blog.id).modify(push__tags="java", new=True) + self.assertEqual(blog.tags, ["code", "java"]) # Push a new tag with a positional argument. - blog = BlogPost.objects(id=blog.id).modify( - push__tags__0='python', - new=True) - self.assertEqual(blog.tags, ['python', 'code', 'java']) + blog = BlogPost.objects(id=blog.id).modify(push__tags__0="python", new=True) + self.assertEqual(blog.tags, ["python", "code", "java"]) # Push multiple new tags with a positional argument. 
blog = BlogPost.objects(id=blog.id).modify( - push__tags__1=['go', 'rust'], - new=True) - self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java']) + push__tags__1=["go", "rust"], new=True + ) + self.assertEqual(blog.tags, ["python", "go", "rust", "code", "java"]) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/pickable.py b/tests/queryset/pickable.py index bf7bb31c..0945fcbc 100644 --- a/tests/queryset/pickable.py +++ b/tests/queryset/pickable.py @@ -4,7 +4,7 @@ from pymongo.mongo_client import MongoClient from mongoengine import Document, StringField, IntField from mongoengine.connection import connect -__author__ = 'stas' +__author__ = "stas" class Person(Document): @@ -17,6 +17,7 @@ class TestQuerysetPickable(unittest.TestCase): Test for adding pickling support for QuerySet instances See issue https://github.com/MongoEngine/mongoengine/issues/442 """ + def setUp(self): super(TestQuerysetPickable, self).setUp() @@ -24,10 +25,7 @@ class TestQuerysetPickable(unittest.TestCase): connection.drop_database("test") - self.john = Person.objects.create( - name="John", - age=21 - ) + self.john = Person.objects.create(name="John", age=21) def test_picke_simple_qs(self): @@ -54,15 +52,9 @@ class TestQuerysetPickable(unittest.TestCase): self.assertEqual(Person.objects.first().age, 23) def test_pickle_support_filtration(self): - Person.objects.create( - name="Alice", - age=22 - ) + Person.objects.create(name="Alice", age=22) - Person.objects.create( - name="Bob", - age=23 - ) + Person.objects.create(name="Bob", age=23) qs = Person.objects.filter(age__gte=22) self.assertEqual(qs.count(), 2) @@ -71,9 +63,3 @@ class TestQuerysetPickable(unittest.TestCase): self.assertEqual(loaded.count(), 2) self.assertEqual(loaded.filter(name="Bob").first().age, 23) - - - - - - diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index c86e4095..21f35012 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -17,29 +17,34 @@ from mongoengine.connection import get_connection, get_db from mongoengine.context_managers import query_counter, switch_db from mongoengine.errors import InvalidQueryError from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version -from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned, - QuerySet, QuerySetManager, queryset_manager) +from mongoengine.queryset import ( + DoesNotExist, + MultipleObjectsReturned, + QuerySet, + QuerySetManager, + queryset_manager, +) class db_ops_tracker(query_counter): - def get_ops(self): ignore_query = dict(self._ignored_query) - ignore_query['command.count'] = {'$ne': 'system.profile'} # Ignore the query issued by query_counter + ignore_query["command.count"] = { + "$ne": "system.profile" + } # Ignore the query issued by query_counter return list(self.db.system.profile.find(ignore_query)) def get_key_compat(mongo_ver): - ORDER_BY_KEY = 'sort' - CMD_QUERY_KEY = 'command' if mongo_ver >= MONGODB_36 else 'query' + ORDER_BY_KEY = "sort" + CMD_QUERY_KEY = "command" if mongo_ver >= MONGODB_36 else "query" return ORDER_BY_KEY, CMD_QUERY_KEY class QuerySetTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') - connect(db='mongoenginetest2', alias='test2') + connect(db="mongoenginetest") + connect(db="mongoenginetest2", alias="test2") class PersonMeta(EmbeddedDocument): weight = IntField() @@ -48,7 +53,7 @@ class QuerySetTest(unittest.TestCase): name = StringField() age = IntField() person_meta = 
EmbeddedDocumentField(PersonMeta) - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} Person.drop_collection() self.PersonMeta = PersonMeta @@ -60,12 +65,14 @@ class QuerySetTest(unittest.TestCase): """Ensure that a QuerySet is correctly initialised by QuerySetManager. """ self.assertIsInstance(self.Person.objects, QuerySet) - self.assertEqual(self.Person.objects._collection.name, - self.Person._get_collection_name()) - self.assertIsInstance(self.Person.objects._collection, pymongo.collection.Collection) + self.assertEqual( + self.Person.objects._collection.name, self.Person._get_collection_name() + ) + self.assertIsInstance( + self.Person.objects._collection, pymongo.collection.Collection + ) def test_cannot_perform_joins_references(self): - class BlogPost(Document): author = ReferenceField(self.Person) author2 = GenericReferenceField() @@ -80,8 +87,8 @@ class QuerySetTest(unittest.TestCase): def test_find(self): """Ensure that a query returns a valid set of results.""" - user_a = self.Person.objects.create(name='User A', age=20) - user_b = self.Person.objects.create(name='User B', age=30) + user_a = self.Person.objects.create(name="User A", age=20) + user_b = self.Person.objects.create(name="User B", age=30) # Find all people in the collection people = self.Person.objects @@ -92,11 +99,11 @@ class QuerySetTest(unittest.TestCase): self.assertIsInstance(results[0].id, ObjectId) self.assertEqual(results[0], user_a) - self.assertEqual(results[0].name, 'User A') + self.assertEqual(results[0].name, "User A") self.assertEqual(results[0].age, 20) self.assertEqual(results[1], user_b) - self.assertEqual(results[1].name, 'User B') + self.assertEqual(results[1].name, "User B") self.assertEqual(results[1].age, 30) # Filter people by age @@ -109,8 +116,8 @@ class QuerySetTest(unittest.TestCase): def test_limit(self): """Ensure that QuerySet.limit works as expected.""" - user_a = self.Person.objects.create(name='User A', age=20) - user_b = self.Person.objects.create(name='User B', age=30) + user_a = self.Person.objects.create(name="User A", age=20) + user_b = self.Person.objects.create(name="User B", age=30) # Test limit on a new queryset people = list(self.Person.objects.limit(1)) @@ -131,15 +138,15 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(people), 2) # Test chaining of only after limit - person = self.Person.objects().limit(1).only('name').first() + person = self.Person.objects().limit(1).only("name").first() self.assertEqual(person, user_a) - self.assertEqual(person.name, 'User A') + self.assertEqual(person.name, "User A") self.assertEqual(person.age, None) def test_skip(self): """Ensure that QuerySet.skip works as expected.""" - user_a = self.Person.objects.create(name='User A', age=20) - user_b = self.Person.objects.create(name='User B', age=30) + user_a = self.Person.objects.create(name="User A", age=20) + user_b = self.Person.objects.create(name="User B", age=30) # Test skip on a new queryset people = list(self.Person.objects.skip(1)) @@ -155,20 +162,20 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(people2[0], user_b) # Test chaining of only after skip - person = self.Person.objects().skip(1).only('name').first() + person = self.Person.objects().skip(1).only("name").first() self.assertEqual(person, user_b) - self.assertEqual(person.name, 'User B') + self.assertEqual(person.name, "User B") self.assertEqual(person.age, None) def test___getitem___invalid_index(self): """Ensure slicing a queryset works as expected.""" with 
self.assertRaises(TypeError):
-            self.Person.objects()['a']
+            self.Person.objects()["a"]

     def test_slice(self):
         """Ensure slicing a queryset works as expected."""
-        user_a = self.Person.objects.create(name='User A', age=20)
-        user_b = self.Person.objects.create(name='User B', age=30)
+        user_a = self.Person.objects.create(name="User A", age=20)
+        user_b = self.Person.objects.create(name="User B", age=30)
         user_c = self.Person.objects.create(name="User C", age=40)

         # Test slice limit
@@ -202,7 +209,7 @@ class QuerySetTest(unittest.TestCase):
             qs._cursor_obj = None
             people = list(qs)
             self.assertEqual(len(people), 1)
-            self.assertEqual(people[0].name, 'User B')
+            self.assertEqual(people[0].name, "User B")

         # Test empty slice
         people = list(self.Person.objects[1:1])
@@ -215,14 +222,18 @@ class QuerySetTest(unittest.TestCase):
         # Test larger slice __repr__
         self.Person.objects.delete()
         for i in range(55):
-            self.Person(name='A%s' % i, age=i).save()
+            self.Person(name="A%s" % i, age=i).save()

         self.assertEqual(self.Person.objects.count(), 55)
         self.assertEqual("Person object", "%s" % self.Person.objects[0])
-        self.assertEqual("[<Person: Person object>, <Person: Person object>]",
-                         "%s" % self.Person.objects[1:3])
-        self.assertEqual("[<Person: Person object>, <Person: Person object>]",
-                         "%s" % self.Person.objects[51:53])
+        self.assertEqual(
+            "[<Person: Person object>, <Person: Person object>]",
+            "%s" % self.Person.objects[1:3],
+        )
+        self.assertEqual(
+            "[<Person: Person object>, <Person: Person object>]",
+            "%s" % self.Person.objects[51:53],
+        )

     def test_find_one(self):
         """Ensure that a query using find_one returns a valid result.
@@ -276,8 +287,7 @@ class QuerySetTest(unittest.TestCase):

         # Retrieve the first person from the database
         self.assertRaises(MultipleObjectsReturned, self.Person.objects.get)
-        self.assertRaises(self.Person.MultipleObjectsReturned,
-                          self.Person.objects.get)
+        self.assertRaises(self.Person.MultipleObjectsReturned, self.Person.objects.get)

         # Use a query to filter the people found to just person2
         person = self.Person.objects.get(age=30)
@@ -289,6 +299,7 @@ class QuerySetTest(unittest.TestCase):

     def test_find_array_position(self):
         """Ensure that query by array position works.
""" + class Comment(EmbeddedDocument): name = StringField() @@ -301,34 +312,34 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() - Blog.objects.create(tags=['a', 'b']) - self.assertEqual(Blog.objects(tags__0='a').count(), 1) - self.assertEqual(Blog.objects(tags__0='b').count(), 0) - self.assertEqual(Blog.objects(tags__1='a').count(), 0) - self.assertEqual(Blog.objects(tags__1='b').count(), 1) + Blog.objects.create(tags=["a", "b"]) + self.assertEqual(Blog.objects(tags__0="a").count(), 1) + self.assertEqual(Blog.objects(tags__0="b").count(), 0) + self.assertEqual(Blog.objects(tags__1="a").count(), 0) + self.assertEqual(Blog.objects(tags__1="b").count(), 1) Blog.drop_collection() - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) blog1 = Blog.objects.create(posts=[post1, post2]) blog2 = Blog.objects.create(posts=[post2, post1]) - blog = Blog.objects(posts__0__comments__0__name='testa').get() + blog = Blog.objects(posts__0__comments__0__name="testa").get() self.assertEqual(blog, blog1) - blog = Blog.objects(posts__0__comments__0__name='testb').get() + blog = Blog.objects(posts__0__comments__0__name="testb").get() self.assertEqual(blog, blog2) - query = Blog.objects(posts__1__comments__1__name='testb') + query = Blog.objects(posts__1__comments__1__name="testb") self.assertEqual(query.count(), 2) - query = Blog.objects(posts__1__comments__1__name='testa') + query = Blog.objects(posts__1__comments__1__name="testa") self.assertEqual(query.count(), 0) - query = Blog.objects(posts__0__comments__1__name='testa') + query = Blog.objects(posts__0__comments__1__name="testa") self.assertEqual(query.count(), 0) Blog.drop_collection() @@ -367,13 +378,14 @@ class QuerySetTest(unittest.TestCase): q2 = q2.filter(ref=a1)._query self.assertEqual(q1, q2) - a_objects = A.objects(s='test1') + a_objects = A.objects(s="test1") query = B.objects(ref__in=a_objects) query = query.filter(boolfield=True) self.assertEqual(query.count(), 1) def test_batch_size(self): """Ensure that batch_size works.""" + class A(Document): s = StringField() @@ -416,33 +428,33 @@ class QuerySetTest(unittest.TestCase): self.Person.drop_collection() write_concern = {"fsync": True} - author = self.Person.objects.create(name='Test User') + author = self.Person.objects.create(name="Test User") author.save(write_concern=write_concern) # Ensure no regression of #1958 - author = self.Person(name='Test User2') + author = self.Person(name="Test User2") author.save(write_concern=None) # will default to {w: 1} - result = self.Person.objects.update( - set__name='Ross', write_concern={"w": 1}) + result = self.Person.objects.update(set__name="Ross", write_concern={"w": 1}) self.assertEqual(result, 2) - result = self.Person.objects.update( - set__name='Ross', write_concern={"w": 0}) + result = self.Person.objects.update(set__name="Ross", write_concern={"w": 0}) self.assertEqual(result, None) result = self.Person.objects.update_one( - set__name='Test User', write_concern={"w": 1}) + set__name="Test User", write_concern={"w": 1} + ) self.assertEqual(result, 1) result = self.Person.objects.update_one( - set__name='Test User', write_concern={"w": 0}) + set__name="Test User", write_concern={"w": 0} + ) self.assertEqual(result, None) def test_update_update_has_a_value(self): """Test to ensure that update is passed a value to update to""" self.Person.drop_collection() - 
author = self.Person.objects.create(name='Test User') + author = self.Person.objects.create(name="Test User") with self.assertRaises(OperationError): self.Person.objects(pk=author.pk).update({}) @@ -457,6 +469,7 @@ class QuerySetTest(unittest.TestCase): set__posts__1__comments__1__name="testc" Check that it only works for ListFields. """ + class Comment(EmbeddedDocument): name = StringField() @@ -469,16 +482,16 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) Blog.objects.create(posts=[post1, post2]) Blog.objects.create(posts=[post2, post1]) # Update all of the first comments of second posts of all blogs - Blog.objects().update(set__posts__1__comments__0__name='testc') - testc_blogs = Blog.objects(posts__1__comments__0__name='testc') + Blog.objects().update(set__posts__1__comments__0__name="testc") + testc_blogs = Blog.objects(posts__1__comments__0__name="testc") self.assertEqual(testc_blogs.count(), 2) Blog.drop_collection() @@ -486,14 +499,13 @@ class QuerySetTest(unittest.TestCase): Blog.objects.create(posts=[post2, post1]) # Update only the first blog returned by the query - Blog.objects().update_one( - set__posts__1__comments__1__name='testc') - testc_blogs = Blog.objects(posts__1__comments__1__name='testc') + Blog.objects().update_one(set__posts__1__comments__1__name="testc") + testc_blogs = Blog.objects(posts__1__comments__1__name="testc") self.assertEqual(testc_blogs.count(), 1) # Check that using this indexing syntax on a non-list fails with self.assertRaises(InvalidQueryError): - Blog.objects().update(set__posts__1__comments__0__name__1='asdf') + Blog.objects().update(set__posts__1__comments__0__name__1="asdf") Blog.drop_collection() @@ -519,7 +531,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1) post = BlogPost.objects.first() - self.assertEqual(post.comments[1].by, 'jane') + self.assertEqual(post.comments[1].by, "jane") self.assertEqual(post.comments[1].votes, 8) def test_update_using_positional_operator_matches_first(self): @@ -563,7 +575,7 @@ class QuerySetTest(unittest.TestCase): # Nested updates arent supported yet.. 
with self.assertRaises(OperationError): Simple.drop_collection() - Simple(x=[{'test': [1, 2, 3, 4]}]).save() + Simple(x=[{"test": [1, 2, 3, 4]}]).save() Simple.objects(x__test=2).update(set__x__S__test__S=3) self.assertEqual(simple.x, [1, 2, 3, 4]) @@ -590,10 +602,11 @@ class QuerySetTest(unittest.TestCase): BlogPost(title="ABC", comments=[c1, c2]).save() BlogPost.objects(comments__by="joe").update( - set__comments__S__votes=Vote(score=4)) + set__comments__S__votes=Vote(score=4) + ) post = BlogPost.objects.first() - self.assertEqual(post.comments[0].by, 'joe') + self.assertEqual(post.comments[0].by, "joe") self.assertEqual(post.comments[0].votes.score, 4) def test_update_min_max(self): @@ -618,16 +631,15 @@ class QuerySetTest(unittest.TestCase): item = StringField() price = FloatField() - product = Product.objects.create(item='ABC', price=10.99) - product = Product.objects.create(item='ABC', price=10.99) + product = Product.objects.create(item="ABC", price=10.99) + product = Product.objects.create(item="ABC", price=10.99) Product.objects(id=product.id).update(mul__price=1.25) self.assertEqual(Product.objects.get(id=product.id).price, 13.7375) - unknown_product = Product.objects.create(item='Unknown') + unknown_product = Product.objects.create(item="Unknown") Product.objects(id=unknown_product.id).update(mul__price=100) self.assertEqual(Product.objects.get(id=unknown_product.id).price, 0) def test_updates_can_have_match_operators(self): - class Comment(EmbeddedDocument): content = StringField() name = StringField(max_length=120) @@ -643,8 +655,11 @@ class QuerySetTest(unittest.TestCase): comm1 = Comment(content="very funny indeed", name="John S", vote=1) comm2 = Comment(content="kind of funny", name="Mark P", vote=0) - Post(title='Fun with MongoEngine', tags=['mongodb', 'mongoengine'], - comments=[comm1, comm2]).save() + Post( + title="Fun with MongoEngine", + tags=["mongodb", "mongoengine"], + comments=[comm1, comm2], + ).save() Post.objects().update_one(pull__comments__vote__lt=1) @@ -652,6 +667,7 @@ class QuerySetTest(unittest.TestCase): def test_mapfield_update(self): """Ensure that the MapField can be updated.""" + class Member(EmbeddedDocument): gender = StringField() age = IntField() @@ -662,37 +678,35 @@ class QuerySetTest(unittest.TestCase): Club.drop_collection() club = Club() - club.members['John'] = Member(gender="M", age=13) + club.members["John"] = Member(gender="M", age=13) club.save() - Club.objects().update( - set__members={"John": Member(gender="F", age=14)}) + Club.objects().update(set__members={"John": Member(gender="F", age=14)}) club = Club.objects().first() - self.assertEqual(club.members['John'].gender, "F") - self.assertEqual(club.members['John'].age, 14) + self.assertEqual(club.members["John"].gender, "F") + self.assertEqual(club.members["John"].age, 14) def test_dictfield_update(self): """Ensure that the DictField can be updated.""" + class Club(Document): members = DictField() club = Club() - club.members['John'] = {'gender': 'M', 'age': 13} + club.members["John"] = {"gender": "M", "age": 13} club.save() - Club.objects().update( - set__members={"John": {'gender': 'F', 'age': 14}}) + Club.objects().update(set__members={"John": {"gender": "F", "age": 14}}) club = Club.objects().first() - self.assertEqual(club.members['John']['gender'], "F") - self.assertEqual(club.members['John']['age'], 14) + self.assertEqual(club.members["John"]["gender"], "F") + self.assertEqual(club.members["John"]["age"], 14) def test_update_results(self): self.Person.drop_collection() - 
result = self.Person(name="Bob", age=25).update( - upsert=True, full_result=True) + result = self.Person(name="Bob", age=25).update(upsert=True, full_result=True) self.assertIsInstance(result, UpdateResult) self.assertIn("upserted", result.raw_result) self.assertFalse(result.raw_result["updatedExisting"]) @@ -703,8 +717,7 @@ class QuerySetTest(unittest.TestCase): self.assertTrue(result.raw_result["updatedExisting"]) self.Person(name="Bob", age=20).save() - result = self.Person.objects(name="Bob").update( - set__name="bobby", multi=True) + result = self.Person.objects(name="Bob").update(set__name="bobby", multi=True) self.assertEqual(result, 2) def test_update_validate(self): @@ -718,8 +731,12 @@ class QuerySetTest(unittest.TestCase): ed_f = EmbeddedDocumentField(EmDoc) self.assertRaises(ValidationError, Doc.objects().update, str_f=1, upsert=True) - self.assertRaises(ValidationError, Doc.objects().update, dt_f="datetime", upsert=True) - self.assertRaises(ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True) + self.assertRaises( + ValidationError, Doc.objects().update, dt_f="datetime", upsert=True + ) + self.assertRaises( + ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True + ) def test_update_related_models(self): class TestPerson(Document): @@ -732,34 +749,33 @@ class QuerySetTest(unittest.TestCase): TestPerson.drop_collection() TestOrganization.drop_collection() - p = TestPerson(name='p1') + p = TestPerson(name="p1") p.save() - o = TestOrganization(name='o1') + o = TestOrganization(name="o1") o.save() o.owner = p - p.name = 'p2' + p.name = "p2" - self.assertEqual(o._get_changed_fields(), ['owner']) - self.assertEqual(p._get_changed_fields(), ['name']) + self.assertEqual(o._get_changed_fields(), ["owner"]) + self.assertEqual(p._get_changed_fields(), ["name"]) o.save() self.assertEqual(o._get_changed_fields(), []) - self.assertEqual(p._get_changed_fields(), ['name']) # Fails; it's empty + self.assertEqual(p._get_changed_fields(), ["name"]) # Fails; it's empty # This will do NOTHING at all, even though we changed the name p.save() p.reload() - self.assertEqual(p.name, 'p2') # Fails; it's still `p1` + self.assertEqual(p.name, "p2") # Fails; it's still `p1` def test_upsert(self): self.Person.drop_collection() - self.Person.objects( - pk=ObjectId(), name="Bob", age=30).update(upsert=True) + self.Person.objects(pk=ObjectId(), name="Bob", age=30).update(upsert=True) bob = self.Person.objects.first() self.assertEqual("Bob", bob.name) @@ -786,7 +802,8 @@ class QuerySetTest(unittest.TestCase): self.Person.drop_collection() self.Person.objects(pk=ObjectId()).update( - set__name='Bob', set_on_insert__age=30, upsert=True) + set__name="Bob", set_on_insert__age=30, upsert=True + ) bob = self.Person.objects.first() self.assertEqual("Bob", bob.name) @@ -797,7 +814,7 @@ class QuerySetTest(unittest.TestCase): field = IntField() class B(Document): - meta = {'collection': 'b'} + meta = {"collection": "b"} field = IntField(default=1) embed = EmbeddedDocumentField(Embed, default=Embed) @@ -820,7 +837,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(record.embed.field, 2) # Request only the _id field and save - clone = B.objects().only('id').first() + clone = B.objects().only("id").first() clone.save() # Reload the record and see that the embed data is not lost @@ -831,6 +848,7 @@ class QuerySetTest(unittest.TestCase): def test_bulk_insert(self): """Ensure that bulk insert works""" + class Comment(EmbeddedDocument): name = StringField() @@ -847,14 +865,13 @@ class 
QuerySetTest(unittest.TestCase): # Recreates the collection self.assertEqual(0, Blog.objects.count()) - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) # Check bulk insert using load_bulk=False - blogs = [Blog(title="%s" % i, posts=[post1, post2]) - for i in range(99)] + blogs = [Blog(title="%s" % i, posts=[post1, post2]) for i in range(99)] with query_counter() as q: self.assertEqual(q, 0) Blog.objects.insert(blogs, load_bulk=False) @@ -866,8 +883,7 @@ class QuerySetTest(unittest.TestCase): Blog.ensure_indexes() # Check bulk insert using load_bulk=True - blogs = [Blog(title="%s" % i, posts=[post1, post2]) - for i in range(99)] + blogs = [Blog(title="%s" % i, posts=[post1, post2]) for i in range(99)] with query_counter() as q: self.assertEqual(q, 0) Blog.objects.insert(blogs) @@ -875,8 +891,8 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) blog1 = Blog(title="code", posts=[post1, post2]) @@ -892,8 +908,7 @@ class QuerySetTest(unittest.TestCase): blog = Blog.objects.first() Blog.objects.insert(blog) self.assertEqual( - str(cm.exception), - 'Some documents have ObjectIds, use doc.update() instead' + str(cm.exception), "Some documents have ObjectIds, use doc.update() instead" ) # test inserting a query set @@ -901,8 +916,7 @@ class QuerySetTest(unittest.TestCase): blogs_qs = Blog.objects Blog.objects.insert(blogs_qs) self.assertEqual( - str(cm.exception), - 'Some documents have ObjectIds, use doc.update() instead' + str(cm.exception), "Some documents have ObjectIds, use doc.update() instead" ) # insert 1 new doc @@ -948,13 +962,13 @@ class QuerySetTest(unittest.TestCase): name = StringField() Blog.drop_collection() - Blog(name='test').save() + Blog(name="test").save() with self.assertRaises(OperationError): Blog.objects.insert("HELLO WORLD") with self.assertRaises(OperationError): - Blog.objects.insert({'name': 'garbage'}) + Blog.objects.insert({"name": "garbage"}) def test_bulk_insert_update_input_document_ids(self): class Comment(Document): @@ -1010,10 +1024,11 @@ class QuerySetTest(unittest.TestCase): """Make sure we don't perform unnecessary db operations when none of document's fields were updated. """ + class Person(Document): name = StringField() - owns = ListField(ReferenceField('Organization')) - projects = ListField(ReferenceField('Project')) + owns = ListField(ReferenceField("Organization")) + projects = ListField(ReferenceField("Project")) class Organization(Document): name = StringField() @@ -1070,8 +1085,8 @@ class QuerySetTest(unittest.TestCase): def test_repeated_iteration(self): """Ensure that QuerySet rewinds itself one iteration finishes. 
""" - self.Person(name='Person 1').save() - self.Person(name='Person 2').save() + self.Person(name="Person 1").save() + self.Person(name="Person 2").save() queryset = self.Person.objects people1 = [person for person in queryset] @@ -1099,7 +1114,7 @@ class QuerySetTest(unittest.TestCase): for i in range(1000): Doc(number=i).save() - docs = Doc.objects.order_by('number') + docs = Doc.objects.order_by("number") self.assertEqual(docs.count(), 1000) @@ -1107,88 +1122,89 @@ class QuerySetTest(unittest.TestCase): self.assertIn("Doc: 0", docs_string) self.assertEqual(docs.count(), 1000) - self.assertIn('(remaining elements truncated)', "%s" % docs) + self.assertIn("(remaining elements truncated)", "%s" % docs) # Limit and skip docs = docs[1:4] - self.assertEqual('[, , ]', "%s" % docs) + self.assertEqual("[, , ]", "%s" % docs) self.assertEqual(docs.count(with_limit_and_skip=True), 3) for doc in docs: - self.assertEqual('.. queryset mid-iteration ..', repr(docs)) + self.assertEqual(".. queryset mid-iteration ..", repr(docs)) def test_regex_query_shortcuts(self): """Ensure that contains, startswith, endswith, etc work. """ - person = self.Person(name='Guido van Rossum') + person = self.Person(name="Guido van Rossum") person.save() # Test contains - obj = self.Person.objects(name__contains='van').first() + obj = self.Person.objects(name__contains="van").first() self.assertEqual(obj, person) - obj = self.Person.objects(name__contains='Van').first() + obj = self.Person.objects(name__contains="Van").first() self.assertEqual(obj, None) # Test icontains - obj = self.Person.objects(name__icontains='Van').first() + obj = self.Person.objects(name__icontains="Van").first() self.assertEqual(obj, person) # Test startswith - obj = self.Person.objects(name__startswith='Guido').first() + obj = self.Person.objects(name__startswith="Guido").first() self.assertEqual(obj, person) - obj = self.Person.objects(name__startswith='guido').first() + obj = self.Person.objects(name__startswith="guido").first() self.assertEqual(obj, None) # Test istartswith - obj = self.Person.objects(name__istartswith='guido').first() + obj = self.Person.objects(name__istartswith="guido").first() self.assertEqual(obj, person) # Test endswith - obj = self.Person.objects(name__endswith='Rossum').first() + obj = self.Person.objects(name__endswith="Rossum").first() self.assertEqual(obj, person) - obj = self.Person.objects(name__endswith='rossuM').first() + obj = self.Person.objects(name__endswith="rossuM").first() self.assertEqual(obj, None) # Test iendswith - obj = self.Person.objects(name__iendswith='rossuM').first() + obj = self.Person.objects(name__iendswith="rossuM").first() self.assertEqual(obj, person) # Test exact - obj = self.Person.objects(name__exact='Guido van Rossum').first() + obj = self.Person.objects(name__exact="Guido van Rossum").first() self.assertEqual(obj, person) - obj = self.Person.objects(name__exact='Guido van rossum').first() + obj = self.Person.objects(name__exact="Guido van rossum").first() self.assertEqual(obj, None) - obj = self.Person.objects(name__exact='Guido van Rossu').first() + obj = self.Person.objects(name__exact="Guido van Rossu").first() self.assertEqual(obj, None) # Test iexact - obj = self.Person.objects(name__iexact='gUIDO VAN rOSSUM').first() + obj = self.Person.objects(name__iexact="gUIDO VAN rOSSUM").first() self.assertEqual(obj, person) - obj = self.Person.objects(name__iexact='gUIDO VAN rOSSU').first() + obj = self.Person.objects(name__iexact="gUIDO VAN rOSSU").first() self.assertEqual(obj, None) # 
Test unsafe expressions - person = self.Person(name='Guido van Rossum [.\'Geek\']') + person = self.Person(name="Guido van Rossum [.'Geek']") person.save() - obj = self.Person.objects(name__icontains='[.\'Geek').first() + obj = self.Person.objects(name__icontains="[.'Geek").first() self.assertEqual(obj, person) def test_not(self): """Ensure that the __not operator works as expected. """ - alice = self.Person(name='Alice', age=25) + alice = self.Person(name="Alice", age=25) alice.save() - obj = self.Person.objects(name__iexact='alice').first() + obj = self.Person.objects(name__iexact="alice").first() self.assertEqual(obj, alice) - obj = self.Person.objects(name__not__iexact='alice').first() + obj = self.Person.objects(name__not__iexact="alice").first() self.assertEqual(obj, None) def test_filter_chaining(self): """Ensure filters can be chained together. """ + class Blog(Document): id = StringField(primary_key=True) @@ -1217,25 +1233,26 @@ class QuerySetTest(unittest.TestCase): blog=blog_1, title="Blog Post #1", is_published=True, - published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + published_date=datetime.datetime(2010, 1, 5, 0, 0, 0), ) BlogPost.objects.create( blog=blog_2, title="Blog Post #2", is_published=True, - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + published_date=datetime.datetime(2010, 1, 6, 0, 0, 0), ) BlogPost.objects.create( blog=blog_3, title="Blog Post #3", is_published=True, - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + published_date=datetime.datetime(2010, 1, 7, 0, 0, 0), ) # find all published blog posts before 2010-01-07 published_posts = BlogPost.published() published_posts = published_posts.filter( - published_date__lt=datetime.datetime(2010, 1, 7, 0, 0, 0)) + published_date__lt=datetime.datetime(2010, 1, 7, 0, 0, 0) + ) self.assertEqual(published_posts.count(), 2) blog_posts = BlogPost.objects @@ -1247,11 +1264,11 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() def test_filter_chaining_with_regex(self): - person = self.Person(name='Guido van Rossum') + person = self.Person(name="Guido van Rossum") person.save() people = self.Person.objects - people = people.filter(name__startswith='Gui').filter(name__not__endswith='tum') + people = people.filter(name__startswith="Gui").filter(name__not__endswith="tum") self.assertEqual(people.count(), 1) def assertSequence(self, qs, expected): @@ -1264,27 +1281,23 @@ class QuerySetTest(unittest.TestCase): def test_ordering(self): """Ensure default ordering is applied and can be overridden. 
""" + class BlogPost(Document): title = StringField() published_date = DateTimeField() - meta = { - 'ordering': ['-published_date'] - } + meta = {"ordering": ["-published_date"]} BlogPost.drop_collection() blog_post_1 = BlogPost.objects.create( - title="Blog Post #1", - published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + title="Blog Post #1", published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) ) blog_post_2 = BlogPost.objects.create( - title="Blog Post #2", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="Blog Post #2", published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_3 = BlogPost.objects.create( - title="Blog Post #3", - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + title="Blog Post #3", published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) ) # get the "first" BlogPost using default ordering @@ -1307,39 +1320,35 @@ class QuerySetTest(unittest.TestCase): title = StringField() published_date = DateTimeField() - meta = { - 'ordering': ['-published_date'] - } + meta = {"ordering": ["-published_date"]} BlogPost.drop_collection() # default ordering should be used by default with db_ops_tracker() as q: - BlogPost.objects.filter(title='whatever').first() + BlogPost.objects.filter(title="whatever").first() self.assertEqual(len(q.get_ops()), 1) self.assertEqual( - q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], - {'published_date': -1} + q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], {"published_date": -1} ) # calling order_by() should clear the default ordering with db_ops_tracker() as q: - BlogPost.objects.filter(title='whatever').order_by().first() + BlogPost.objects.filter(title="whatever").order_by().first() self.assertEqual(len(q.get_ops()), 1) self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) # calling an explicit order_by should use a specified sort with db_ops_tracker() as q: - BlogPost.objects.filter(title='whatever').order_by('published_date').first() + BlogPost.objects.filter(title="whatever").order_by("published_date").first() self.assertEqual(len(q.get_ops()), 1) self.assertEqual( - q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], - {'published_date': 1} + q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], {"published_date": 1} ) # calling order_by() after an explicit sort should clear it with db_ops_tracker() as q: - qs = BlogPost.objects.filter(title='whatever').order_by('published_date') + qs = BlogPost.objects.filter(title="whatever").order_by("published_date") qs.order_by().first() self.assertEqual(len(q.get_ops()), 1) self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) @@ -1353,21 +1362,20 @@ class QuerySetTest(unittest.TestCase): title = StringField() published_date = DateTimeField() - meta = { - 'ordering': ['-published_date'] - } + meta = {"ordering": ["-published_date"]} BlogPost.objects.create( - title='whatever', published_date=datetime.datetime.utcnow()) + title="whatever", published_date=datetime.datetime.utcnow() + ) with db_ops_tracker() as q: - BlogPost.objects.get(title='whatever') + BlogPost.objects.get(title="whatever") self.assertEqual(len(q.get_ops()), 1) self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) # Ordering should be ignored for .get even if we set it explicitly with db_ops_tracker() as q: - BlogPost.objects.order_by('-title').get(title='whatever') + BlogPost.objects.order_by("-title").get(title="whatever") self.assertEqual(len(q.get_ops()), 1) self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) @@ -1375,6 +1383,7 @@ class QuerySetTest(unittest.TestCase): """Ensure that an 
embedded document is properly returned from different manners of querying. """ + class User(EmbeddedDocument): name = StringField() @@ -1384,23 +1393,20 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - user = User(name='Test User') - BlogPost.objects.create( - author=user, - content='Had a good coffee today...' - ) + user = User(name="Test User") + BlogPost.objects.create(author=user, content="Had a good coffee today...") result = BlogPost.objects.first() self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, 'Test User') + self.assertEqual(result.author.name, "Test User") result = BlogPost.objects.get(author__name=user.name) self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, 'Test User') + self.assertEqual(result.author.name, "Test User") - result = BlogPost.objects.get(author={'name': user.name}) + result = BlogPost.objects.get(author={"name": user.name}) self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, 'Test User') + self.assertEqual(result.author.name, "Test User") # Fails, since the string is not a type that is able to represent the # author's document structure (should be dict) @@ -1409,6 +1415,7 @@ class QuerySetTest(unittest.TestCase): def test_find_empty_embedded(self): """Ensure that you can save and find an empty embedded document.""" + class User(EmbeddedDocument): name = StringField() @@ -1418,7 +1425,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - BlogPost.objects.create(content='Anonymous post...') + BlogPost.objects.create(content="Anonymous post...") result = BlogPost.objects.get(author=None) self.assertEqual(result.author, None) @@ -1426,15 +1433,16 @@ class QuerySetTest(unittest.TestCase): def test_find_dict_item(self): """Ensure that DictField items may be found. """ + class BlogPost(Document): info = DictField() BlogPost.drop_collection() - post = BlogPost(info={'title': 'test'}) + post = BlogPost(info={"title": "test"}) post.save() - post_obj = BlogPost.objects(info__title='test').first() + post_obj = BlogPost.objects(info__title="test").first() self.assertEqual(post_obj.id, post.id) BlogPost.drop_collection() @@ -1442,6 +1450,7 @@ class QuerySetTest(unittest.TestCase): def test_exec_js_query(self): """Ensure that queries are properly formed for use in exec_js. """ + class BlogPost(Document): hits = IntField() published = BooleanField() @@ -1468,10 +1477,10 @@ class QuerySetTest(unittest.TestCase): """ # Ensure that normal queries work - c = BlogPost.objects(published=True).exec_js(js_func, 'hits') + c = BlogPost.objects(published=True).exec_js(js_func, "hits") self.assertEqual(c, 2) - c = BlogPost.objects(published=False).exec_js(js_func, 'hits') + c = BlogPost.objects(published=False).exec_js(js_func, "hits") self.assertEqual(c, 1) BlogPost.drop_collection() @@ -1479,22 +1488,22 @@ class QuerySetTest(unittest.TestCase): def test_exec_js_field_sub(self): """Ensure that field substitutions occur properly in exec_js functions. 
""" + class Comment(EmbeddedDocument): - content = StringField(db_field='body') + content = StringField(db_field="body") class BlogPost(Document): - name = StringField(db_field='doc-name') - comments = ListField(EmbeddedDocumentField(Comment), - db_field='cmnts') + name = StringField(db_field="doc-name") + comments = ListField(EmbeddedDocumentField(Comment), db_field="cmnts") BlogPost.drop_collection() - comments1 = [Comment(content='cool'), Comment(content='yay')] - post1 = BlogPost(name='post1', comments=comments1) + comments1 = [Comment(content="cool"), Comment(content="yay")] + post1 = BlogPost(name="post1", comments=comments1) post1.save() - comments2 = [Comment(content='nice stuff')] - post2 = BlogPost(name='post2', comments=comments2) + comments2 = [Comment(content="nice stuff")] + post2 = BlogPost(name="post2", comments=comments2) post2.save() code = """ @@ -1514,16 +1523,15 @@ class QuerySetTest(unittest.TestCase): """ sub_code = BlogPost.objects._sub_js_fields(code) - code_chunks = ['doc["cmnts"];', 'doc["doc-name"],', - 'doc["cmnts"][i]["body"]'] + code_chunks = ['doc["cmnts"];', 'doc["doc-name"],', 'doc["cmnts"][i]["body"]'] for chunk in code_chunks: self.assertIn(chunk, sub_code) results = BlogPost.objects.exec_js(code) expected_results = [ - {u'comment': u'cool', u'document': u'post1'}, - {u'comment': u'yay', u'document': u'post1'}, - {u'comment': u'nice stuff', u'document': u'post2'}, + {u"comment": u"cool", u"document": u"post1"}, + {u"comment": u"yay", u"document": u"post1"}, + {u"comment": u"nice stuff", u"document": u"post2"}, ] self.assertEqual(results, expected_results) @@ -1552,55 +1560,60 @@ class QuerySetTest(unittest.TestCase): def test_reverse_delete_rule_cascade(self): """Ensure cascading deletion of referring documents from the database. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() - BlogPost(content='Chilling out', author=me).save() - BlogPost(content='Pro Testing', author=someoneelse).save() + BlogPost(content="Watching TV", author=me).save() + BlogPost(content="Chilling out", author=me).save() + BlogPost(content="Pro Testing", author=someoneelse).save() self.assertEqual(3, BlogPost.objects.count()) - self.Person.objects(name='Test User').delete() + self.Person.objects(name="Test User").delete() self.assertEqual(1, BlogPost.objects.count()) def test_reverse_delete_rule_cascade_on_abstract_document(self): """Ensure cascading deletion of referring documents from the database does not fail on abstract document. 
""" + class AbstractBlogPost(Document): - meta = {'abstract': True} + meta = {"abstract": True} author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) class BlogPost(AbstractBlogPost): content = StringField() + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() - BlogPost(content='Chilling out', author=me).save() - BlogPost(content='Pro Testing', author=someoneelse).save() + BlogPost(content="Watching TV", author=me).save() + BlogPost(content="Chilling out", author=me).save() + BlogPost(content="Pro Testing", author=someoneelse).save() self.assertEqual(3, BlogPost.objects.count()) - self.Person.objects(name='Test User').delete() + self.Person.objects(name="Test User").delete() self.assertEqual(1, BlogPost.objects.count()) def test_reverse_delete_rule_cascade_cycle(self): """Ensure reference cascading doesn't loop if reference graph isn't a tree """ + class Dummy(Document): - reference = ReferenceField('self', reverse_delete_rule=CASCADE) + reference = ReferenceField("self", reverse_delete_rule=CASCADE) base = Dummy().save() other = Dummy(reference=base).save() @@ -1616,14 +1629,15 @@ class QuerySetTest(unittest.TestCase): """Ensure reference cascading doesn't loop if reference graph isn't a tree """ + class Category(Document): name = StringField() class Dummy(Document): - reference = ReferenceField('self', reverse_delete_rule=CASCADE) + reference = ReferenceField("self", reverse_delete_rule=CASCADE) cat = ReferenceField(Category, reverse_delete_rule=CASCADE) - cat = Category(name='cat').save() + cat = Category(name="cat").save() base = Dummy(cat=cat).save() other = Dummy(reference=base).save() other2 = Dummy(reference=other).save() @@ -1640,24 +1654,25 @@ class QuerySetTest(unittest.TestCase): """Ensure self-referencing CASCADE deletes do not result in infinite loop """ + class Category(Document): name = StringField() - parent = ReferenceField('self', reverse_delete_rule=CASCADE) + parent = ReferenceField("self", reverse_delete_rule=CASCADE) Category.drop_collection() num_children = 3 - base = Category(name='Root') + base = Category(name="Root") base.save() # Create a simple parent-child tree for i in range(num_children): - child_name = 'Child-%i' % i + child_name = "Child-%i" % i child = Category(name=child_name, parent=base) child.save() for i in range(num_children): - child_child_name = 'Child-Child-%i' % i + child_child_name = "Child-Child-%i" % i child_child = Category(name=child_child_name, parent=child) child_child.save() @@ -1673,6 +1688,7 @@ class QuerySetTest(unittest.TestCase): def test_reverse_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. 
""" + class Category(Document): name = StringField() @@ -1683,14 +1699,14 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() Category.drop_collection() - lameness = Category(name='Lameness') + lameness = Category(name="Lameness") lameness.save() - post = BlogPost(content='Watching TV', category=lameness) + post = BlogPost(content="Watching TV", category=lameness) post.save() self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual('Lameness', BlogPost.objects.first().category.name) + self.assertEqual("Lameness", BlogPost.objects.first().category.name) Category.objects.delete() self.assertEqual(1, BlogPost.objects.count()) self.assertEqual(None, BlogPost.objects.first().category) @@ -1699,24 +1715,26 @@ class QuerySetTest(unittest.TestCase): """Ensure nullification of references to deleted documents when reference is on an abstract document. """ + class AbstractBlogPost(Document): - meta = {'abstract': True} + meta = {"abstract": True} author = ReferenceField(self.Person, reverse_delete_rule=NULLIFY) class BlogPost(AbstractBlogPost): content = StringField() + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() + BlogPost(content="Watching TV", author=me).save() self.assertEqual(1, BlogPost.objects.count()) self.assertEqual(me, BlogPost.objects.first().author) - self.Person.objects(name='Test User').delete() + self.Person.objects(name="Test User").delete() self.assertEqual(1, BlogPost.objects.count()) self.assertEqual(None, BlogPost.objects.first().author) @@ -1724,6 +1742,7 @@ class QuerySetTest(unittest.TestCase): """Ensure deletion gets denied on documents that still have references to them. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=DENY) @@ -1731,10 +1750,10 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - post = BlogPost(content='Watching TV', author=me) + post = BlogPost(content="Watching TV", author=me) post.save() self.assertRaises(OperationError, self.Person.objects.delete) @@ -1743,18 +1762,20 @@ class QuerySetTest(unittest.TestCase): """Ensure deletion gets denied on documents that still have references to them, when reference is on an abstract document. """ + class AbstractBlogPost(Document): - meta = {'abstract': True} + meta = {"abstract": True} author = ReferenceField(self.Person, reverse_delete_rule=DENY) class BlogPost(AbstractBlogPost): content = StringField() + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - BlogPost(content='Watching TV', author=me).save() + BlogPost(content="Watching TV", author=me).save() self.assertEqual(1, BlogPost.objects.count()) self.assertRaises(OperationError, self.Person.objects.delete) @@ -1762,24 +1783,24 @@ class QuerySetTest(unittest.TestCase): def test_reverse_delete_rule_pull(self): """Ensure pulling of references to deleted documents. 
""" + class BlogPost(Document): content = StringField() - authors = ListField(ReferenceField(self.Person, - reverse_delete_rule=PULL)) + authors = ListField(ReferenceField(self.Person, reverse_delete_rule=PULL)) BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - post = BlogPost(content='Watching TV', authors=[me, someoneelse]) + post = BlogPost(content="Watching TV", authors=[me, someoneelse]) post.save() - another = BlogPost(content='Chilling Out', authors=[someoneelse]) + another = BlogPost(content="Chilling Out", authors=[someoneelse]) another.save() someoneelse.delete() @@ -1793,10 +1814,10 @@ class QuerySetTest(unittest.TestCase): """Ensure pulling of references to deleted documents when reference is defined on an abstract document.. """ + class AbstractBlogPost(Document): - meta = {'abstract': True} - authors = ListField(ReferenceField(self.Person, - reverse_delete_rule=PULL)) + meta = {"abstract": True} + authors = ListField(ReferenceField(self.Person, reverse_delete_rule=PULL)) class BlogPost(AbstractBlogPost): content = StringField() @@ -1804,16 +1825,16 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - post = BlogPost(content='Watching TV', authors=[me, someoneelse]) + post = BlogPost(content="Watching TV", authors=[me, someoneelse]) post.save() - another = BlogPost(content='Chilling Out', authors=[someoneelse]) + another = BlogPost(content="Chilling Out", authors=[someoneelse]) another.save() someoneelse.delete() @@ -1824,7 +1845,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(another.authors, []) def test_delete_with_limits(self): - class Log(Document): pass @@ -1839,19 +1859,21 @@ class QuerySetTest(unittest.TestCase): def test_delete_with_limit_handles_delete_rules(self): """Ensure cascading deletion of referring documents from the database. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() - BlogPost(content='Chilling out', author=me).save() - BlogPost(content='Pro Testing', author=someoneelse).save() + BlogPost(content="Watching TV", author=me).save() + BlogPost(content="Chilling out", author=me).save() + BlogPost(content="Pro Testing", author=someoneelse).save() self.assertEqual(3, BlogPost.objects.count()) self.Person.objects()[:1].delete() @@ -1870,6 +1892,7 @@ class QuerySetTest(unittest.TestCase): def test_reference_field_find(self): """Ensure cascading deletion of referring documents from the database. 
""" + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person) @@ -1877,7 +1900,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User').save() + me = self.Person(name="Test User").save() BlogPost(content="test 123", author=me).save() self.assertEqual(1, BlogPost.objects(author=me).count()) @@ -1886,12 +1909,12 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) - self.assertEqual( - 1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count()) def test_reference_field_find_dbref(self): """Ensure cascading deletion of referring documents from the database. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, dbref=True) @@ -1899,7 +1922,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User').save() + me = self.Person(name="Test User").save() BlogPost(content="test 123", author=me).save() self.assertEqual(1, BlogPost.objects(author=me).count()) @@ -1908,8 +1931,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) - self.assertEqual( - 1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count()) def test_update_intfield_operator(self): class BlogPost(Document): @@ -1946,7 +1968,7 @@ class QuerySetTest(unittest.TestCase): post = BlogPost(review=3.5) post.save() - BlogPost.objects.update_one(inc__review=0.1) # test with floats + BlogPost.objects.update_one(inc__review=0.1) # test with floats post.reload() self.assertEqual(float(post.review), 3.6) @@ -1954,7 +1976,7 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(float(post.review), 3.5) - BlogPost.objects.update_one(inc__review=Decimal(0.12)) # test with Decimal + BlogPost.objects.update_one(inc__review=Decimal(0.12)) # test with Decimal post.reload() self.assertEqual(float(post.review), 3.62) @@ -1972,38 +1994,39 @@ class QuerySetTest(unittest.TestCase): post.save() with self.assertRaises(OperationError): - BlogPost.objects.update_one(inc__review=0.1) # test with floats + BlogPost.objects.update_one(inc__review=0.1) # test with floats def test_update_listfield_operator(self): """Ensure that atomic updates work properly. 
""" + class BlogPost(Document): tags = ListField(StringField()) BlogPost.drop_collection() - post = BlogPost(tags=['test']) + post = BlogPost(tags=["test"]) post.save() # ListField operator - BlogPost.objects.update(push__tags='mongo') + BlogPost.objects.update(push__tags="mongo") post.reload() - self.assertIn('mongo', post.tags) + self.assertIn("mongo", post.tags) - BlogPost.objects.update_one(push_all__tags=['db', 'nosql']) + BlogPost.objects.update_one(push_all__tags=["db", "nosql"]) post.reload() - self.assertIn('db', post.tags) - self.assertIn('nosql', post.tags) + self.assertIn("db", post.tags) + self.assertIn("nosql", post.tags) tags = post.tags[:-1] BlogPost.objects.update(pop__tags=1) post.reload() self.assertEqual(post.tags, tags) - BlogPost.objects.update_one(add_to_set__tags='unique') - BlogPost.objects.update_one(add_to_set__tags='unique') + BlogPost.objects.update_one(add_to_set__tags="unique") + BlogPost.objects.update_one(add_to_set__tags="unique") post.reload() - self.assertEqual(post.tags.count('unique'), 1) + self.assertEqual(post.tags.count("unique"), 1) BlogPost.drop_collection() @@ -2013,18 +2036,19 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - post = BlogPost(title='garbage').save() + post = BlogPost(title="garbage").save() self.assertNotEqual(post.title, None) BlogPost.objects.update_one(unset__title=1) post.reload() self.assertEqual(post.title, None) pymongo_doc = BlogPost.objects.as_pymongo().first() - self.assertNotIn('title', pymongo_doc) + self.assertNotIn("title", pymongo_doc) def test_update_push_with_position(self): """Ensure that the 'push' update with position works properly. """ + class BlogPost(Document): slug = StringField() tags = ListField(StringField()) @@ -2036,20 +2060,21 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects.filter(id=post.id).update(push__tags="code") BlogPost.objects.filter(id=post.id).update(push__tags__0=["mongodb", "python"]) post.reload() - self.assertEqual(post.tags, ['mongodb', 'python', 'code']) + self.assertEqual(post.tags, ["mongodb", "python", "code"]) BlogPost.objects.filter(id=post.id).update(set__tags__2="java") post.reload() - self.assertEqual(post.tags, ['mongodb', 'python', 'java']) + self.assertEqual(post.tags, ["mongodb", "python", "java"]) # test push with singular value - BlogPost.objects.filter(id=post.id).update(push__tags__0='scala') + BlogPost.objects.filter(id=post.id).update(push__tags__0="scala") post.reload() - self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java']) + self.assertEqual(post.tags, ["scala", "mongodb", "python", "java"]) def test_update_push_list_of_list(self): """Ensure that the 'push' update operation works in the list of list """ + class BlogPost(Document): slug = StringField() tags = ListField() @@ -2065,6 +2090,7 @@ class QuerySetTest(unittest.TestCase): def test_update_push_and_pull_add_to_set(self): """Ensure that the 'pull' update operation works correctly. 
""" + class BlogPost(Document): slug = StringField() tags = ListField(StringField()) @@ -2078,8 +2104,7 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(post.tags, ["code"]) - BlogPost.objects.filter(id=post.id).update( - push_all__tags=["mongodb", "code"]) + BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"]) post.reload() self.assertEqual(post.tags, ["code", "mongodb", "code"]) @@ -2087,13 +2112,13 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(post.tags, ["mongodb"]) - BlogPost.objects(slug="test").update( - pull_all__tags=["mongodb", "code"]) + BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"]) post.reload() self.assertEqual(post.tags, []) BlogPost.objects(slug="test").update( - __raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}}) + __raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}} + ) post.reload() self.assertEqual(post.tags, ["code", "mongodb"]) @@ -2101,13 +2126,13 @@ class QuerySetTest(unittest.TestCase): class Item(Document): name = StringField(required=True) description = StringField(max_length=50) - parents = ListField(ReferenceField('self')) + parents = ListField(ReferenceField("self")) Item.drop_collection() - item = Item(name='test item').save() - parent_1 = Item(name='parent 1').save() - parent_2 = Item(name='parent 2').save() + item = Item(name="test item").save() + parent_1 = Item(name="parent 1").save() + parent_2 = Item(name="parent 2").save() item.update(add_to_set__parents=[parent_1, parent_2, parent_1]) item.reload() @@ -2115,12 +2140,11 @@ class QuerySetTest(unittest.TestCase): self.assertEqual([parent_1, parent_2], item.parents) def test_pull_nested(self): - class Collaborator(EmbeddedDocument): user = StringField() def __unicode__(self): - return '%s' % self.user + return "%s" % self.user class Site(Document): name = StringField(max_length=75, unique=True, required=True) @@ -2128,23 +2152,21 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() - c = Collaborator(user='Esteban') + c = Collaborator(user="Esteban") s = Site(name="test", collaborators=[c]).save() - Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') + Site.objects(id=s.id).update_one(pull__collaborators__user="Esteban") self.assertEqual(Site.objects.first().collaborators, []) with self.assertRaises(InvalidQueryError): - Site.objects(id=s.id).update_one( - pull_all__collaborators__user=['Ross']) + Site.objects(id=s.id).update_one(pull_all__collaborators__user=["Ross"]) def test_pull_from_nested_embedded(self): - class User(EmbeddedDocument): name = StringField() def __unicode__(self): - return '%s' % self.name + return "%s" % self.name class Collaborator(EmbeddedDocument): helpful = ListField(EmbeddedDocumentField(User)) @@ -2156,21 +2178,24 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() - c = User(name='Esteban') - f = User(name='Frank') - s = Site(name="test", collaborators=Collaborator( - helpful=[c], unhelpful=[f])).save() + c = User(name="Esteban") + f = User(name="Frank") + s = Site( + name="test", collaborators=Collaborator(helpful=[c], unhelpful=[f]) + ).save() Site.objects(id=s.id).update_one(pull__collaborators__helpful=c) - self.assertEqual(Site.objects.first().collaborators['helpful'], []) + self.assertEqual(Site.objects.first().collaborators["helpful"], []) Site.objects(id=s.id).update_one( - pull__collaborators__unhelpful={'name': 'Frank'}) - 
self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + pull__collaborators__unhelpful={"name": "Frank"} + ) + self.assertEqual(Site.objects.first().collaborators["unhelpful"], []) with self.assertRaises(InvalidQueryError): Site.objects(id=s.id).update_one( - pull_all__collaborators__helpful__name=['Ross']) + pull_all__collaborators__helpful__name=["Ross"] + ) def test_pull_from_nested_embedded_using_in_nin(self): """Ensure that the 'pull' update operation works on embedded documents using 'in' and 'nin' operators. @@ -2180,7 +2205,7 @@ class QuerySetTest(unittest.TestCase): name = StringField() def __unicode__(self): - return '%s' % self.name + return "%s" % self.name class Collaborator(EmbeddedDocument): helpful = ListField(EmbeddedDocumentField(User)) @@ -2192,60 +2217,62 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() - a = User(name='Esteban') - b = User(name='Frank') - x = User(name='Harry') - y = User(name='John') + a = User(name="Esteban") + b = User(name="Frank") + x = User(name="Harry") + y = User(name="John") - s = Site(name="test", collaborators=Collaborator( - helpful=[a, b], unhelpful=[x, y])).save() + s = Site( + name="test", collaborators=Collaborator(helpful=[a, b], unhelpful=[x, y]) + ).save() - Site.objects(id=s.id).update_one(pull__collaborators__helpful__name__in=['Esteban']) # Pull a - self.assertEqual(Site.objects.first().collaborators['helpful'], [b]) + Site.objects(id=s.id).update_one( + pull__collaborators__helpful__name__in=["Esteban"] + ) # Pull a + self.assertEqual(Site.objects.first().collaborators["helpful"], [b]) - Site.objects(id=s.id).update_one(pull__collaborators__unhelpful__name__nin=['John']) # Pull x - self.assertEqual(Site.objects.first().collaborators['unhelpful'], [y]) + Site.objects(id=s.id).update_one( + pull__collaborators__unhelpful__name__nin=["John"] + ) # Pull x + self.assertEqual(Site.objects.first().collaborators["unhelpful"], [y]) def test_pull_from_nested_mapfield(self): - class Collaborator(EmbeddedDocument): user = StringField() def __unicode__(self): - return '%s' % self.user + return "%s" % self.user class Site(Document): name = StringField(max_length=75, unique=True, required=True) - collaborators = MapField( - ListField(EmbeddedDocumentField(Collaborator))) + collaborators = MapField(ListField(EmbeddedDocumentField(Collaborator))) Site.drop_collection() - c = Collaborator(user='Esteban') - f = Collaborator(user='Frank') - s = Site(name="test", collaborators={'helpful': [c], 'unhelpful': [f]}) + c = Collaborator(user="Esteban") + f = Collaborator(user="Frank") + s = Site(name="test", collaborators={"helpful": [c], "unhelpful": [f]}) s.save() - Site.objects(id=s.id).update_one( - pull__collaborators__helpful__user='Esteban') - self.assertEqual(Site.objects.first().collaborators['helpful'], []) + Site.objects(id=s.id).update_one(pull__collaborators__helpful__user="Esteban") + self.assertEqual(Site.objects.first().collaborators["helpful"], []) Site.objects(id=s.id).update_one( - pull__collaborators__unhelpful={'user': 'Frank'}) - self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + pull__collaborators__unhelpful={"user": "Frank"} + ) + self.assertEqual(Site.objects.first().collaborators["unhelpful"], []) with self.assertRaises(InvalidQueryError): Site.objects(id=s.id).update_one( - pull_all__collaborators__helpful__user=['Ross']) + pull_all__collaborators__helpful__user=["Ross"] + ) def test_pull_in_genericembedded_field(self): - class Foo(EmbeddedDocument): name = StringField() class 
Bar(Document): - foos = ListField(GenericEmbeddedDocumentField( - choices=[Foo, ])) + foos = ListField(GenericEmbeddedDocumentField(choices=[Foo])) Bar.drop_collection() @@ -2261,15 +2288,14 @@ class QuerySetTest(unittest.TestCase): BlogTag.drop_collection() - BlogTag(name='garbage').save() - default_update = BlogTag.objects.update_one(name='new') + BlogTag(name="garbage").save() + default_update = BlogTag.objects.update_one(name="new") self.assertEqual(default_update, 1) - full_result_update = BlogTag.objects.update_one(name='new', full_result=True) + full_result_update = BlogTag.objects.update_one(name="new", full_result=True) self.assertIsInstance(full_result_update, UpdateResult) def test_update_one_pop_generic_reference(self): - class BlogTag(Document): name = StringField(required=True) @@ -2280,9 +2306,9 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() BlogTag.drop_collection() - tag_1 = BlogTag(name='code') + tag_1 = BlogTag(name="code") tag_1.save() - tag_2 = BlogTag(name='mongodb') + tag_2 = BlogTag(name="mongodb") tag_2.save() post = BlogPost(slug="test", tags=[tag_1]) @@ -2301,7 +2327,6 @@ class QuerySetTest(unittest.TestCase): BlogTag.drop_collection() def test_editting_embedded_objects(self): - class BlogTag(EmbeddedDocument): name = StringField(required=True) @@ -2311,8 +2336,8 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - tag_1 = BlogTag(name='code') - tag_2 = BlogTag(name='mongodb') + tag_1 = BlogTag(name="code") + tag_2 = BlogTag(name="mongodb") post = BlogPost(slug="test", tags=[tag_1]) post.save() @@ -2323,7 +2348,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python") post.reload() - self.assertEqual(post.tags[0].name, 'python') + self.assertEqual(post.tags[0].name, "python") BlogPost.objects(slug="test-2").update_one(pop__tags=-1) post.reload() @@ -2332,13 +2357,12 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() def test_set_list_embedded_documents(self): - class Author(EmbeddedDocument): name = StringField() class Message(Document): title = StringField() - authors = ListField(EmbeddedDocumentField('Author')) + authors = ListField(EmbeddedDocumentField("Author")) Message.drop_collection() @@ -2346,15 +2370,19 @@ class QuerySetTest(unittest.TestCase): message.save() Message.objects(authors__name="Harry").update_one( - set__authors__S=Author(name="Ross")) + set__authors__S=Author(name="Ross") + ) message = message.reload() self.assertEqual(message.authors[0].name, "Ross") Message.objects(authors__name="Ross").update_one( - set__authors=[Author(name="Harry"), - Author(name="Ross"), - Author(name="Adam")]) + set__authors=[ + Author(name="Harry"), + Author(name="Ross"), + Author(name="Adam"), + ] + ) message = message.reload() self.assertEqual(message.authors[0].name, "Harry") @@ -2362,7 +2390,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(message.authors[2].name, "Adam") def test_set_generic_embedded_documents(self): - class Bar(EmbeddedDocument): name = StringField() @@ -2372,15 +2399,13 @@ class QuerySetTest(unittest.TestCase): User.drop_collection() - User(username='abc').save() - User.objects(username='abc').update( - set__bar=Bar(name='test'), upsert=True) + User(username="abc").save() + User.objects(username="abc").update(set__bar=Bar(name="test"), upsert=True) - user = User.objects(username='abc').first() + user = User.objects(username="abc").first() self.assertEqual(user.bar.name, "test") def 
test_reload_embedded_docs_instance(self): - class SubDoc(EmbeddedDocument): val = IntField() @@ -2393,7 +2418,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(doc.pk, doc.embedded._instance.pk) def test_reload_list_embedded_docs_instance(self): - class SubDoc(EmbeddedDocument): val = IntField() @@ -2412,16 +2436,16 @@ class QuerySetTest(unittest.TestCase): self.Person(name="User A", age=20).save() self.Person(name="User C", age=30).save() - names = [p.name for p in self.Person.objects.order_by('-age')] - self.assertEqual(names, ['User B', 'User C', 'User A']) + names = [p.name for p in self.Person.objects.order_by("-age")] + self.assertEqual(names, ["User B", "User C", "User A"]) - names = [p.name for p in self.Person.objects.order_by('+age')] - self.assertEqual(names, ['User A', 'User C', 'User B']) + names = [p.name for p in self.Person.objects.order_by("+age")] + self.assertEqual(names, ["User A", "User C", "User B"]) - names = [p.name for p in self.Person.objects.order_by('age')] - self.assertEqual(names, ['User A', 'User C', 'User B']) + names = [p.name for p in self.Person.objects.order_by("age")] + self.assertEqual(names, ["User A", "User C", "User B"]) - ages = [p.age for p in self.Person.objects.order_by('-name')] + ages = [p.age for p in self.Person.objects.order_by("-name")] self.assertEqual(ages, [30, 40, 20]) def test_order_by_optional(self): @@ -2432,31 +2456,22 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() blog_post_3 = BlogPost.objects.create( - title="Blog Post #3", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="Blog Post #3", published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_2 = BlogPost.objects.create( - title="Blog Post #2", - published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + title="Blog Post #2", published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) ) blog_post_4 = BlogPost.objects.create( - title="Blog Post #4", - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) - ) - blog_post_1 = BlogPost.objects.create( - title="Blog Post #1", - published_date=None + title="Blog Post #4", published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) ) + blog_post_1 = BlogPost.objects.create(title="Blog Post #1", published_date=None) expected = [blog_post_1, blog_post_2, blog_post_3, blog_post_4] - self.assertSequence(BlogPost.objects.order_by('published_date'), - expected) - self.assertSequence(BlogPost.objects.order_by('+published_date'), - expected) + self.assertSequence(BlogPost.objects.order_by("published_date"), expected) + self.assertSequence(BlogPost.objects.order_by("+published_date"), expected) expected.reverse() - self.assertSequence(BlogPost.objects.order_by('-published_date'), - expected) + self.assertSequence(BlogPost.objects.order_by("-published_date"), expected) def test_order_by_list(self): class BlogPost(Document): @@ -2466,23 +2481,20 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() blog_post_1 = BlogPost.objects.create( - title="A", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="A", published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_2 = BlogPost.objects.create( - title="B", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="B", published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_3 = BlogPost.objects.create( - title="C", - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + title="C", published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) ) - qs = BlogPost.objects.order_by('published_date', 'title') + qs = 
BlogPost.objects.order_by("published_date", "title") expected = [blog_post_1, blog_post_2, blog_post_3] self.assertSequence(qs, expected) - qs = BlogPost.objects.order_by('-published_date', '-title') + qs = BlogPost.objects.order_by("-published_date", "-title") expected.reverse() self.assertSequence(qs, expected) @@ -2493,7 +2505,7 @@ class QuerySetTest(unittest.TestCase): self.Person(name="User A", age=20).save() self.Person(name="User C", age=30).save() - only_age = self.Person.objects.order_by('-age').only('age') + only_age = self.Person.objects.order_by("-age").only("age") names = [p.name for p in only_age] ages = [p.age for p in only_age] @@ -2502,19 +2514,19 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(names, [None, None, None]) self.assertEqual(ages, [40, 30, 20]) - qs = self.Person.objects.all().order_by('-age') + qs = self.Person.objects.all().order_by("-age") qs = qs.limit(10) ages = [p.age for p in qs] self.assertEqual(ages, [40, 30, 20]) qs = self.Person.objects.all().limit(10) - qs = qs.order_by('-age') + qs = qs.order_by("-age") ages = [p.age for p in qs] self.assertEqual(ages, [40, 30, 20]) qs = self.Person.objects.all().skip(0) - qs = qs.order_by('-age') + qs = qs.order_by("-age") ages = [p.age for p in qs] self.assertEqual(ages, [40, 30, 20]) @@ -2538,47 +2550,47 @@ class QuerySetTest(unittest.TestCase): Author(author=person_b).save() Author(author=person_c).save() - names = [ - a.author.name for a in Author.objects.order_by('-author__age')] - self.assertEqual(names, ['User A', 'User B', 'User C']) + names = [a.author.name for a in Author.objects.order_by("-author__age")] + self.assertEqual(names, ["User A", "User B", "User C"]) def test_comment(self): """Make sure adding a comment to the query gets added to the query""" MONGO_VER = self.mongodb_version _, CMD_QUERY_KEY = get_key_compat(MONGO_VER) - QUERY_KEY = 'filter' - COMMENT_KEY = 'comment' + QUERY_KEY = "filter" + COMMENT_KEY = "comment" class User(Document): age = IntField() with db_ops_tracker() as q: - adult1 = (User.objects.filter(age__gte=18) - .comment('looking for an adult') - .first()) + adult1 = ( + User.objects.filter(age__gte=18).comment("looking for an adult").first() + ) - adult2 = (User.objects.comment('looking for an adult') - .filter(age__gte=18) - .first()) + adult2 = ( + User.objects.comment("looking for an adult").filter(age__gte=18).first() + ) ops = q.get_ops() self.assertEqual(len(ops), 2) for op in ops: - self.assertEqual(op[CMD_QUERY_KEY][QUERY_KEY], {'age': {'$gte': 18}}) - self.assertEqual(op[CMD_QUERY_KEY][COMMENT_KEY], 'looking for an adult') + self.assertEqual(op[CMD_QUERY_KEY][QUERY_KEY], {"age": {"$gte": 18}}) + self.assertEqual(op[CMD_QUERY_KEY][COMMENT_KEY], "looking for an adult") def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. 
""" + class BlogPost(Document): title = StringField() - tags = ListField(StringField(), db_field='post-tag-list') + tags = ListField(StringField(), db_field="post-tag-list") BlogPost.drop_collection() - BlogPost(title="Post #1", tags=['music', 'film', 'print']).save() - BlogPost(title="Post #2", tags=['music', 'film']).save() - BlogPost(title="Post #3", tags=['film', 'photography']).save() + BlogPost(title="Post #1", tags=["music", "film", "print"]).save() + BlogPost(title="Post #2", tags=["music", "film"]).save() + BlogPost(title="Post #3", tags=["film", "photography"]).save() map_f = """ function() { @@ -2628,8 +2640,8 @@ class QuerySetTest(unittest.TestCase): post2.save() post3.save() - self.assertEqual(BlogPost._fields['title'].db_field, '_id') - self.assertEqual(BlogPost._meta['id_field'], 'title') + self.assertEqual(BlogPost._fields["title"].db_field, "_id") + self.assertEqual(BlogPost._meta["id_field"], "title") map_f = """ function() { @@ -2661,16 +2673,14 @@ class QuerySetTest(unittest.TestCase): """ Test map/reduce custom output """ - register_connection('test2', 'mongoenginetest2') + register_connection("test2", "mongoenginetest2") class Family(Document): - id = IntField( - primary_key=True) + id = IntField(primary_key=True) log = StringField() class Person(Document): - id = IntField( - primary_key=True) + id = IntField(primary_key=True) name = StringField() age = IntField() family = ReferenceField(Family) @@ -2745,7 +2755,8 @@ class QuerySetTest(unittest.TestCase): cursor = Family.objects.map_reduce( map_f=map_family, reduce_f=reduce_f, - output={'replace': 'family_map', 'db_alias': 'test2'}) + output={"replace": "family_map", "db_alias": "test2"}, + ) # start a map/reduce cursor.next() @@ -2753,43 +2764,56 @@ class QuerySetTest(unittest.TestCase): results = Person.objects.map_reduce( map_f=map_person, reduce_f=reduce_f, - output={'reduce': 'family_map', 'db_alias': 'test2'}) + output={"reduce": "family_map", "db_alias": "test2"}, + ) results = list(results) - collection = get_db('test2').family_map + collection = get_db("test2").family_map self.assertEqual( - collection.find_one({'_id': 1}), { - '_id': 1, - 'value': { - 'persons': [ - {'age': 21, 'name': u'Wilson Jr'}, - {'age': 45, 'name': u'Wilson Father'}, - {'age': 40, 'name': u'Eliana Costa'}, - {'age': 17, 'name': u'Tayza Mariana'}], - 'totalAge': 123} - }) + collection.find_one({"_id": 1}), + { + "_id": 1, + "value": { + "persons": [ + {"age": 21, "name": u"Wilson Jr"}, + {"age": 45, "name": u"Wilson Father"}, + {"age": 40, "name": u"Eliana Costa"}, + {"age": 17, "name": u"Tayza Mariana"}, + ], + "totalAge": 123, + }, + }, + ) self.assertEqual( - collection.find_one({'_id': 2}), { - '_id': 2, - 'value': { - 'persons': [ - {'age': 16, 'name': u'Isabella Luanna'}, - {'age': 36, 'name': u'Sandra Mara'}, - {'age': 10, 'name': u'Igor Gabriel'}], - 'totalAge': 62} - }) + collection.find_one({"_id": 2}), + { + "_id": 2, + "value": { + "persons": [ + {"age": 16, "name": u"Isabella Luanna"}, + {"age": 36, "name": u"Sandra Mara"}, + {"age": 10, "name": u"Igor Gabriel"}, + ], + "totalAge": 62, + }, + }, + ) self.assertEqual( - collection.find_one({'_id': 3}), { - '_id': 3, - 'value': { - 'persons': [ - {'age': 30, 'name': u'Arthur WA'}, - {'age': 25, 'name': u'Paula Leonel'}], - 'totalAge': 55} - }) + collection.find_one({"_id": 3}), + { + "_id": 3, + "value": { + "persons": [ + {"age": 30, "name": u"Arthur WA"}, + {"age": 25, "name": u"Paula Leonel"}, + ], + "totalAge": 55, + }, + }, + ) def test_map_reduce_finalize(self): 
"""Ensure that map, reduce, and finalize run and introduce "scope" @@ -2798,10 +2822,10 @@ class QuerySetTest(unittest.TestCase): from time import mktime class Link(Document): - title = StringField(db_field='bpTitle') + title = StringField(db_field="bpTitle") up_votes = IntField() down_votes = IntField() - submitted = DateTimeField(db_field='sTime') + submitted = DateTimeField(db_field="sTime") Link.drop_collection() @@ -2811,30 +2835,42 @@ class QuerySetTest(unittest.TestCase): # Fri, 12 Feb 2010 14:36:00 -0600. Link ordering should # reflect order of insertion below, but is not influenced # by insertion order. - Link(title="Google Buzz auto-followed a woman's abusive ex ...", - up_votes=1079, - down_votes=553, - submitted=now - datetime.timedelta(hours=4)).save() - Link(title="We did it! Barbie is a computer engineer.", - up_votes=481, - down_votes=124, - submitted=now - datetime.timedelta(hours=2)).save() - Link(title="This Is A Mosquito Getting Killed By A Laser", - up_votes=1446, - down_votes=530, - submitted=now - datetime.timedelta(hours=13)).save() - Link(title="Arabic flashcards land physics student in jail.", - up_votes=215, - down_votes=105, - submitted=now - datetime.timedelta(hours=6)).save() - Link(title="The Burger Lab: Presenting, the Flood Burger", - up_votes=48, - down_votes=17, - submitted=now - datetime.timedelta(hours=5)).save() - Link(title="How to see polarization with the naked eye", - up_votes=74, - down_votes=13, - submitted=now - datetime.timedelta(hours=10)).save() + Link( + title="Google Buzz auto-followed a woman's abusive ex ...", + up_votes=1079, + down_votes=553, + submitted=now - datetime.timedelta(hours=4), + ).save() + Link( + title="We did it! Barbie is a computer engineer.", + up_votes=481, + down_votes=124, + submitted=now - datetime.timedelta(hours=2), + ).save() + Link( + title="This Is A Mosquito Getting Killed By A Laser", + up_votes=1446, + down_votes=530, + submitted=now - datetime.timedelta(hours=13), + ).save() + Link( + title="Arabic flashcards land physics student in jail.", + up_votes=215, + down_votes=105, + submitted=now - datetime.timedelta(hours=6), + ).save() + Link( + title="The Burger Lab: Presenting, the Flood Burger", + up_votes=48, + down_votes=17, + submitted=now - datetime.timedelta(hours=5), + ).save() + Link( + title="How to see polarization with the naked eye", + up_votes=74, + down_votes=13, + submitted=now - datetime.timedelta(hours=10), + ).save() map_f = """ function() { @@ -2885,17 +2921,15 @@ class QuerySetTest(unittest.TestCase): # provide the reddit epoch (used for ranking) as a variable available # to all phases of the map/reduce operation: map, reduce, and finalize. reddit_epoch = mktime(datetime.datetime(2005, 12, 8, 7, 46, 43).timetuple()) - scope = {'reddit_epoch': reddit_epoch} + scope = {"reddit_epoch": reddit_epoch} # run a map/reduce operation across all links. ordering is set # to "-value", which orders the "weight" value returned from # "finalize_f" in descending order. results = Link.objects.order_by("-value") - results = results.map_reduce(map_f, - reduce_f, - "myresults", - finalize_f=finalize_f, - scope=scope) + results = results.map_reduce( + map_f, reduce_f, "myresults", finalize_f=finalize_f, scope=scope + ) results = list(results) # assert troublesome Buzz article is ranked 1st @@ -2909,54 +2943,56 @@ class QuerySetTest(unittest.TestCase): def test_item_frequencies(self): """Ensure that item frequencies are properly generated from lists. 
""" + class BlogPost(Document): hits = IntField() - tags = ListField(StringField(), db_field='blogTags') + tags = ListField(StringField(), db_field="blogTags") BlogPost.drop_collection() - BlogPost(hits=1, tags=['music', 'film', 'actors', 'watch']).save() - BlogPost(hits=2, tags=['music', 'watch']).save() - BlogPost(hits=2, tags=['music', 'actors']).save() + BlogPost(hits=1, tags=["music", "film", "actors", "watch"]).save() + BlogPost(hits=2, tags=["music", "watch"]).save() + BlogPost(hits=2, tags=["music", "actors"]).save() def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual( - set(['music', 'film', 'actors', 'watch']), set(f.keys())) - self.assertEqual(f['music'], 3) - self.assertEqual(f['actors'], 2) - self.assertEqual(f['watch'], 2) - self.assertEqual(f['film'], 1) + self.assertEqual(set(["music", "film", "actors", "watch"]), set(f.keys())) + self.assertEqual(f["music"], 3) + self.assertEqual(f["actors"], 2) + self.assertEqual(f["watch"], 2) + self.assertEqual(f["film"], 1) - exec_js = BlogPost.objects.item_frequencies('tags') - map_reduce = BlogPost.objects.item_frequencies('tags', map_reduce=True) + exec_js = BlogPost.objects.item_frequencies("tags") + map_reduce = BlogPost.objects.item_frequencies("tags", map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Ensure query is taken into account def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) - self.assertEqual(f['music'], 2) - self.assertEqual(f['actors'], 1) - self.assertEqual(f['watch'], 1) + self.assertEqual(set(["music", "actors", "watch"]), set(f.keys())) + self.assertEqual(f["music"], 2) + self.assertEqual(f["actors"], 1) + self.assertEqual(f["watch"], 1) - exec_js = BlogPost.objects(hits__gt=1).item_frequencies('tags') - map_reduce = BlogPost.objects( - hits__gt=1).item_frequencies('tags', map_reduce=True) + exec_js = BlogPost.objects(hits__gt=1).item_frequencies("tags") + map_reduce = BlogPost.objects(hits__gt=1).item_frequencies( + "tags", map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) # Check that normalization works def test_assertions(f): - self.assertAlmostEqual(f['music'], 3.0 / 8.0) - self.assertAlmostEqual(f['actors'], 2.0 / 8.0) - self.assertAlmostEqual(f['watch'], 2.0 / 8.0) - self.assertAlmostEqual(f['film'], 1.0 / 8.0) + self.assertAlmostEqual(f["music"], 3.0 / 8.0) + self.assertAlmostEqual(f["actors"], 2.0 / 8.0) + self.assertAlmostEqual(f["watch"], 2.0 / 8.0) + self.assertAlmostEqual(f["film"], 1.0 / 8.0) - exec_js = BlogPost.objects.item_frequencies('tags', normalize=True) + exec_js = BlogPost.objects.item_frequencies("tags", normalize=True) map_reduce = BlogPost.objects.item_frequencies( - 'tags', normalize=True, map_reduce=True) + "tags", normalize=True, map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) @@ -2966,8 +3002,8 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(f[1], 1) self.assertEqual(f[2], 2) - exec_js = BlogPost.objects.item_frequencies('hits') - map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) + exec_js = BlogPost.objects.item_frequencies("hits") + map_reduce = BlogPost.objects.item_frequencies("hits", map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) @@ -2987,57 +3023,56 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() doc = Person(name="Guido") - doc.phone = Phone(number='62-3331-1656') + doc.phone = Phone(number="62-3331-1656") 
doc.save() doc = Person(name="Marr") - doc.phone = Phone(number='62-3331-1656') + doc.phone = Phone(number="62-3331-1656") doc.save() doc = Person(name="WP Junior") - doc.phone = Phone(number='62-3332-1656') + doc.phone = Phone(number="62-3332-1656") doc.save() def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual( - set(['62-3331-1656', '62-3332-1656']), set(f.keys())) - self.assertEqual(f['62-3331-1656'], 2) - self.assertEqual(f['62-3332-1656'], 1) + self.assertEqual(set(["62-3331-1656", "62-3332-1656"]), set(f.keys())) + self.assertEqual(f["62-3331-1656"], 2) + self.assertEqual(f["62-3332-1656"], 1) - exec_js = Person.objects.item_frequencies('phone.number') - map_reduce = Person.objects.item_frequencies( - 'phone.number', map_reduce=True) + exec_js = Person.objects.item_frequencies("phone.number") + map_reduce = Person.objects.item_frequencies("phone.number", map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Ensure query is taken into account def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(['62-3331-1656']), set(f.keys())) - self.assertEqual(f['62-3331-1656'], 2) + self.assertEqual(set(["62-3331-1656"]), set(f.keys())) + self.assertEqual(f["62-3331-1656"], 2) - exec_js = Person.objects( - phone__number='62-3331-1656').item_frequencies('phone.number') - map_reduce = Person.objects( - phone__number='62-3331-1656').item_frequencies('phone.number', map_reduce=True) + exec_js = Person.objects(phone__number="62-3331-1656").item_frequencies( + "phone.number" + ) + map_reduce = Person.objects(phone__number="62-3331-1656").item_frequencies( + "phone.number", map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) # Check that normalization works def test_assertions(f): - self.assertEqual(f['62-3331-1656'], 2.0 / 3.0) - self.assertEqual(f['62-3332-1656'], 1.0 / 3.0) + self.assertEqual(f["62-3331-1656"], 2.0 / 3.0) + self.assertEqual(f["62-3332-1656"], 1.0 / 3.0) - exec_js = Person.objects.item_frequencies( - 'phone.number', normalize=True) + exec_js = Person.objects.item_frequencies("phone.number", normalize=True) map_reduce = Person.objects.item_frequencies( - 'phone.number', normalize=True, map_reduce=True) + "phone.number", normalize=True, map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) def test_item_frequencies_null_values(self): - class Person(Document): name = StringField() city = StringField() @@ -3047,16 +3082,15 @@ class QuerySetTest(unittest.TestCase): Person(name="Wilson Snr", city="CRB").save() Person(name="Wilson Jr").save() - freq = Person.objects.item_frequencies('city') - self.assertEqual(freq, {'CRB': 1.0, None: 1.0}) - freq = Person.objects.item_frequencies('city', normalize=True) - self.assertEqual(freq, {'CRB': 0.5, None: 0.5}) + freq = Person.objects.item_frequencies("city") + self.assertEqual(freq, {"CRB": 1.0, None: 1.0}) + freq = Person.objects.item_frequencies("city", normalize=True) + self.assertEqual(freq, {"CRB": 0.5, None: 0.5}) - freq = Person.objects.item_frequencies('city', map_reduce=True) - self.assertEqual(freq, {'CRB': 1.0, None: 1.0}) - freq = Person.objects.item_frequencies( - 'city', normalize=True, map_reduce=True) - self.assertEqual(freq, {'CRB': 0.5, None: 0.5}) + freq = Person.objects.item_frequencies("city", map_reduce=True) + self.assertEqual(freq, {"CRB": 1.0, None: 1.0}) + freq = Person.objects.item_frequencies("city", normalize=True, map_reduce=True) + self.assertEqual(freq, {"CRB": 0.5, None: 0.5}) def 
test_item_frequencies_with_null_embedded(self): class Data(EmbeddedDocument): @@ -3080,11 +3114,11 @@ class QuerySetTest(unittest.TestCase): p.extra = Extra(tag="friend") p.save() - ot = Person.objects.item_frequencies('extra.tag', map_reduce=False) - self.assertEqual(ot, {None: 1.0, u'friend': 1.0}) + ot = Person.objects.item_frequencies("extra.tag", map_reduce=False) + self.assertEqual(ot, {None: 1.0, u"friend": 1.0}) - ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) - self.assertEqual(ot, {None: 1.0, u'friend': 1.0}) + ot = Person.objects.item_frequencies("extra.tag", map_reduce=True) + self.assertEqual(ot, {None: 1.0, u"friend": 1.0}) def test_item_frequencies_with_0_values(self): class Test(Document): @@ -3095,9 +3129,9 @@ class QuerySetTest(unittest.TestCase): t.val = 0 t.save() - ot = Test.objects.item_frequencies('val', map_reduce=True) + ot = Test.objects.item_frequencies("val", map_reduce=True) self.assertEqual(ot, {0: 1}) - ot = Test.objects.item_frequencies('val', map_reduce=False) + ot = Test.objects.item_frequencies("val", map_reduce=False) self.assertEqual(ot, {0: 1}) def test_item_frequencies_with_False_values(self): @@ -3109,9 +3143,9 @@ class QuerySetTest(unittest.TestCase): t.val = False t.save() - ot = Test.objects.item_frequencies('val', map_reduce=True) + ot = Test.objects.item_frequencies("val", map_reduce=True) self.assertEqual(ot, {False: 1}) - ot = Test.objects.item_frequencies('val', map_reduce=False) + ot = Test.objects.item_frequencies("val", map_reduce=False) self.assertEqual(ot, {False: 1}) def test_item_frequencies_normalize(self): @@ -3126,113 +3160,108 @@ class QuerySetTest(unittest.TestCase): for i in range(20): Test(val=2).save() - freqs = Test.objects.item_frequencies( - 'val', map_reduce=False, normalize=True) + freqs = Test.objects.item_frequencies("val", map_reduce=False, normalize=True) self.assertEqual(freqs, {1: 50.0 / 70, 2: 20.0 / 70}) - freqs = Test.objects.item_frequencies( - 'val', map_reduce=True, normalize=True) + freqs = Test.objects.item_frequencies("val", map_reduce=True, normalize=True) self.assertEqual(freqs, {1: 50.0 / 70, 2: 20.0 / 70}) def test_average(self): """Ensure that field can be averaged correctly. 
""" - self.Person(name='person', age=0).save() - self.assertEqual(int(self.Person.objects.average('age')), 0) + self.Person(name="person", age=0).save() + self.assertEqual(int(self.Person.objects.average("age")), 0) ages = [23, 54, 12, 94, 27] for i, age in enumerate(ages): - self.Person(name='test%s' % i, age=age).save() + self.Person(name="test%s" % i, age=age).save() avg = float(sum(ages)) / (len(ages) + 1) # take into account the 0 - self.assertAlmostEqual(int(self.Person.objects.average('age')), avg) + self.assertAlmostEqual(int(self.Person.objects.average("age")), avg) - self.Person(name='ageless person').save() - self.assertEqual(int(self.Person.objects.average('age')), avg) + self.Person(name="ageless person").save() + self.assertEqual(int(self.Person.objects.average("age")), avg) # dot notation - self.Person( - name='person meta', person_meta=self.PersonMeta(weight=0)).save() + self.Person(name="person meta", person_meta=self.PersonMeta(weight=0)).save() self.assertAlmostEqual( - int(self.Person.objects.average('person_meta.weight')), 0) + int(self.Person.objects.average("person_meta.weight")), 0 + ) for i, weight in enumerate(ages): self.Person( - name='test meta%i', person_meta=self.PersonMeta(weight=weight)).save() + name="test meta%i", person_meta=self.PersonMeta(weight=weight) + ).save() self.assertAlmostEqual( - int(self.Person.objects.average('person_meta.weight')), avg + int(self.Person.objects.average("person_meta.weight")), avg ) - self.Person(name='test meta none').save() - self.assertEqual( - int(self.Person.objects.average('person_meta.weight')), avg - ) + self.Person(name="test meta none").save() + self.assertEqual(int(self.Person.objects.average("person_meta.weight")), avg) # test summing over a filtered queryset over_50 = [a for a in ages if a >= 50] avg = float(sum(over_50)) / len(over_50) - self.assertEqual( - self.Person.objects.filter(age__gte=50).average('age'), - avg - ) + self.assertEqual(self.Person.objects.filter(age__gte=50).average("age"), avg) def test_sum(self): """Ensure that field can be summed over correctly. """ ages = [23, 54, 12, 94, 27] for i, age in enumerate(ages): - self.Person(name='test%s' % i, age=age).save() + self.Person(name="test%s" % i, age=age).save() - self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.assertEqual(self.Person.objects.sum("age"), sum(ages)) - self.Person(name='ageless person').save() - self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.Person(name="ageless person").save() + self.assertEqual(self.Person.objects.sum("age"), sum(ages)) for i, age in enumerate(ages): - self.Person(name='test meta%s' % - i, person_meta=self.PersonMeta(weight=age)).save() + self.Person( + name="test meta%s" % i, person_meta=self.PersonMeta(weight=age) + ).save() - self.assertEqual( - self.Person.objects.sum('person_meta.weight'), sum(ages) - ) + self.assertEqual(self.Person.objects.sum("person_meta.weight"), sum(ages)) - self.Person(name='weightless person').save() - self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.Person(name="weightless person").save() + self.assertEqual(self.Person.objects.sum("age"), sum(ages)) # test summing over a filtered queryset self.assertEqual( - self.Person.objects.filter(age__gte=50).sum('age'), - sum([a for a in ages if a >= 50]) + self.Person.objects.filter(age__gte=50).sum("age"), + sum([a for a in ages if a >= 50]), ) def test_sum_over_db_field(self): """Ensure that a field mapped to a db field with a different name can be summed over correctly. 
""" + class UserVisit(Document): - num_visits = IntField(db_field='visits') + num_visits = IntField(db_field="visits") UserVisit.drop_collection() UserVisit.objects.create(num_visits=10) UserVisit.objects.create(num_visits=5) - self.assertEqual(UserVisit.objects.sum('num_visits'), 15) + self.assertEqual(UserVisit.objects.sum("num_visits"), 15) def test_average_over_db_field(self): """Ensure that a field mapped to a db field with a different name can have its average computed correctly. """ + class UserVisit(Document): - num_visits = IntField(db_field='visits') + num_visits = IntField(db_field="visits") UserVisit.drop_collection() UserVisit.objects.create(num_visits=20) UserVisit.objects.create(num_visits=10) - self.assertEqual(UserVisit.objects.average('num_visits'), 15) + self.assertEqual(UserVisit.objects.average("num_visits"), 15) def test_embedded_average(self): class Pay(EmbeddedDocument): @@ -3240,17 +3269,16 @@ class QuerySetTest(unittest.TestCase): class Doc(Document): name = StringField() - pay = EmbeddedDocumentField( - Pay) + pay = EmbeddedDocumentField(Pay) Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(value=150)).save() - Doc(name='Isabella Luanna', pay=Pay(value=530)).save() - Doc(name='Tayza mariana', pay=Pay(value=165)).save() - Doc(name='Eliana Costa', pay=Pay(value=115)).save() + Doc(name="Wilson Junior", pay=Pay(value=150)).save() + Doc(name="Isabella Luanna", pay=Pay(value=530)).save() + Doc(name="Tayza mariana", pay=Pay(value=165)).save() + Doc(name="Eliana Costa", pay=Pay(value=115)).save() - self.assertEqual(Doc.objects.average('pay.value'), 240) + self.assertEqual(Doc.objects.average("pay.value"), 240) def test_embedded_array_average(self): class Pay(EmbeddedDocument): @@ -3262,12 +3290,12 @@ class QuerySetTest(unittest.TestCase): Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(values=[150, 100])).save() - Doc(name='Isabella Luanna', pay=Pay(values=[530, 100])).save() - Doc(name='Tayza mariana', pay=Pay(values=[165, 100])).save() - Doc(name='Eliana Costa', pay=Pay(values=[115, 100])).save() + Doc(name="Wilson Junior", pay=Pay(values=[150, 100])).save() + Doc(name="Isabella Luanna", pay=Pay(values=[530, 100])).save() + Doc(name="Tayza mariana", pay=Pay(values=[165, 100])).save() + Doc(name="Eliana Costa", pay=Pay(values=[115, 100])).save() - self.assertEqual(Doc.objects.average('pay.values'), 170) + self.assertEqual(Doc.objects.average("pay.values"), 170) def test_array_average(self): class Doc(Document): @@ -3280,7 +3308,7 @@ class QuerySetTest(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - self.assertEqual(Doc.objects.average('values'), 170) + self.assertEqual(Doc.objects.average("values"), 170) def test_embedded_sum(self): class Pay(EmbeddedDocument): @@ -3292,12 +3320,12 @@ class QuerySetTest(unittest.TestCase): Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(value=150)).save() - Doc(name='Isabella Luanna', pay=Pay(value=530)).save() - Doc(name='Tayza mariana', pay=Pay(value=165)).save() - Doc(name='Eliana Costa', pay=Pay(value=115)).save() + Doc(name="Wilson Junior", pay=Pay(value=150)).save() + Doc(name="Isabella Luanna", pay=Pay(value=530)).save() + Doc(name="Tayza mariana", pay=Pay(value=165)).save() + Doc(name="Eliana Costa", pay=Pay(value=115)).save() - self.assertEqual(Doc.objects.sum('pay.value'), 960) + self.assertEqual(Doc.objects.sum("pay.value"), 960) def test_embedded_array_sum(self): class Pay(EmbeddedDocument): @@ -3309,12 +3337,12 @@ class QuerySetTest(unittest.TestCase): 
Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(values=[150, 100])).save() - Doc(name='Isabella Luanna', pay=Pay(values=[530, 100])).save() - Doc(name='Tayza mariana', pay=Pay(values=[165, 100])).save() - Doc(name='Eliana Costa', pay=Pay(values=[115, 100])).save() + Doc(name="Wilson Junior", pay=Pay(values=[150, 100])).save() + Doc(name="Isabella Luanna", pay=Pay(values=[530, 100])).save() + Doc(name="Tayza mariana", pay=Pay(values=[165, 100])).save() + Doc(name="Eliana Costa", pay=Pay(values=[115, 100])).save() - self.assertEqual(Doc.objects.sum('pay.values'), 1360) + self.assertEqual(Doc.objects.sum("pay.values"), 1360) def test_array_sum(self): class Doc(Document): @@ -3327,21 +3355,24 @@ class QuerySetTest(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - self.assertEqual(Doc.objects.sum('values'), 1360) + self.assertEqual(Doc.objects.sum("values"), 1360) def test_distinct(self): """Ensure that the QuerySet.distinct method works. """ - self.Person(name='Mr Orange', age=20).save() - self.Person(name='Mr White', age=20).save() - self.Person(name='Mr Orange', age=30).save() - self.Person(name='Mr Pink', age=30).save() - self.assertEqual(set(self.Person.objects.distinct('name')), - set(['Mr Orange', 'Mr White', 'Mr Pink'])) - self.assertEqual(set(self.Person.objects.distinct('age')), - set([20, 30])) - self.assertEqual(set(self.Person.objects(age=30).distinct('name')), - set(['Mr Orange', 'Mr Pink'])) + self.Person(name="Mr Orange", age=20).save() + self.Person(name="Mr White", age=20).save() + self.Person(name="Mr Orange", age=30).save() + self.Person(name="Mr Pink", age=30).save() + self.assertEqual( + set(self.Person.objects.distinct("name")), + set(["Mr Orange", "Mr White", "Mr Pink"]), + ) + self.assertEqual(set(self.Person.objects.distinct("age")), set([20, 30])) + self.assertEqual( + set(self.Person.objects(age=30).distinct("name")), + set(["Mr Orange", "Mr Pink"]), + ) def test_distinct_handles_references(self): class Foo(Document): @@ -3367,53 +3398,58 @@ class QuerySetTest(unittest.TestCase): content = StringField() is_active = BooleanField(default=True) - meta = {'indexes': [ - {'fields': ['$title', "$content"], - 'default_language': 'portuguese', - 'weights': {'title': 10, 'content': 2} - } - ]} + meta = { + "indexes": [ + { + "fields": ["$title", "$content"], + "default_language": "portuguese", + "weights": {"title": 10, "content": 2}, + } + ] + } News.drop_collection() info = News.objects._collection.index_information() - self.assertIn('title_text_content_text', info) - self.assertIn('textIndexVersion', info['title_text_content_text']) + self.assertIn("title_text_content_text", info) + self.assertIn("textIndexVersion", info["title_text_content_text"]) - News(title="Neymar quebrou a vertebra", - content="O Brasil sofre com a perda de Neymar").save() + News( + title="Neymar quebrou a vertebra", + content="O Brasil sofre com a perda de Neymar", + ).save() - News(title="Brasil passa para as quartas de finais", - content="Com o brasil nas quartas de finais teremos um " - "jogo complicado com a alemanha").save() + News( + title="Brasil passa para as quartas de finais", + content="Com o brasil nas quartas de finais teremos um " + "jogo complicado com a alemanha", + ).save() - count = News.objects.search_text( - "neymar", language="portuguese").count() + count = News.objects.search_text("neymar", language="portuguese").count() self.assertEqual(count, 1) - count = News.objects.search_text( - "brasil -neymar").count() + count = 
News.objects.search_text("brasil -neymar").count() self.assertEqual(count, 1) - News(title=u"As eleições no Brasil já estão em planejamento", - content=u"A candidata dilma roussef já começa o teu planejamento", - is_active=False).save() + News( + title=u"As eleições no Brasil já estão em planejamento", + content=u"A candidata dilma roussef já começa o teu planejamento", + is_active=False, + ).save() - new = News.objects(is_active=False).search_text( - "dilma", language="pt").first() + new = News.objects(is_active=False).search_text("dilma", language="pt").first() - query = News.objects(is_active=False).search_text( - "dilma", language="pt")._query + query = News.objects(is_active=False).search_text("dilma", language="pt")._query self.assertEqual( - query, {'$text': { - '$search': 'dilma', '$language': 'pt'}, - 'is_active': False}) + query, + {"$text": {"$search": "dilma", "$language": "pt"}, "is_active": False}, + ) self.assertFalse(new.is_active) - self.assertIn('dilma', new.content) - self.assertIn('planejamento', new.title) + self.assertIn("dilma", new.content) + self.assertIn("planejamento", new.title) query = News.objects.search_text("candidata") self.assertEqual(query._search_text, "candidata") @@ -3422,15 +3458,14 @@ class QuerySetTest(unittest.TestCase): self.assertIsInstance(new.get_text_score(), float) # count - query = News.objects.search_text('brasil').order_by('$text_score') + query = News.objects.search_text("brasil").order_by("$text_score") self.assertEqual(query._search_text, "brasil") self.assertEqual(query.count(), 3) - self.assertEqual(query._query, {'$text': {'$search': 'brasil'}}) + self.assertEqual(query._query, {"$text": {"$search": "brasil"}}) cursor_args = query._cursor_args - cursor_args_fields = cursor_args['projection'] - self.assertEqual( - cursor_args_fields, {'_text_score': {'$meta': 'textScore'}}) + cursor_args_fields = cursor_args["projection"] + self.assertEqual(cursor_args_fields, {"_text_score": {"$meta": "textScore"}}) text_scores = [i.get_text_score() for i in query] self.assertEqual(len(text_scores), 3) @@ -3440,20 +3475,19 @@ class QuerySetTest(unittest.TestCase): max_text_score = text_scores[0] # get item - item = News.objects.search_text( - 'brasil').order_by('$text_score').first() + item = News.objects.search_text("brasil").order_by("$text_score").first() self.assertEqual(item.get_text_score(), max_text_score) def test_distinct_handles_references_to_alias(self): - register_connection('testdb', 'mongoenginetest2') + register_connection("testdb", "mongoenginetest2") class Foo(Document): bar = ReferenceField("Bar") - meta = {'db_alias': 'testdb'} + meta = {"db_alias": "testdb"} class Bar(Document): text = StringField() - meta = {'db_alias': 'testdb'} + meta = {"db_alias": "testdb"} Bar.drop_collection() Foo.drop_collection() @@ -3469,8 +3503,9 @@ class QuerySetTest(unittest.TestCase): def test_distinct_handles_db_field(self): """Ensure that distinct resolves field name to db_field as expected. 
""" + class Product(Document): - product_id = IntField(db_field='pid') + product_id = IntField(db_field="pid") Product.drop_collection() @@ -3478,15 +3513,12 @@ class QuerySetTest(unittest.TestCase): Product(product_id=2).save() Product(product_id=1).save() - self.assertEqual(set(Product.objects.distinct('product_id')), - set([1, 2])) - self.assertEqual(set(Product.objects.distinct('pid')), - set([1, 2])) + self.assertEqual(set(Product.objects.distinct("product_id")), set([1, 2])) + self.assertEqual(set(Product.objects.distinct("pid")), set([1, 2])) Product.drop_collection() def test_distinct_ListField_EmbeddedDocumentField(self): - class Author(EmbeddedDocument): name = StringField() @@ -3524,8 +3556,8 @@ class QuerySetTest(unittest.TestCase): Book.drop_collection() - europe = Continent(continent_name='europe') - asia = Continent(continent_name='asia') + europe = Continent(continent_name="europe") + asia = Continent(continent_name="asia") scotland = Country(country_name="Scotland", continent=europe) tibet = Country(country_name="Tibet", continent=asia) @@ -3544,13 +3576,12 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(continent_list, [europe, asia]) def test_distinct_ListField_ReferenceField(self): - class Bar(Document): text = StringField() class Foo(Document): - bar = ReferenceField('Bar') - bar_lst = ListField(ReferenceField('Bar')) + bar = ReferenceField("Bar") + bar_lst = ListField(ReferenceField("Bar")) Bar.drop_collection() Foo.drop_collection() @@ -3569,6 +3600,7 @@ class QuerySetTest(unittest.TestCase): def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected. """ + class BlogPost(Document): tags = ListField(StringField()) deleted = BooleanField(default=False) @@ -3586,32 +3618,30 @@ class QuerySetTest(unittest.TestCase): @queryset_manager def music_posts(doc_cls, queryset, deleted=False): - return queryset(tags='music', - deleted=deleted).order_by('date') + return queryset(tags="music", deleted=deleted).order_by("date") BlogPost.drop_collection() - post1 = BlogPost(tags=['music', 'film']).save() - post2 = BlogPost(tags=['music']).save() - post3 = BlogPost(tags=['film', 'actors']).save() - post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save() + post1 = BlogPost(tags=["music", "film"]).save() + post2 = BlogPost(tags=["music"]).save() + post3 = BlogPost(tags=["film", "actors"]).save() + post4 = BlogPost(tags=["film", "actors", "music"], deleted=True).save() - self.assertEqual([p.id for p in BlogPost.objects()], - [post1.id, post2.id, post3.id]) - self.assertEqual([p.id for p in BlogPost.objects_1_arg()], - [post1.id, post2.id, post3.id]) - self.assertEqual([p.id for p in BlogPost.music_posts()], - [post1.id, post2.id]) + self.assertEqual( + [p.id for p in BlogPost.objects()], [post1.id, post2.id, post3.id] + ) + self.assertEqual( + [p.id for p in BlogPost.objects_1_arg()], [post1.id, post2.id, post3.id] + ) + self.assertEqual([p.id for p in BlogPost.music_posts()], [post1.id, post2.id]) - self.assertEqual([p.id for p in BlogPost.music_posts(True)], - [post4.id]) + self.assertEqual([p.id for p in BlogPost.music_posts(True)], [post4.id]) BlogPost.drop_collection() def test_custom_manager_overriding_objects_works(self): - class Foo(Document): - bar = StringField(default='bar') + bar = StringField(default="bar") active = BooleanField(default=False) @queryset_manager @@ -3635,9 +3665,8 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(1, Foo.objects.count()) def test_inherit_objects(self): - class 
Foo(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} active = BooleanField(default=True) @queryset_manager @@ -3652,9 +3681,8 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(0, Bar.objects.count()) def test_inherit_objects_override(self): - class Foo(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} active = BooleanField(default=True) @queryset_manager @@ -3662,7 +3690,6 @@ class QuerySetTest(unittest.TestCase): return queryset(active=True) class Bar(Foo): - @queryset_manager def objects(klass, queryset): return queryset(active=False) @@ -3675,12 +3702,13 @@ class QuerySetTest(unittest.TestCase): def test_query_value_conversion(self): """Ensure that query values are properly converted when necessary. """ + class BlogPost(Document): author = ReferenceField(self.Person) BlogPost.drop_collection() - person = self.Person(name='test', age=30) + person = self.Person(name="test", age=30) person.save() post = BlogPost(author=person) @@ -3701,14 +3729,15 @@ class QuerySetTest(unittest.TestCase): def test_update_value_conversion(self): """Ensure that values used in updates are converted before use. """ + class Group(Document): members = ListField(ReferenceField(self.Person)) Group.drop_collection() - user1 = self.Person(name='user1') + user1 = self.Person(name="user1") user1.save() - user2 = self.Person(name='user2') + user2 = self.Person(name="user2") user2.save() group = Group() @@ -3726,6 +3755,7 @@ class QuerySetTest(unittest.TestCase): def test_bulk(self): """Ensure bulk querying by object id returns a proper dict. """ + class BlogPost(Document): title = StringField() @@ -3764,13 +3794,13 @@ class QuerySetTest(unittest.TestCase): def test_custom_querysets(self): """Ensure that custom QuerySet classes may be used. 
""" - class CustomQuerySet(QuerySet): + class CustomQuerySet(QuerySet): def not_empty(self): return self.count() > 0 class Post(Document): - meta = {'queryset_class': CustomQuerySet} + meta = {"queryset_class": CustomQuerySet} Post.drop_collection() @@ -3787,7 +3817,6 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySet(QuerySet): - def not_empty(self): return self.count() > 0 @@ -3812,7 +3841,6 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySetManager(QuerySetManager): - @staticmethod def get_queryset(doc_cls, queryset): return queryset(is_published=True) @@ -3835,12 +3863,11 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySet(QuerySet): - def not_empty(self): return self.count() > 0 class Base(Document): - meta = {'abstract': True, 'queryset_class': CustomQuerySet} + meta = {"abstract": True, "queryset_class": CustomQuerySet} class Post(Base): pass @@ -3859,7 +3886,6 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySet(QuerySet): - def not_empty(self): return self.count() > 0 @@ -3867,7 +3893,7 @@ class QuerySetTest(unittest.TestCase): queryset_class = CustomQuerySet class Base(Document): - meta = {'abstract': True} + meta = {"abstract": True} objects = CustomQuerySetManager() class Post(Base): @@ -3891,10 +3917,13 @@ class QuerySetTest(unittest.TestCase): for i in range(10): Post(title="Post %s" % i).save() - self.assertEqual(5, Post.objects.limit(5).skip(5).count(with_limit_and_skip=True)) + self.assertEqual( + 5, Post.objects.limit(5).skip(5).count(with_limit_and_skip=True) + ) self.assertEqual( - 10, Post.objects.limit(5).skip(5).count(with_limit_and_skip=False)) + 10, Post.objects.limit(5).skip(5).count(with_limit_and_skip=False) + ) def test_count_and_none(self): """Test count works with None()""" @@ -3916,11 +3945,12 @@ class QuerySetTest(unittest.TestCase): class A(Document): b = ListField(EmbeddedDocumentField(B)) - self.assertEqual(A.objects(b=[{'c': 'c'}]).count(), 0) + self.assertEqual(A.objects(b=[{"c": "c"}]).count(), 0) def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ + class Post(Document): title = StringField() @@ -3937,6 +3967,7 @@ class QuerySetTest(unittest.TestCase): def test_order_then_filter(self): """Ensure that ordering still works after filtering. 
""" + class Number(Document): n = IntField() @@ -3946,14 +3977,15 @@ class QuerySetTest(unittest.TestCase): n1 = Number.objects.create(n=1) self.assertEqual(list(Number.objects), [n2, n1]) - self.assertEqual(list(Number.objects.order_by('n')), [n1, n2]) - self.assertEqual(list(Number.objects.order_by('n').filter()), [n1, n2]) + self.assertEqual(list(Number.objects.order_by("n")), [n1, n2]) + self.assertEqual(list(Number.objects.order_by("n").filter()), [n1, n2]) Number.drop_collection() def test_clone(self): """Ensure that cloning clones complex querysets """ + class Number(Document): n = IntField() @@ -3983,19 +4015,20 @@ class QuerySetTest(unittest.TestCase): def test_using(self): """Ensure that switching databases for a queryset is possible """ + class Number2(Document): n = IntField() Number2.drop_collection() - with switch_db(Number2, 'test2') as Number2: + with switch_db(Number2, "test2") as Number2: Number2.drop_collection() for i in range(1, 10): t = Number2(n=i) - t.switch_db('test2') + t.switch_db("test2") t.save() - self.assertEqual(len(Number2.objects.using('test2')), 9) + self.assertEqual(len(Number2.objects.using("test2")), 9) def test_unset_reference(self): class Comment(Document): @@ -4007,7 +4040,7 @@ class QuerySetTest(unittest.TestCase): Comment.drop_collection() Post.drop_collection() - comment = Comment.objects.create(text='test') + comment = Comment.objects.create(text="test") post = Post.objects.create(comment=comment) self.assertEqual(post.comment, comment) @@ -4020,7 +4053,7 @@ class QuerySetTest(unittest.TestCase): def test_order_works_with_custom_db_field_names(self): class Number(Document): - n = IntField(db_field='number') + n = IntField(db_field="number") Number.drop_collection() @@ -4028,13 +4061,14 @@ class QuerySetTest(unittest.TestCase): n1 = Number.objects.create(n=1) self.assertEqual(list(Number.objects), [n2, n1]) - self.assertEqual(list(Number.objects.order_by('n')), [n1, n2]) + self.assertEqual(list(Number.objects.order_by("n")), [n1, n2]) Number.drop_collection() def test_order_works_with_primary(self): """Ensure that order_by and primary work. """ + class Number(Document): n = IntField(primary_key=True) @@ -4044,28 +4078,29 @@ class QuerySetTest(unittest.TestCase): Number(n=2).save() Number(n=3).save() - numbers = [n.n for n in Number.objects.order_by('-n')] + numbers = [n.n for n in Number.objects.order_by("-n")] self.assertEqual([3, 2, 1], numbers) - numbers = [n.n for n in Number.objects.order_by('+n')] + numbers = [n.n for n in Number.objects.order_by("+n")] self.assertEqual([1, 2, 3], numbers) Number.drop_collection() def test_ensure_index(self): """Ensure that manual creation of indexes works. """ + class Comment(Document): message = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} - Comment.ensure_index('message') + Comment.ensure_index("message") info = Comment.objects._collection.index_information() - info = [(value['key'], - value.get('unique', False), - value.get('sparse', False)) - for key, value in iteritems(info)] - self.assertIn(([('_cls', 1), ('message', 1)], False, False), info) + info = [ + (value["key"], value.get("unique", False), value.get("sparse", False)) + for key, value in iteritems(info) + ] + self.assertIn(([("_cls", 1), ("message", 1)], False, False), info) def test_where(self): """Ensure that where clauses work. 
@@ -4084,23 +4119,25 @@ class QuerySetTest(unittest.TestCase): b.save() c.save() - query = IntPair.objects.where('this[~fielda] >= this[~fieldb]') - self.assertEqual( - 'this["fielda"] >= this["fieldb"]', query._where_clause) + query = IntPair.objects.where("this[~fielda] >= this[~fieldb]") + self.assertEqual('this["fielda"] >= this["fieldb"]', query._where_clause) results = list(query) self.assertEqual(2, len(results)) self.assertIn(a, results) self.assertIn(c, results) - query = IntPair.objects.where('this[~fielda] == this[~fieldb]') + query = IntPair.objects.where("this[~fielda] == this[~fieldb]") results = list(query) self.assertEqual(1, len(results)) self.assertIn(a, results) query = IntPair.objects.where( - 'function() { return this[~fielda] >= this[~fieldb] }') + "function() { return this[~fielda] >= this[~fieldb] }" + ) self.assertEqual( - 'function() { return this["fielda"] >= this["fieldb"] }', query._where_clause) + 'function() { return this["fielda"] >= this["fieldb"] }', + query._where_clause, + ) results = list(query) self.assertEqual(2, len(results)) self.assertIn(a, results) @@ -4110,7 +4147,6 @@ class QuerySetTest(unittest.TestCase): list(IntPair.objects.where(fielda__gte=3)) def test_scalar(self): - class Organization(Document): name = StringField() @@ -4127,13 +4163,13 @@ class QuerySetTest(unittest.TestCase): # Efficient way to get all unique organization names for a given # set of users (Pretend this has additional filtering.) - user_orgs = set(User.objects.scalar('organization')) - orgs = Organization.objects(id__in=user_orgs).scalar('name') - self.assertEqual(list(orgs), ['White House']) + user_orgs = set(User.objects.scalar("organization")) + orgs = Organization.objects(id__in=user_orgs).scalar("name") + self.assertEqual(list(orgs), ["White House"]) # Efficient for generating listings, too. 
- orgs = Organization.objects.scalar('name').in_bulk(list(user_orgs)) - user_map = User.objects.scalar('name', 'organization') + orgs = Organization.objects.scalar("name").in_bulk(list(user_orgs)) + user_map = User.objects.scalar("name", "organization") user_listing = [(user, orgs[org]) for user, org in user_map] self.assertEqual([("Bob Dole", "White House")], user_listing) @@ -4148,7 +4184,7 @@ class QuerySetTest(unittest.TestCase): TestDoc(x=20, y=False).save() TestDoc(x=30, y=True).save() - plist = list(TestDoc.objects.scalar('x', 'y')) + plist = list(TestDoc.objects.scalar("x", "y")) self.assertEqual(len(plist), 3) self.assertEqual(plist[0], (10, True)) @@ -4166,21 +4202,16 @@ class QuerySetTest(unittest.TestCase): UserDoc(name="Eliana", age=37).save() UserDoc(name="Tayza", age=15).save() - ulist = list(UserDoc.objects.scalar('name', 'age')) + ulist = list(UserDoc.objects.scalar("name", "age")) - self.assertEqual(ulist, [ - (u'Wilson Jr', 19), - (u'Wilson', 43), - (u'Eliana', 37), - (u'Tayza', 15)]) + self.assertEqual( + ulist, + [(u"Wilson Jr", 19), (u"Wilson", 43), (u"Eliana", 37), (u"Tayza", 15)], + ) - ulist = list(UserDoc.objects.scalar('name').order_by('age')) + ulist = list(UserDoc.objects.scalar("name").order_by("age")) - self.assertEqual(ulist, [ - (u'Tayza'), - (u'Wilson Jr'), - (u'Eliana'), - (u'Wilson')]) + self.assertEqual(ulist, [(u"Tayza"), (u"Wilson Jr"), (u"Eliana"), (u"Wilson")]) def test_scalar_embedded(self): class Profile(EmbeddedDocument): @@ -4197,30 +4228,45 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() - Person(profile=Profile(name="Wilson Jr", age=19), - locale=Locale(city="Corumba-GO", country="Brazil")).save() + Person( + profile=Profile(name="Wilson Jr", age=19), + locale=Locale(city="Corumba-GO", country="Brazil"), + ).save() - Person(profile=Profile(name="Gabriel Falcao", age=23), - locale=Locale(city="New York", country="USA")).save() + Person( + profile=Profile(name="Gabriel Falcao", age=23), + locale=Locale(city="New York", country="USA"), + ).save() - Person(profile=Profile(name="Lincoln de souza", age=28), - locale=Locale(city="Belo Horizonte", country="Brazil")).save() + Person( + profile=Profile(name="Lincoln de souza", age=28), + locale=Locale(city="Belo Horizonte", country="Brazil"), + ).save() - Person(profile=Profile(name="Walter cruz", age=30), - locale=Locale(city="Brasilia", country="Brazil")).save() + Person( + profile=Profile(name="Walter cruz", age=30), + locale=Locale(city="Brasilia", country="Brazil"), + ).save() self.assertEqual( - list(Person.objects.order_by( - 'profile__age').scalar('profile__name')), - [u'Wilson Jr', u'Gabriel Falcao', u'Lincoln de souza', u'Walter cruz']) + list(Person.objects.order_by("profile__age").scalar("profile__name")), + [u"Wilson Jr", u"Gabriel Falcao", u"Lincoln de souza", u"Walter cruz"], + ) - ulist = list(Person.objects.order_by('locale.city') - .scalar('profile__name', 'profile__age', 'locale__city')) - self.assertEqual(ulist, - [(u'Lincoln de souza', 28, u'Belo Horizonte'), - (u'Walter cruz', 30, u'Brasilia'), - (u'Wilson Jr', 19, u'Corumba-GO'), - (u'Gabriel Falcao', 23, u'New York')]) + ulist = list( + Person.objects.order_by("locale.city").scalar( + "profile__name", "profile__age", "locale__city" + ) + ) + self.assertEqual( + ulist, + [ + (u"Lincoln de souza", 28, u"Belo Horizonte"), + (u"Walter cruz", 30, u"Brasilia"), + (u"Wilson Jr", 19, u"Corumba-GO"), + (u"Gabriel Falcao", 23, u"New York"), + ], + ) def test_scalar_decimal(self): from decimal import Decimal @@ -4230,10 
+4276,10 @@ class QuerySetTest(unittest.TestCase): rating = DecimalField() Person.drop_collection() - Person(name="Wilson Jr", rating=Decimal('1.0')).save() + Person(name="Wilson Jr", rating=Decimal("1.0")).save() - ulist = list(Person.objects.scalar('name', 'rating')) - self.assertEqual(ulist, [(u'Wilson Jr', Decimal('1.0'))]) + ulist = list(Person.objects.scalar("name", "rating")) + self.assertEqual(ulist, [(u"Wilson Jr", Decimal("1.0"))]) def test_scalar_reference_field(self): class State(Document): @@ -4251,8 +4297,8 @@ class QuerySetTest(unittest.TestCase): Person(name="Wilson JR", state=s1).save() - plist = list(Person.objects.scalar('name', 'state')) - self.assertEqual(plist, [(u'Wilson JR', s1)]) + plist = list(Person.objects.scalar("name", "state")) + self.assertEqual(plist, [(u"Wilson JR", s1)]) def test_scalar_generic_reference_field(self): class State(Document): @@ -4270,8 +4316,8 @@ class QuerySetTest(unittest.TestCase): Person(name="Wilson JR", state=s1).save() - plist = list(Person.objects.scalar('name', 'state')) - self.assertEqual(plist, [(u'Wilson JR', s1)]) + plist = list(Person.objects.scalar("name", "state")) + self.assertEqual(plist, [(u"Wilson JR", s1)]) def test_generic_reference_field_with_only_and_as_pymongo(self): class TestPerson(Document): @@ -4284,26 +4330,32 @@ class QuerySetTest(unittest.TestCase): TestPerson.drop_collection() TestActivity.drop_collection() - person = TestPerson(name='owner') + person = TestPerson(name="owner") person.save() - a1 = TestActivity(name='a1', owner=person) + a1 = TestActivity(name="a1", owner=person) a1.save() - activity = TestActivity.objects(owner=person).scalar('id', 'owner').no_dereference().first() + activity = ( + TestActivity.objects(owner=person) + .scalar("id", "owner") + .no_dereference() + .first() + ) self.assertEqual(activity[0], a1.pk) - self.assertEqual(activity[1]['_ref'], DBRef('test_person', person.pk)) + self.assertEqual(activity[1]["_ref"], DBRef("test_person", person.pk)) - activity = TestActivity.objects(owner=person).only('id', 'owner')[0] + activity = TestActivity.objects(owner=person).only("id", "owner")[0] self.assertEqual(activity.pk, a1.pk) self.assertEqual(activity.owner, person) - activity = TestActivity.objects(owner=person).only('id', 'owner').as_pymongo().first() - self.assertEqual(activity['_id'], a1.pk) - self.assertTrue(activity['owner']['_ref'], DBRef('test_person', person.pk)) + activity = ( + TestActivity.objects(owner=person).only("id", "owner").as_pymongo().first() + ) + self.assertEqual(activity["_id"], a1.pk) + self.assertTrue(activity["owner"]["_ref"], DBRef("test_person", person.pk)) def test_scalar_db_field(self): - class TestDoc(Document): x = IntField() y = BooleanField() @@ -4314,14 +4366,13 @@ class QuerySetTest(unittest.TestCase): TestDoc(x=20, y=False).save() TestDoc(x=30, y=True).save() - plist = list(TestDoc.objects.scalar('x', 'y')) + plist = list(TestDoc.objects.scalar("x", "y")) self.assertEqual(len(plist), 3) self.assertEqual(plist[0], (10, True)) self.assertEqual(plist[1], (20, False)) self.assertEqual(plist[2], (30, True)) def test_scalar_primary_key(self): - class SettingValue(Document): key = StringField(primary_key=True) value = StringField() @@ -4330,8 +4381,8 @@ class QuerySetTest(unittest.TestCase): s = SettingValue(key="test", value="test value") s.save() - val = SettingValue.objects.scalar('key', 'value') - self.assertEqual(list(val), [('test', 'test value')]) + val = SettingValue.objects.scalar("key", "value") + self.assertEqual(list(val), [("test", "test 
value")]) def test_scalar_cursor_behaviour(self): """Ensure that a query returns a valid set of results. @@ -4342,83 +4393,94 @@ class QuerySetTest(unittest.TestCase): person2.save() # Find all people in the collection - people = self.Person.objects.scalar('name') + people = self.Person.objects.scalar("name") self.assertEqual(people.count(), 2) results = list(people) self.assertEqual(results[0], "User A") self.assertEqual(results[1], "User B") # Use a query to filter the people found to just person1 - people = self.Person.objects(age=20).scalar('name') + people = self.Person.objects(age=20).scalar("name") self.assertEqual(people.count(), 1) person = people.next() self.assertEqual(person, "User A") # Test limit - people = list(self.Person.objects.limit(1).scalar('name')) + people = list(self.Person.objects.limit(1).scalar("name")) self.assertEqual(len(people), 1) - self.assertEqual(people[0], 'User A') + self.assertEqual(people[0], "User A") # Test skip - people = list(self.Person.objects.skip(1).scalar('name')) + people = list(self.Person.objects.skip(1).scalar("name")) self.assertEqual(len(people), 1) - self.assertEqual(people[0], 'User B') + self.assertEqual(people[0], "User B") person3 = self.Person(name="User C", age=40) person3.save() # Test slice limit - people = list(self.Person.objects[:2].scalar('name')) + people = list(self.Person.objects[:2].scalar("name")) self.assertEqual(len(people), 2) - self.assertEqual(people[0], 'User A') - self.assertEqual(people[1], 'User B') + self.assertEqual(people[0], "User A") + self.assertEqual(people[1], "User B") # Test slice skip - people = list(self.Person.objects[1:].scalar('name')) + people = list(self.Person.objects[1:].scalar("name")) self.assertEqual(len(people), 2) - self.assertEqual(people[0], 'User B') - self.assertEqual(people[1], 'User C') + self.assertEqual(people[0], "User B") + self.assertEqual(people[1], "User C") # Test slice limit and skip - people = list(self.Person.objects[1:2].scalar('name')) + people = list(self.Person.objects[1:2].scalar("name")) self.assertEqual(len(people), 1) - self.assertEqual(people[0], 'User B') + self.assertEqual(people[0], "User B") - people = list(self.Person.objects[1:1].scalar('name')) + people = list(self.Person.objects[1:1].scalar("name")) self.assertEqual(len(people), 0) # Test slice out of range - people = list(self.Person.objects.scalar('name')[80000:80001]) + people = list(self.Person.objects.scalar("name")[80000:80001]) self.assertEqual(len(people), 0) # Test larger slice __repr__ self.Person.objects.delete() for i in range(55): - self.Person(name='A%s' % i, age=i).save() + self.Person(name="A%s" % i, age=i).save() - self.assertEqual(self.Person.objects.scalar('name').count(), 55) + self.assertEqual(self.Person.objects.scalar("name").count(), 55) self.assertEqual( - "A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) + "A0", "%s" % self.Person.objects.order_by("name").scalar("name").first() + ) self.assertEqual( - "A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) + "A0", "%s" % self.Person.objects.scalar("name").order_by("name")[0] + ) if six.PY3: - self.assertEqual("['A1', 'A2']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[1:3]) - self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[51:53]) + self.assertEqual( + "['A1', 'A2']", + "%s" % self.Person.objects.order_by("age").scalar("name")[1:3], + ) + self.assertEqual( + "['A51', 'A52']", + "%s" % 
self.Person.objects.order_by("age").scalar("name")[51:53], + ) else: - self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[1:3]) - self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[51:53]) + self.assertEqual( + "[u'A1', u'A2']", + "%s" % self.Person.objects.order_by("age").scalar("name")[1:3], + ) + self.assertEqual( + "[u'A51', u'A52']", + "%s" % self.Person.objects.order_by("age").scalar("name")[51:53], + ) # with_id and in_bulk - person = self.Person.objects.order_by('name').first() - self.assertEqual("A0", "%s" % - self.Person.objects.scalar('name').with_id(person.id)) + person = self.Person.objects.order_by("name").first() + self.assertEqual( + "A0", "%s" % self.Person.objects.scalar("name").with_id(person.id) + ) - pks = self.Person.objects.order_by('age').scalar('pk')[1:3] - names = self.Person.objects.scalar('name').in_bulk(list(pks)).values() + pks = self.Person.objects.order_by("age").scalar("pk")[1:3] + names = self.Person.objects.scalar("name").in_bulk(list(pks)).values() if six.PY3: expected = "['A1', 'A2']" else: @@ -4430,51 +4492,61 @@ class QuerySetTest(unittest.TestCase): shape = StringField() color = StringField() thick = BooleanField() - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Bar(Document): foo = ListField(EmbeddedDocumentField(Foo)) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} Bar.drop_collection() - b1 = Bar(foo=[Foo(shape="square", color="purple", thick=False), - Foo(shape="circle", color="red", thick=True)]) + b1 = Bar( + foo=[ + Foo(shape="square", color="purple", thick=False), + Foo(shape="circle", color="red", thick=True), + ] + ) b1.save() - b2 = Bar(foo=[Foo(shape="square", color="red", thick=True), - Foo(shape="circle", color="purple", thick=False)]) + b2 = Bar( + foo=[ + Foo(shape="square", color="red", thick=True), + Foo(shape="circle", color="purple", thick=False), + ] + ) b2.save() - b3 = Bar(foo=[Foo(shape="square", thick=True), - Foo(shape="circle", color="purple", thick=False)]) + b3 = Bar( + foo=[ + Foo(shape="square", thick=True), + Foo(shape="circle", color="purple", thick=False), + ] + ) b3.save() - ak = list( - Bar.objects(foo__match={'shape': "square", "color": "purple"})) + ak = list(Bar.objects(foo__match={"shape": "square", "color": "purple"})) self.assertEqual([b1], ak) - ak = list( - Bar.objects(foo__elemMatch={'shape': "square", "color": "purple"})) + ak = list(Bar.objects(foo__elemMatch={"shape": "square", "color": "purple"})) self.assertEqual([b1], ak) ak = list(Bar.objects(foo__match=Foo(shape="square", color="purple"))) self.assertEqual([b1], ak) ak = list( - Bar.objects(foo__elemMatch={'shape': "square", "color__exists": True})) + Bar.objects(foo__elemMatch={"shape": "square", "color__exists": True}) + ) + self.assertEqual([b1, b2], ak) + + ak = list(Bar.objects(foo__match={"shape": "square", "color__exists": True})) self.assertEqual([b1, b2], ak) ak = list( - Bar.objects(foo__match={'shape': "square", "color__exists": True})) - self.assertEqual([b1, b2], ak) - - ak = list( - Bar.objects(foo__elemMatch={'shape': "square", "color__exists": False})) + Bar.objects(foo__elemMatch={"shape": "square", "color__exists": False}) + ) self.assertEqual([b3], ak) - ak = list( - Bar.objects(foo__match={'shape': "square", "color__exists": False})) + ak = list(Bar.objects(foo__match={"shape": "square", "color__exists": False})) self.assertEqual([b3], ak) def test_upsert_includes_cls(self): @@ 
-4485,24 +4557,25 @@ class QuerySetTest(unittest.TestCase): test = StringField() Test.drop_collection() - Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertNotIn('_cls', Test._collection.find_one()) + Test.objects(test="foo").update_one(upsert=True, set__test="foo") + self.assertNotIn("_cls", Test._collection.find_one()) class Test(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} test = StringField() Test.drop_collection() - Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertIn('_cls', Test._collection.find_one()) + Test.objects(test="foo").update_one(upsert=True, set__test="foo") + self.assertIn("_cls", Test._collection.find_one()) def test_update_upsert_looks_like_a_digit(self): class MyDoc(DynamicDocument): pass + MyDoc.drop_collection() self.assertEqual(1, MyDoc.objects.update_one(upsert=True, inc__47=1)) - self.assertEqual(MyDoc.objects.get()['47'], 1) + self.assertEqual(MyDoc.objects.get()["47"], 1) def test_dictfield_key_looks_like_a_digit(self): """Only should work with DictField even if they have numeric keys.""" @@ -4511,86 +4584,84 @@ class QuerySetTest(unittest.TestCase): test = DictField() MyDoc.drop_collection() - doc = MyDoc(test={'47': 1}) + doc = MyDoc(test={"47": 1}) doc.save() - self.assertEqual(MyDoc.objects.only('test__47').get().test['47'], 1) + self.assertEqual(MyDoc.objects.only("test__47").get().test["47"], 1) def test_read_preference(self): class Bar(Document): txt = StringField() - meta = { - 'indexes': ['txt'] - } + meta = {"indexes": ["txt"]} Bar.drop_collection() bars = list(Bar.objects(read_preference=ReadPreference.PRIMARY)) self.assertEqual([], bars) - self.assertRaises(TypeError, Bar.objects, read_preference='Primary') + self.assertRaises(TypeError, Bar.objects, read_preference="Primary") # read_preference as a kwarg bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) + self.assertEqual( + bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED + ) # read_preference as a query set method bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) + self.assertEqual( + bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED + ) # read_preference after skip - bars = Bar.objects.skip(1) \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + bars = Bar.objects.skip(1).read_preference(ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) + self.assertEqual( + bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED + ) # read_preference after limit - bars = Bar.objects.limit(1) \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - 
self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + bars = Bar.objects.limit(1).read_preference(ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) + self.assertEqual( + bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED + ) # read_preference after order_by - bars = Bar.objects.order_by('txt') \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + bars = Bar.objects.order_by("txt").read_preference( + ReadPreference.SECONDARY_PREFERRED + ) + self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) + self.assertEqual( + bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED + ) # read_preference after hint - bars = Bar.objects.hint([('txt', 1)]) \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + bars = Bar.objects.hint([("txt", 1)]).read_preference( + ReadPreference.SECONDARY_PREFERRED + ) + self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) + self.assertEqual( + bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED + ) def test_read_preference_aggregation_framework(self): class Bar(Document): txt = StringField() - meta = { - 'indexes': ['txt'] - } + meta = {"indexes": ["txt"]} + # Aggregates with read_preference - bars = Bar.objects \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) \ - .aggregate() - self.assertEqual(bars._CommandCursor__collection.read_preference, - ReadPreference.SECONDARY_PREFERRED) + bars = Bar.objects.read_preference( + ReadPreference.SECONDARY_PREFERRED + ).aggregate() + self.assertEqual( + bars._CommandCursor__collection.read_preference, + ReadPreference.SECONDARY_PREFERRED, + ) def test_json_simple(self): - class Embedded(EmbeddedDocument): string = StringField() @@ -4603,7 +4674,7 @@ class QuerySetTest(unittest.TestCase): Doc(string="Bye", embedded_field=Embedded(string="Bye")).save() Doc().save() - json_data = Doc.objects.to_json(sort_keys=True, separators=(',', ':')) + json_data = Doc.objects.to_json(sort_keys=True, separators=(",", ":")) doc_objects = list(Doc.objects) self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) @@ -4616,33 +4687,34 @@ class QuerySetTest(unittest.TestCase): pass class Doc(Document): - string_field = StringField(default='1') + string_field = StringField(default="1") int_field = IntField(default=1) float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.datetime.now) embedded_document_field = EmbeddedDocumentField( - EmbeddedDoc, default=lambda: EmbeddedDoc()) + EmbeddedDoc, default=lambda: EmbeddedDoc() + ) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=ObjectId) - reference_field = ReferenceField( - Simple, default=lambda: Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = 
ComplexDateTimeField(default=datetime.datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) generic_reference_field = GenericReferenceField( - default=lambda: Simple().save()) - sorted_list_field = SortedListField(IntField(), - default=lambda: [1, 2, 3]) + default=lambda: Simple().save() + ) + sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") geo_point_field = GeoPointField(default=lambda: [1, 2]) sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) generic_embedded_document_field = GenericEmbeddedDocumentField( - default=lambda: EmbeddedDoc()) + default=lambda: EmbeddedDoc() + ) Simple.drop_collection() Doc.drop_collection() @@ -4667,111 +4739,96 @@ class QuerySetTest(unittest.TestCase): User.drop_collection() - User.objects.create(id='Bob', name="Bob Dole", age=89, price=Decimal('1.11')) + User.objects.create(id="Bob", name="Bob Dole", age=89, price=Decimal("1.11")) User.objects.create( - id='Barak', + id="Barak", name="Barak Obama", age=51, - price=Decimal('2.22'), - last_login=LastLogin( - location='White House', - ip='104.107.108.116' - ) + price=Decimal("2.22"), + last_login=LastLogin(location="White House", ip="104.107.108.116"), ) results = User.objects.as_pymongo() + self.assertEqual(set(results[0].keys()), set(["_id", "name", "age", "price"])) self.assertEqual( - set(results[0].keys()), - set(['_id', 'name', 'age', 'price']) - ) - self.assertEqual( - set(results[1].keys()), - set(['_id', 'name', 'age', 'price', 'last_login']) + set(results[1].keys()), set(["_id", "name", "age", "price", "last_login"]) ) - results = User.objects.only('id', 'name').as_pymongo() - self.assertEqual(set(results[0].keys()), set(['_id', 'name'])) + results = User.objects.only("id", "name").as_pymongo() + self.assertEqual(set(results[0].keys()), set(["_id", "name"])) - users = User.objects.only('name', 'price').as_pymongo() + users = User.objects.only("name", "price").as_pymongo() results = list(users) self.assertIsInstance(results[0], dict) self.assertIsInstance(results[1], dict) - self.assertEqual(results[0]['name'], 'Bob Dole') - self.assertEqual(results[0]['price'], 1.11) - self.assertEqual(results[1]['name'], 'Barak Obama') - self.assertEqual(results[1]['price'], 2.22) + self.assertEqual(results[0]["name"], "Bob Dole") + self.assertEqual(results[0]["price"], 1.11) + self.assertEqual(results[1]["name"], "Barak Obama") + self.assertEqual(results[1]["price"], 2.22) - users = User.objects.only('name', 'last_login').as_pymongo() + users = User.objects.only("name", "last_login").as_pymongo() results = list(users) self.assertIsInstance(results[0], dict) self.assertIsInstance(results[1], dict) - self.assertEqual(results[0], { - '_id': 'Bob', - 'name': 'Bob Dole' - }) - self.assertEqual(results[1], { - '_id': 'Barak', - 'name': 'Barak Obama', - 'last_login': { - 'location': 'White House', - 'ip': '104.107.108.116' - } - }) + self.assertEqual(results[0], {"_id": "Bob", "name": "Bob Dole"}) + self.assertEqual( + results[1], + { + "_id": "Barak", + "name": "Barak Obama", + "last_login": {"location": "White House", "ip": "104.107.108.116"}, + }, + ) def test_as_pymongo_returns_cls_attribute_when_using_inheritance(self): class User(Document): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} User.drop_collection() user = User(name="Bob Dole").save() result = User.objects.as_pymongo().first() - self.assertEqual( - 
result, - { - '_cls': 'User', - '_id': user.id, - 'name': 'Bob Dole' - } - ) + self.assertEqual(result, {"_cls": "User", "_id": user.id, "name": "Bob Dole"}) def test_as_pymongo_json_limit_fields(self): - class User(Document): email = EmailField(unique=True, required=True) - password_hash = StringField( - db_field='password_hash', required=True) - password_salt = StringField( - db_field='password_salt', required=True) + password_hash = StringField(db_field="password_hash", required=True) + password_salt = StringField(db_field="password_salt", required=True) User.drop_collection() - User(email="ross@example.com", password_salt="SomeSalt", - password_hash="SomeHash").save() + User( + email="ross@example.com", password_salt="SomeSalt", password_hash="SomeHash" + ).save() serialized_user = User.objects.exclude( - 'password_salt', 'password_hash').as_pymongo()[0] - self.assertEqual({'_id', 'email'}, set(serialized_user.keys())) + "password_salt", "password_hash" + ).as_pymongo()[0] + self.assertEqual({"_id", "email"}, set(serialized_user.keys())) serialized_user = User.objects.exclude( - 'id', 'password_salt', 'password_hash').to_json() + "id", "password_salt", "password_hash" + ).to_json() self.assertEqual('[{"email": "ross@example.com"}]', serialized_user) - serialized_user = User.objects.only('email').as_pymongo()[0] - self.assertEqual({'_id', 'email'}, set(serialized_user.keys())) + serialized_user = User.objects.only("email").as_pymongo()[0] + self.assertEqual({"_id", "email"}, set(serialized_user.keys())) - serialized_user = User.objects.exclude( - 'password_salt').only('email').as_pymongo()[0] - self.assertEqual({'_id', 'email'}, set(serialized_user.keys())) + serialized_user = ( + User.objects.exclude("password_salt").only("email").as_pymongo()[0] + ) + self.assertEqual({"_id", "email"}, set(serialized_user.keys())) - serialized_user = User.objects.exclude( - 'password_salt', 'id').only('email').as_pymongo()[0] - self.assertEqual({'email'}, set(serialized_user.keys())) + serialized_user = ( + User.objects.exclude("password_salt", "id").only("email").as_pymongo()[0] + ) + self.assertEqual({"email"}, set(serialized_user.keys())) - serialized_user = User.objects.exclude( - 'password_salt', 'id').only('email').to_json() - self.assertEqual('[{"email": "ross@example.com"}]', - serialized_user) + serialized_user = ( + User.objects.exclude("password_salt", "id").only("email").to_json() + ) + self.assertEqual('[{"email": "ross@example.com"}]', serialized_user) def test_only_after_count(self): """Test that only() works after count()""" @@ -4780,9 +4837,9 @@ class QuerySetTest(unittest.TestCase): name = StringField() age = IntField() address = StringField() + User.drop_collection() - user = User(name="User", age=50, - address="Moscow, Russia").save() + user = User(name="User", age=50, address="Moscow, Russia").save() user_queryset = User.objects(age=50) @@ -4796,7 +4853,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(result, {"_id": user.id, "name": "User", "age": 50}) def test_no_dereference(self): - class Organization(Document): name = StringField() @@ -4842,12 +4898,14 @@ class QuerySetTest(unittest.TestCase): self.assertFalse(qs_no_deref._auto_dereference) # Make sure the instance field is different from the class field - instance_org_field = user_no_deref._fields['organization'] + instance_org_field = user_no_deref._fields["organization"] self.assertIsNot(instance_org_field, cls_organization_field) self.assertFalse(instance_org_field._auto_dereference) 
self.assertIsInstance(user_no_deref.organization, DBRef) - self.assertTrue(cls_organization_field._auto_dereference, True) # Make sure the class Field wasn't altered + self.assertTrue( + cls_organization_field._auto_dereference, True + ) # Make sure the class Field wasn't altered def test_no_dereference_no_side_effect_on_existing_instance(self): # Relates to issue #1677 - ensures no regression of the bug @@ -4863,8 +4921,7 @@ class QuerySetTest(unittest.TestCase): Organization.drop_collection() org = Organization(name="whatever").save() - User(organization=org, - organization_gen=org).save() + User(organization=org, organization_gen=org).save() qs = User.objects() user = qs.first() @@ -4873,7 +4930,7 @@ class QuerySetTest(unittest.TestCase): user_no_deref = qs_no_deref.first() # ReferenceField - no_derf_org = user_no_deref.organization # was triggering the bug + no_derf_org = user_no_deref.organization # was triggering the bug self.assertIsInstance(no_derf_org, DBRef) self.assertIsInstance(user.organization, Organization) @@ -4883,7 +4940,6 @@ class QuerySetTest(unittest.TestCase): self.assertIsInstance(user.organization_gen, Organization) def test_no_dereference_embedded_doc(self): - class User(Document): name = StringField() @@ -4906,17 +4962,15 @@ class QuerySetTest(unittest.TestCase): member = Member(name="Flash", user=user) - company = Organization(name="Mongo Inc", - ceo=user, - member=member, - admins=[user], - members=[member]) + company = Organization( + name="Mongo Inc", ceo=user, member=member, admins=[user], members=[member] + ) company.save() org = Organization.objects().no_dereference().first() - self.assertNotEqual(id(org._fields['admins']), id(Organization.admins)) - self.assertFalse(org._fields['admins']._auto_dereference) + self.assertNotEqual(id(org._fields["admins"]), id(Organization.admins)) + self.assertFalse(org._fields["admins"]._auto_dereference) admin = org.admins[0] self.assertIsInstance(admin, DBRef) @@ -4981,14 +5035,14 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() qs = Person.objects.no_cache() - self.assertEqual(repr(qs), '[]') + self.assertEqual(repr(qs), "[]") def test_no_cached_on_a_cached_queryset_raise_error(self): class Person(Document): name = StringField() Person.drop_collection() - Person(name='a').save() + Person(name="a").save() qs = Person.objects() _ = list(qs) with self.assertRaises(OperationError) as ctx_err: @@ -5008,7 +5062,6 @@ class QuerySetTest(unittest.TestCase): self.assertIsInstance(qs, QuerySet) def test_cache_not_cloned(self): - class User(Document): name = StringField() @@ -5020,7 +5073,7 @@ class QuerySetTest(unittest.TestCase): User(name="Alice").save() User(name="Bob").save() - users = User.objects.all().order_by('name') + users = User.objects.all().order_by("name") self.assertEqual("%s" % users, "[, ]") self.assertEqual(2, len(users._result_cache)) @@ -5030,6 +5083,7 @@ class QuerySetTest(unittest.TestCase): def test_no_cache(self): """Ensure you can add meta data to file""" + class Noddy(Document): fields = DictField() @@ -5063,7 +5117,7 @@ class QuerySetTest(unittest.TestCase): def test_nested_queryset_iterator(self): # Try iterating the same queryset twice, nested. 
- names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George'] + names = ["Alice", "Bob", "Chuck", "David", "Eric", "Francis", "George"] class User(Document): name = StringField() @@ -5076,7 +5130,7 @@ class QuerySetTest(unittest.TestCase): for name in names: User(name=name).save() - users = User.objects.all().order_by('name') + users = User.objects.all().order_by("name") outer_count = 0 inner_count = 0 inner_total_count = 0 @@ -5114,7 +5168,7 @@ class QuerySetTest(unittest.TestCase): x = IntField() y = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class B(A): z = IntField() @@ -5151,6 +5205,7 @@ class QuerySetTest(unittest.TestCase): def test_query_generic_embedded_document(self): """Ensure that querying sub field on generic_embedded_field works """ + class A(EmbeddedDocument): a_name = StringField() @@ -5161,19 +5216,16 @@ class QuerySetTest(unittest.TestCase): document = GenericEmbeddedDocumentField(choices=(A, B)) Doc.drop_collection() - Doc(document=A(a_name='A doc')).save() - Doc(document=B(b_name='B doc')).save() + Doc(document=A(a_name="A doc")).save() + Doc(document=B(b_name="B doc")).save() # Using raw in filter working fine - self.assertEqual(Doc.objects( - __raw__={'document.a_name': 'A doc'}).count(), 1) - self.assertEqual(Doc.objects( - __raw__={'document.b_name': 'B doc'}).count(), 1) - self.assertEqual(Doc.objects(document__a_name='A doc').count(), 1) - self.assertEqual(Doc.objects(document__b_name='B doc').count(), 1) + self.assertEqual(Doc.objects(__raw__={"document.a_name": "A doc"}).count(), 1) + self.assertEqual(Doc.objects(__raw__={"document.b_name": "B doc"}).count(), 1) + self.assertEqual(Doc.objects(document__a_name="A doc").count(), 1) + self.assertEqual(Doc.objects(document__b_name="B doc").count(), 1) def test_query_reference_to_custom_pk_doc(self): - class A(Document): id = StringField(primary_key=True) @@ -5183,7 +5235,7 @@ class QuerySetTest(unittest.TestCase): A.drop_collection() B.drop_collection() - a = A.objects.create(id='custom_id') + a = A.objects.create(id="custom_id") B.objects.create(a=a) self.assertEqual(B.objects.count(), 1) @@ -5191,13 +5243,10 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(B.objects.get(a=a.id).a, a) def test_cls_query_in_subclassed_docs(self): - class Animal(Document): name = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class Dog(Animal): pass @@ -5205,21 +5254,23 @@ class QuerySetTest(unittest.TestCase): class Cat(Animal): pass - self.assertEqual(Animal.objects(name='Charlie')._query, { - 'name': 'Charlie', - '_cls': {'$in': ('Animal', 'Animal.Dog', 'Animal.Cat')} - }) - self.assertEqual(Dog.objects(name='Charlie')._query, { - 'name': 'Charlie', - '_cls': 'Animal.Dog' - }) - self.assertEqual(Cat.objects(name='Charlie')._query, { - 'name': 'Charlie', - '_cls': 'Animal.Cat' - }) + self.assertEqual( + Animal.objects(name="Charlie")._query, + { + "name": "Charlie", + "_cls": {"$in": ("Animal", "Animal.Dog", "Animal.Cat")}, + }, + ) + self.assertEqual( + Dog.objects(name="Charlie")._query, + {"name": "Charlie", "_cls": "Animal.Dog"}, + ) + self.assertEqual( + Cat.objects(name="Charlie")._query, + {"name": "Charlie", "_cls": "Animal.Cat"}, + ) def test_can_have_field_same_name_as_query_operator(self): - class Size(Document): name = StringField() @@ -5236,7 +5287,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(Example.objects(size__in=[instance_size]).count(), 1) def test_cursor_in_an_if_stmt(self): - class 
Test(Document): test_field = StringField() @@ -5244,23 +5294,23 @@ class QuerySetTest(unittest.TestCase): queryset = Test.objects if queryset: - raise AssertionError('Empty cursor returns True') + raise AssertionError("Empty cursor returns True") test = Test() - test.test_field = 'test' + test.test_field = "test" test.save() queryset = Test.objects if not test: - raise AssertionError('Cursor has data and returned False') + raise AssertionError("Cursor has data and returned False") queryset.next() if not queryset: - raise AssertionError('Cursor has data and it must returns True,' - ' even in the last item.') + raise AssertionError( + "Cursor has data and it must returns True, even in the last item." + ) def test_bool_performance(self): - class Person(Document): name = StringField() @@ -5273,10 +5323,11 @@ class QuerySetTest(unittest.TestCase): pass self.assertEqual(q, 1) - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] - self.assertEqual(op['nreturned'], 1) + self.assertEqual(op["nreturned"], 1) def test_bool_with_ordering(self): ORDER_BY_KEY, CMD_QUERY_KEY = get_key_compat(self.mongodb_version) @@ -5289,26 +5340,28 @@ class QuerySetTest(unittest.TestCase): Person(name="Test").save() # Check that bool(queryset) does not uses the orderby - qs = Person.objects.order_by('name') + qs = Person.objects.order_by("name") with query_counter() as q: if bool(qs): pass - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] self.assertNotIn(ORDER_BY_KEY, op[CMD_QUERY_KEY]) # Check that normal query uses orderby - qs2 = Person.objects.order_by('name') + qs2 = Person.objects.order_by("name") with query_counter() as q: for x in qs2: pass - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] self.assertIn(ORDER_BY_KEY, op[CMD_QUERY_KEY]) @@ -5317,9 +5370,7 @@ class QuerySetTest(unittest.TestCase): class Person(Document): name = StringField() - meta = { - 'ordering': ['name'] - } + meta = {"ordering": ["name"]} Person.drop_collection() @@ -5332,15 +5383,20 @@ class QuerySetTest(unittest.TestCase): if Person.objects: pass - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] - self.assertNotIn('$orderby', op[CMD_QUERY_KEY], - 'BaseQuerySet must remove orderby from meta in boolen test') + self.assertNotIn( + "$orderby", + op[CMD_QUERY_KEY], + "BaseQuerySet must remove orderby from meta in boolen test", + ) - self.assertEqual(Person.objects.first().name, 'A') - self.assertTrue(Person.objects._has_data(), - 'Cursor has data and returned False') + self.assertEqual(Person.objects.first().name, "A") + self.assertTrue( + Person.objects._has_data(), "Cursor has data and returned False" + ) def test_queryset_aggregation_framework(self): class Person(Document): @@ -5355,40 +5411,44 @@ class QuerySetTest(unittest.TestCase): Person.objects.insert([p1, p2, p3]) data = Person.objects(age__lte=22).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - {'_id': p2.pk, 'name': "WILSON JUNIOR"} - ]) - - 
data = Person.objects(age__lte=22).order_by('-name').aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + self.assertEqual( + list(data), + [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ], ) - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"}, - {'_id': p1.pk, 'name': "ISABELLA LUANNA"} - ]) + data = ( + Person.objects(age__lte=22) + .order_by("-name") + .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) + ) - data = Person.objects(age__gte=17, age__lte=40).order_by('-age').aggregate({ - '$group': { - '_id': None, - 'total': {'$sum': 1}, - 'avg': {'$avg': '$age'} - } - }) - self.assertEqual(list(data), [ - {'_id': None, 'avg': 29, 'total': 2} - ]) + self.assertEqual( + list(data), + [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + ], + ) - data = Person.objects().aggregate({'$match': {'name': 'Isabella Luanna'}}) - self.assertEqual(list(data), [ - {u'_id': p1.pk, - u'age': 16, - u'name': u'Isabella Luanna'}] - ) + data = ( + Person.objects(age__gte=17, age__lte=40) + .order_by("-age") + .aggregate( + {"$group": {"_id": None, "total": {"$sum": 1}, "avg": {"$avg": "$age"}}} + ) + ) + self.assertEqual(list(data), [{"_id": None, "avg": 29, "total": 2}]) + + data = Person.objects().aggregate({"$match": {"name": "Isabella Luanna"}}) + self.assertEqual( + list(data), [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}] + ) def test_queryset_aggregation_with_skip(self): class Person(Document): @@ -5403,13 +5463,16 @@ class QuerySetTest(unittest.TestCase): Person.objects.insert([p1, p2, p3]) data = Person.objects.skip(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"}, - {'_id': p3.pk, 'name': "SANDRA MARA"} - ]) + self.assertEqual( + list(data), + [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ], + ) def test_queryset_aggregation_with_limit(self): class Person(Document): @@ -5424,12 +5487,10 @@ class QuerySetTest(unittest.TestCase): Person.objects.insert([p1, p2, p3]) data = Person.objects.limit(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"} - ]) + self.assertEqual(list(data), [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]) def test_queryset_aggregation_with_sort(self): class Person(Document): @@ -5443,15 +5504,18 @@ class QuerySetTest(unittest.TestCase): p3 = Person(name="Sandra Mara", age=37) Person.objects.insert([p1, p2, p3]) - data = Person.objects.order_by('name').aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + data = Person.objects.order_by("name").aggregate( + {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - {'_id': p3.pk, 'name': "SANDRA MARA"}, - {'_id': p2.pk, 'name': "WILSON JUNIOR"} - ]) + self.assertEqual( + list(data), + [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ], + ) def test_queryset_aggregation_with_skip_with_limit(self): class Person(Document): @@ -5466,18 +5530,18 @@ class QuerySetTest(unittest.TestCase): Person.objects.insert([p1, p2, p3]) data = list( - Person.objects.skip(1).limit(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) + 
Person.objects.skip(1) + .limit(1) + .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"}, - ]) + self.assertEqual(list(data), [{"_id": p2.pk, "name": "WILSON JUNIOR"}]) # Make sure limit/skip chaining order has no impact - data2 = Person.objects.limit(1).skip(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + data2 = ( + Person.objects.limit(1) + .skip(1) + .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) self.assertEqual(data, list(data2)) @@ -5494,34 +5558,40 @@ class QuerySetTest(unittest.TestCase): p3 = Person(name="Sandra Mara", age=37) Person.objects.insert([p1, p2, p3]) - data = Person.objects.order_by('name').limit(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + data = ( + Person.objects.order_by("name") + .limit(2) + .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - {'_id': p3.pk, 'name': "SANDRA MARA"} - ]) + self.assertEqual( + list(data), + [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ], + ) # Verify adding limit/skip steps works as expected - data = Person.objects.order_by('name').limit(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}}, - {'$limit': 1}, + data = ( + Person.objects.order_by("name") + .limit(2) + .aggregate({"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}) ) - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - ]) + self.assertEqual(list(data), [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]) - data = Person.objects.order_by('name').limit(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}}, - {'$skip': 1}, - {'$limit': 1}, + data = ( + Person.objects.order_by("name") + .limit(2) + .aggregate( + {"$project": {"name": {"$toUpper": "$name"}}}, + {"$skip": 1}, + {"$limit": 1}, + ) ) - self.assertEqual(list(data), [ - {'_id': p3.pk, 'name': "SANDRA MARA"}, - ]) + self.assertEqual(list(data), [{"_id": p3.pk, "name": "SANDRA MARA"}]) def test_queryset_aggregation_with_sort_with_skip(self): class Person(Document): @@ -5535,13 +5605,13 @@ class QuerySetTest(unittest.TestCase): p3 = Person(name="Sandra Mara", age=37) Person.objects.insert([p1, p2, p3]) - data = Person.objects.order_by('name').skip(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + data = ( + Person.objects.order_by("name") + .skip(2) + .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"} - ]) + self.assertEqual(list(data), [{"_id": p2.pk, "name": "WILSON JUNIOR"}]) def test_queryset_aggregation_with_sort_with_skip_with_limit(self): class Person(Document): @@ -5555,35 +5625,42 @@ class QuerySetTest(unittest.TestCase): p3 = Person(name="Sandra Mara", age=37) Person.objects.insert([p1, p2, p3]) - data = Person.objects.order_by('name').skip(1).limit(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} + data = ( + Person.objects.order_by("name") + .skip(1) + .limit(1) + .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(list(data), [ - {'_id': p3.pk, 'name': "SANDRA MARA"} - ]) + self.assertEqual(list(data), [{"_id": p3.pk, "name": "SANDRA MARA"}]) def test_delete_count(self): [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] - self.assertEqual(self.Person.objects().delete(), 3) # test ordinary QuerySey delete count + self.assertEqual( + 
self.Person.objects().delete(), 3 + ) # test ordinary QuerySey delete count [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] - self.assertEqual(self.Person.objects().skip(1).delete(), 2) # test Document delete with existing documents + self.assertEqual( + self.Person.objects().skip(1).delete(), 2 + ) # test Document delete with existing documents self.Person.objects().delete() - self.assertEqual(self.Person.objects().skip(1).delete(), 0) # test Document delete without existing documents + self.assertEqual( + self.Person.objects().skip(1).delete(), 0 + ) # test Document delete without existing documents def test_max_time_ms(self): # 778: max_time_ms can get only int or None as input - self.assertRaises(TypeError, - self.Person.objects(name="name").max_time_ms, - 'not a number') + self.assertRaises( + TypeError, self.Person.objects(name="name").max_time_ms, "not a number" + ) def test_subclass_field_query(self): class Animal(Document): is_mamal = BooleanField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Cat(Animal): whiskers_length = FloatField() @@ -5605,14 +5682,15 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() - Person._get_collection().insert_one({'name': 'a', 'id': ''}) + Person._get_collection().insert_one({"name": "a", "id": ""}) for p in Person.objects(): - self.assertEqual(p.name, 'a') + self.assertEqual(p.name, "a") def test_len_during_iteration(self): """Tests that calling len on a queyset during iteration doesn't stop paging. """ + class Data(Document): pass @@ -5645,6 +5723,7 @@ class QuerySetTest(unittest.TestCase): in a given queryset even if there are multiple iterations of it happening at the same time. """ + class Data(Document): pass @@ -5663,6 +5742,7 @@ class QuerySetTest(unittest.TestCase): """Ensure that using the `__in` operator on a non-iterable raises an error. """ + class User(Document): name = StringField() @@ -5673,9 +5753,10 @@ class QuerySetTest(unittest.TestCase): User.drop_collection() BlogPost.drop_collection() - author = User.objects.create(name='Test User') - post = BlogPost.objects.create(content='Had a good coffee today...', - authors=[author]) + author = User.objects.create(name="Test User") + post = BlogPost.objects.create( + content="Had a good coffee today...", authors=[author] + ) # Make sure using `__in` with a list works blog_posts = BlogPost.objects(authors__in=[author]) @@ -5699,5 +5780,5 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 4) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index 2c2d018c..cfcd8c22 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -9,25 +9,29 @@ __all__ = ("TransformTest",) class TransformTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") def test_transform_query(self): """Ensure that the _transform_query function operates correctly. 
""" - self.assertEqual(transform.query(name='test', age=30), - {'name': 'test', 'age': 30}) - self.assertEqual(transform.query(age__lt=30), - {'age': {'$lt': 30}}) - self.assertEqual(transform.query(age__gt=20, age__lt=50), - {'age': {'$gt': 20, '$lt': 50}}) - self.assertEqual(transform.query(age=20, age__gt=50), - {'$and': [{'age': {'$gt': 50}}, {'age': 20}]}) - self.assertEqual(transform.query(friend__age__gte=30), - {'friend.age': {'$gte': 30}}) - self.assertEqual(transform.query(name__exists=True), - {'name': {'$exists': True}}) + self.assertEqual( + transform.query(name="test", age=30), {"name": "test", "age": 30} + ) + self.assertEqual(transform.query(age__lt=30), {"age": {"$lt": 30}}) + self.assertEqual( + transform.query(age__gt=20, age__lt=50), {"age": {"$gt": 20, "$lt": 50}} + ) + self.assertEqual( + transform.query(age=20, age__gt=50), + {"$and": [{"age": {"$gt": 50}}, {"age": 20}]}, + ) + self.assertEqual( + transform.query(friend__age__gte=30), {"friend.age": {"$gte": 30}} + ) + self.assertEqual( + transform.query(name__exists=True), {"name": {"$exists": True}} + ) def test_transform_update(self): class LisDoc(Document): @@ -46,7 +50,11 @@ class TransformTest(unittest.TestCase): DicDoc().save() doc = Doc().save() - for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): + for k, v in ( + ("set", "$set"), + ("set_on_insert", "$setOnInsert"), + ("push", "$push"), + ): update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) self.assertIsInstance(update[v]["dictField.test"], dict) @@ -57,55 +65,61 @@ class TransformTest(unittest.TestCase): update = transform.update(DicDoc, pull__dictField__test=doc) self.assertIsInstance(update["$pull"]["dictField"]["test"], dict) - update = transform.update(LisDoc, pull__foo__in=['a']) - self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}}) + update = transform.update(LisDoc, pull__foo__in=["a"]) + self.assertEqual(update, {"$pull": {"foo": {"$in": ["a"]}}}) def test_transform_update_push(self): """Ensure the differences in behvaior between 'push' and 'push_all'""" + class BlogPost(Document): tags = ListField(StringField()) - update = transform.update(BlogPost, push__tags=['mongo', 'db']) - self.assertEqual(update, {'$push': {'tags': ['mongo', 'db']}}) + update = transform.update(BlogPost, push__tags=["mongo", "db"]) + self.assertEqual(update, {"$push": {"tags": ["mongo", "db"]}}) - update = transform.update(BlogPost, push_all__tags=['mongo', 'db']) - self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}}) + update = transform.update(BlogPost, push_all__tags=["mongo", "db"]) + self.assertEqual(update, {"$push": {"tags": {"$each": ["mongo", "db"]}}}) def test_transform_update_no_operator_default_to_set(self): """Ensure the differences in behvaior between 'push' and 'push_all'""" + class BlogPost(Document): tags = ListField(StringField()) - update = transform.update(BlogPost, tags=['mongo', 'db']) - self.assertEqual(update, {'$set': {'tags': ['mongo', 'db']}}) + update = transform.update(BlogPost, tags=["mongo", "db"]) + self.assertEqual(update, {"$set": {"tags": ["mongo", "db"]}}) def test_query_field_name(self): """Ensure that the correct field name is used when querying. 
""" + class Comment(EmbeddedDocument): - content = StringField(db_field='commentContent') + content = StringField(db_field="commentContent") class BlogPost(Document): - title = StringField(db_field='postTitle') - comments = ListField(EmbeddedDocumentField(Comment), - db_field='postComments') + title = StringField(db_field="postTitle") + comments = ListField( + EmbeddedDocumentField(Comment), db_field="postComments" + ) BlogPost.drop_collection() - data = {'title': 'Post 1', 'comments': [Comment(content='test')]} + data = {"title": "Post 1", "comments": [Comment(content="test")]} post = BlogPost(**data) post.save() - self.assertIn('postTitle', BlogPost.objects(title=data['title'])._query) - self.assertFalse('title' in - BlogPost.objects(title=data['title'])._query) - self.assertEqual(BlogPost.objects(title=data['title']).count(), 1) + self.assertIn("postTitle", BlogPost.objects(title=data["title"])._query) + self.assertFalse("title" in BlogPost.objects(title=data["title"])._query) + self.assertEqual(BlogPost.objects(title=data["title"]).count(), 1) - self.assertIn('_id', BlogPost.objects(pk=post.id)._query) + self.assertIn("_id", BlogPost.objects(pk=post.id)._query) self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) - self.assertIn('postComments.commentContent', BlogPost.objects(comments__content='test')._query) - self.assertEqual(BlogPost.objects(comments__content='test').count(), 1) + self.assertIn( + "postComments.commentContent", + BlogPost.objects(comments__content="test")._query, + ) + self.assertEqual(BlogPost.objects(comments__content="test").count(), 1) BlogPost.drop_collection() @@ -113,18 +127,19 @@ class TransformTest(unittest.TestCase): """Ensure that the correct "primary key" field name is used when querying """ + class BlogPost(Document): - title = StringField(primary_key=True, db_field='postTitle') + title = StringField(primary_key=True, db_field="postTitle") BlogPost.drop_collection() - data = {'title': 'Post 1'} + data = {"title": "Post 1"} post = BlogPost(**data) post.save() - self.assertIn('_id', BlogPost.objects(pk=data['title'])._query) - self.assertIn('_id', BlogPost.objects(title=data['title'])._query) - self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1) + self.assertIn("_id", BlogPost.objects(pk=data["title"])._query) + self.assertIn("_id", BlogPost.objects(title=data["title"])._query) + self.assertEqual(BlogPost.objects(pk=data["title"]).count(), 1) BlogPost.drop_collection() @@ -156,78 +171,125 @@ class TransformTest(unittest.TestCase): """ Test raw plays nicely """ + class Foo(Document): name = StringField() a = StringField() b = StringField() c = StringField() - meta = { - 'allow_inheritance': False - } + meta = {"allow_inheritance": False} - query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query - self.assertEqual(query, {'$nor': [{'name': 'bar'}]}) + query = Foo.objects(__raw__={"$nor": [{"name": "bar"}]})._query + self.assertEqual(query, {"$nor": [{"name": "bar"}]}) - q1 = {'$or': [{'a': 1}, {'b': 1}]} + q1 = {"$or": [{"a": 1}, {"b": 1}]} query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query - self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1}) + self.assertEqual(query, {"$or": [{"a": 1}, {"b": 1}], "c": 1}) def test_raw_and_merging(self): class Doc(Document): - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} - raw_query = Doc.objects(__raw__={ - 'deleted': False, - 'scraped': 'yes', - '$nor': [ - {'views.extracted': 'no'}, - {'attachments.views.extracted': 'no'} - ] - })._query + raw_query = 
Doc.objects( + __raw__={ + "deleted": False, + "scraped": "yes", + "$nor": [ + {"views.extracted": "no"}, + {"attachments.views.extracted": "no"}, + ], + } + )._query - self.assertEqual(raw_query, { - 'deleted': False, - 'scraped': 'yes', - '$nor': [ - {'views.extracted': 'no'}, - {'attachments.views.extracted': 'no'} - ] - }) + self.assertEqual( + raw_query, + { + "deleted": False, + "scraped": "yes", + "$nor": [ + {"views.extracted": "no"}, + {"attachments.views.extracted": "no"}, + ], + }, + ) def test_geojson_PointField(self): class Location(Document): loc = PointField() update = transform.update(Location, set__loc=[1, 2]) - self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) + self.assertEqual( + update, {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} + ) - update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]}) - self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) + update = transform.update( + Location, set__loc={"type": "Point", "coordinates": [1, 2]} + ) + self.assertEqual( + update, {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} + ) def test_geojson_LineStringField(self): class Location(Document): line = LineStringField() update = transform.update(Location, set__line=[[1, 2], [2, 2]]) - self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) + self.assertEqual( + update, + {"$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}, + ) - update = transform.update(Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]}) - self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) + update = transform.update( + Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]} + ) + self.assertEqual( + update, + {"$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}, + ) def test_geojson_PolygonField(self): class Location(Document): poly = PolygonField() - update = transform.update(Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) - self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) + update = transform.update( + Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]] + ) + self.assertEqual( + update, + { + "$set": { + "poly": { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + } + } + }, + ) - update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) - self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) + update = transform.update( + Location, + set__poly={ + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + }, + ) + self.assertEqual( + update, + { + "$set": { + "poly": { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + } + } + }, + ) def test_type(self): class Doc(Document): df = DynamicField() + Doc(df=True).save() Doc(df=7).save() Doc(df="df").save() @@ -252,7 +314,7 @@ class TransformTest(unittest.TestCase): self.assertEqual(1, Doc.objects(item__type__="axe").count()) self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count()) - Doc.objects(id=doc.id).update(set__item__type__='sword') + Doc.objects(id=doc.id).update(set__item__type__="sword") self.assertEqual(1, 
Doc.objects(item__type__="sword").count()) self.assertEqual(0, Doc.objects(item__type__="axe").count()) @@ -272,6 +334,7 @@ class TransformTest(unittest.TestCase): Test added to check pull operation in update for EmbeddedDocumentListField which is inside a EmbeddedDocumentField """ + class Word(EmbeddedDocument): word = StringField() index = IntField() @@ -284,18 +347,27 @@ class TransformTest(unittest.TestCase): title = StringField() content = EmbeddedDocumentField(SubDoc) - word = Word(word='abc', index=1) + word = Word(word="abc", index=1) update = transform.update(MainDoc, pull__content__text=word) - self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}}) + self.assertEqual( + update, {"$pull": {"content.text": SON([("word", u"abc"), ("index", 1)])}} + ) - update = transform.update(MainDoc, pull__content__heading='xyz') - self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}}) + update = transform.update(MainDoc, pull__content__heading="xyz") + self.assertEqual(update, {"$pull": {"content.heading": "xyz"}}) - update = transform.update(MainDoc, pull__content__text__word__in=['foo', 'bar']) - self.assertEqual(update, {'$pull': {'content.text': {'word': {'$in': ['foo', 'bar']}}}}) + update = transform.update(MainDoc, pull__content__text__word__in=["foo", "bar"]) + self.assertEqual( + update, {"$pull": {"content.text": {"word": {"$in": ["foo", "bar"]}}}} + ) - update = transform.update(MainDoc, pull__content__text__word__nin=['foo', 'bar']) - self.assertEqual(update, {'$pull': {'content.text': {'word': {'$nin': ['foo', 'bar']}}}}) + update = transform.update( + MainDoc, pull__content__text__word__nin=["foo", "bar"] + ) + self.assertEqual( + update, {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}} + ) -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 22d274a8..0a22416f 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -12,14 +12,13 @@ __all__ = ("QTest",) class QTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") class Person(Document): name = StringField() age = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} Person.drop_collection() self.Person = Person @@ -30,22 +29,22 @@ class QTest(unittest.TestCase): q1 = Q() q2 = Q(age__gte=18) q3 = Q() - q4 = Q(name='test') + q4 = Q(name="test") q5 = Q() class Person(Document): name = StringField() age = IntField() - query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]} + query = {"$or": [{"age": {"$gte": 18}}, {"name": "test"}]} self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query) - query = {'age': {'$gte': 18}, 'name': 'test'} + query = {"age": {"$gte": 18}, "name": "test"} self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query) def test_q_with_dbref(self): """Ensure Q objects handle DBRefs correctly""" - connect(db='mongoenginetest') + connect(db="mongoenginetest") class User(Document): pass @@ -62,15 +61,18 @@ class QTest(unittest.TestCase): def test_and_combination(self): """Ensure that Q-objects correctly AND together. 
""" + class TestDoc(Document): x = IntField() y = StringField() query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) - self.assertEqual(query, {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]}) + self.assertEqual(query, {"$and": [{"x": {"$lt": 7}}, {"x": {"$lt": 3}}]}) query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) - self.assertEqual(query, {'$and': [{'y': "a"}, {'x': {'$lt': 7}}, {'x': {'$lt': 3}}]}) + self.assertEqual( + query, {"$and": [{"y": "a"}, {"x": {"$lt": 7}}, {"x": {"$lt": 3}}]} + ) # Check normal cases work without an error query = Q(x__lt=7) & Q(x__gt=3) @@ -78,69 +80,74 @@ class QTest(unittest.TestCase): q1 = Q(x__lt=7) q2 = Q(x__gt=3) query = (q1 & q2).to_query(TestDoc) - self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}}) + self.assertEqual(query, {"x": {"$lt": 7, "$gt": 3}}) # More complex nested example - query = Q(x__lt=100) & Q(y__ne='NotMyString') - query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100) + query = Q(x__lt=100) & Q(y__ne="NotMyString") + query &= Q(y__in=["a", "b", "c"]) & Q(x__gt=-100) mongo_query = { - 'x': {'$lt': 100, '$gt': -100}, - 'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']}, + "x": {"$lt": 100, "$gt": -100}, + "y": {"$ne": "NotMyString", "$in": ["a", "b", "c"]}, } self.assertEqual(query.to_query(TestDoc), mongo_query) def test_or_combination(self): """Ensure that Q-objects correctly OR together. """ + class TestDoc(Document): x = IntField() q1 = Q(x__lt=3) q2 = Q(x__gt=7) query = (q1 | q2).to_query(TestDoc) - self.assertEqual(query, { - '$or': [ - {'x': {'$lt': 3}}, - {'x': {'$gt': 7}}, - ] - }) + self.assertEqual(query, {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]}) def test_and_or_combination(self): """Ensure that Q-objects handle ANDing ORed components. """ + class TestDoc(Document): x = IntField() y = BooleanField() TestDoc.drop_collection() - query = (Q(x__gt=0) | Q(x__exists=False)) + query = Q(x__gt=0) | Q(x__exists=False) query &= Q(x__lt=100) - self.assertEqual(query.to_query(TestDoc), {'$and': [ - {'$or': [{'x': {'$gt': 0}}, - {'x': {'$exists': False}}]}, - {'x': {'$lt': 100}}] - }) + self.assertEqual( + query.to_query(TestDoc), + { + "$and": [ + {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, + {"x": {"$lt": 100}}, + ] + }, + ) - q1 = (Q(x__gt=0) | Q(x__exists=False)) - q2 = (Q(x__lt=100) | Q(y=True)) + q1 = Q(x__gt=0) | Q(x__exists=False) + q2 = Q(x__lt=100) | Q(y=True) query = (q1 & q2).to_query(TestDoc) TestDoc(x=101).save() TestDoc(x=10).save() TestDoc(y=True).save() - self.assertEqual(query, { - '$and': [ - {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]}, - {'$or': [{'x': {'$lt': 100}}, {'y': True}]} - ] - }) + self.assertEqual( + query, + { + "$and": [ + {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, + {"$or": [{"x": {"$lt": 100}}, {"y": True}]}, + ] + }, + ) self.assertEqual(2, TestDoc.objects(q1 & q2).count()) def test_or_and_or_combination(self): """Ensure that Q-objects handle ORing ANDed ORed components. 
:) """ + class TestDoc(Document): x = IntField() y = BooleanField() @@ -151,18 +158,29 @@ class QTest(unittest.TestCase): TestDoc(x=99, y=False).save() TestDoc(x=101, y=False).save() - q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))) - q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) + q1 = Q(x__gt=0) & (Q(y=True) | Q(y__exists=False)) + q2 = Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)) query = (q1 | q2).to_query(TestDoc) - self.assertEqual(query, { - '$or': [ - {'$and': [{'x': {'$gt': 0}}, - {'$or': [{'y': True}, {'y': {'$exists': False}}]}]}, - {'$and': [{'x': {'$lt': 100}}, - {'$or': [{'y': False}, {'y': {'$exists': False}}]}]} - ] - }) + self.assertEqual( + query, + { + "$or": [ + { + "$and": [ + {"x": {"$gt": 0}}, + {"$or": [{"y": True}, {"y": {"$exists": False}}]}, + ] + }, + { + "$and": [ + {"x": {"$lt": 100}}, + {"$or": [{"y": False}, {"y": {"$exists": False}}]}, + ] + }, + ] + }, + ) self.assertEqual(2, TestDoc.objects(q1 | q2).count()) def test_multiple_occurence_in_field(self): @@ -170,8 +188,8 @@ class QTest(unittest.TestCase): name = StringField(max_length=40) title = StringField(max_length=40) - q1 = Q(name__contains='te') | Q(title__contains='te') - q2 = Q(name__contains='12') | Q(title__contains='12') + q1 = Q(name__contains="te") | Q(title__contains="te") + q2 = Q(name__contains="12") | Q(title__contains="12") q3 = q1 & q2 @@ -180,7 +198,6 @@ class QTest(unittest.TestCase): self.assertEqual(query["$and"][1], q2.to_query(Test)) def test_q_clone(self): - class TestDoc(Document): x = IntField() @@ -205,6 +222,7 @@ class QTest(unittest.TestCase): def test_q(self): """Ensure that Q objects may be used to query for documents. """ + class BlogPost(Document): title = StringField() publish_date = DateTimeField() @@ -212,22 +230,26 @@ class QTest(unittest.TestCase): BlogPost.drop_collection() - post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False) + post1 = BlogPost( + title="Test 1", publish_date=datetime.datetime(2010, 1, 8), published=False + ) post1.save() - post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True) + post2 = BlogPost( + title="Test 2", publish_date=datetime.datetime(2010, 1, 15), published=True + ) post2.save() - post3 = BlogPost(title='Test 3', published=True) + post3 = BlogPost(title="Test 3", published=True) post3.save() - post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8)) + post4 = BlogPost(title="Test 4", publish_date=datetime.datetime(2010, 1, 8)) post4.save() - post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15)) + post5 = BlogPost(title="Test 1", publish_date=datetime.datetime(2010, 1, 15)) post5.save() - post6 = BlogPost(title='Test 1', published=False) + post6 = BlogPost(title="Test 1", published=False) post6.save() # Check ObjectId lookup works @@ -235,13 +257,13 @@ class QTest(unittest.TestCase): self.assertEqual(obj, post1) # Check Q object combination with one does not exist - q = BlogPost.objects(Q(title='Test 5') | Q(published=True)) + q = BlogPost.objects(Q(title="Test 5") | Q(published=True)) posts = [post.id for post in q] published_posts = (post2, post3) self.assertTrue(all(obj.id in posts for obj in published_posts)) - q = BlogPost.objects(Q(title='Test 1') | Q(published=True)) + q = BlogPost.objects(Q(title="Test 1") | Q(published=True)) posts = [post.id for post in q] published_posts = (post1, post2, post3, post5, post6) self.assertTrue(all(obj.id in posts for obj in published_posts)) @@ -259,85 
+281,91 @@ class QTest(unittest.TestCase): BlogPost.drop_collection() # Check the 'in' operator - self.Person(name='user1', age=20).save() - self.Person(name='user2', age=20).save() - self.Person(name='user3', age=30).save() - self.Person(name='user4', age=40).save() + self.Person(name="user1", age=20).save() + self.Person(name="user2", age=20).save() + self.Person(name="user3", age=30).save() + self.Person(name="user4", age=40).save() self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2) self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3) # Test invalid query objs with self.assertRaises(InvalidQueryError): - self.Person.objects('user1') + self.Person.objects("user1") # filter should fail, too with self.assertRaises(InvalidQueryError): - self.Person.objects.filter('user1') + self.Person.objects.filter("user1") def test_q_regex(self): """Ensure that Q objects can be queried using regexes. """ - person = self.Person(name='Guido van Rossum') + person = self.Person(name="Guido van Rossum") person.save() - obj = self.Person.objects(Q(name=re.compile('^Gui'))).first() + obj = self.Person.objects(Q(name=re.compile("^Gui"))).first() self.assertEqual(obj, person) - obj = self.Person.objects(Q(name=re.compile('^gui'))).first() + obj = self.Person.objects(Q(name=re.compile("^gui"))).first() self.assertEqual(obj, None) - obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first() + obj = self.Person.objects(Q(name=re.compile("^gui", re.I))).first() self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first() + obj = self.Person.objects(Q(name__not=re.compile("^bob"))).first() self.assertEqual(obj, person) - obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() + obj = self.Person.objects(Q(name__not=re.compile("^Gui"))).first() self.assertEqual(obj, None) def test_q_repr(self): - self.assertEqual(repr(Q()), 'Q(**{})') - self.assertEqual(repr(Q(name='test')), "Q(**{'name': 'test'})") + self.assertEqual(repr(Q()), "Q(**{})") + self.assertEqual(repr(Q(name="test")), "Q(**{'name': 'test'})") self.assertEqual( - repr(Q(name='test') & Q(age__gte=18)), - "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))") + repr(Q(name="test") & Q(age__gte=18)), + "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))", + ) self.assertEqual( - repr(Q(name='test') | Q(age__gte=18)), - "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))") + repr(Q(name="test") | Q(age__gte=18)), + "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))", + ) def test_q_lists(self): """Ensure that Q objects query ListFields correctly. 
""" + class BlogPost(Document): tags = ListField(StringField()) BlogPost.drop_collection() - BlogPost(tags=['python', 'mongo']).save() - BlogPost(tags=['python']).save() + BlogPost(tags=["python", "mongo"]).save() + BlogPost(tags=["python"]).save() - self.assertEqual(BlogPost.objects(Q(tags='mongo')).count(), 1) - self.assertEqual(BlogPost.objects(Q(tags='python')).count(), 2) + self.assertEqual(BlogPost.objects(Q(tags="mongo")).count(), 1) + self.assertEqual(BlogPost.objects(Q(tags="python")).count(), 2) BlogPost.drop_collection() def test_q_merge_queries_edge_case(self): - class User(Document): email = EmailField(required=False) name = StringField() User.drop_collection() pk = ObjectId() - User(email='example@example.com', pk=pk).save() + User(email="example@example.com", pk=pk).save() - self.assertEqual(1, User.objects.filter(Q(email='example@example.com') | - Q(name='John Doe')).limit(2).filter(pk=pk).count()) + self.assertEqual( + 1, + User.objects.filter(Q(email="example@example.com") | Q(name="John Doe")) + .limit(2) + .filter(pk=pk) + .count(), + ) def test_chained_q_or_filtering(self): - class Post(EmbeddedDocument): name = StringField(required=True) @@ -350,9 +378,16 @@ class QTest(unittest.TestCase): Item(postables=[Post(name="a"), Post(name="c")]).save() Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save() - self.assertEqual(Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2) - self.assertEqual(Item.objects.filter(postables__name="a").filter(postables__name="b").count(), 2) + self.assertEqual( + Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2 + ) + self.assertEqual( + Item.objects.filter(postables__name="a") + .filter(postables__name="b") + .count(), + 2, + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_common.py b/tests/test_common.py index 04ad5b34..5d702668 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -5,7 +5,6 @@ from mongoengine import Document class TestCommon(unittest.TestCase): - def test__import_class(self): doc_cls = _import_class("Document") self.assertIs(doc_cls, Document) diff --git a/tests/test_connection.py b/tests/test_connection.py index d3fcc395..25007132 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -14,12 +14,21 @@ import pymongo from bson.tz_util import utc from mongoengine import ( - connect, register_connection, - Document, DateTimeField, - disconnect_all, StringField) + connect, + register_connection, + Document, + DateTimeField, + disconnect_all, + StringField, +) import mongoengine.connection -from mongoengine.connection import (MongoEngineConnectionError, get_db, - get_connection, disconnect, DEFAULT_DATABASE_NAME) +from mongoengine.connection import ( + MongoEngineConnectionError, + get_db, + get_connection, + disconnect, + DEFAULT_DATABASE_NAME, +) def get_tz_awareness(connection): @@ -27,7 +36,6 @@ def get_tz_awareness(connection): class ConnectionTest(unittest.TestCase): - @classmethod def setUpClass(cls): disconnect_all() @@ -43,44 +51,46 @@ class ConnectionTest(unittest.TestCase): def test_connect(self): """Ensure that the connect() method works properly.""" - connect('mongoenginetest') + connect("mongoenginetest") conn = get_connection() self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + self.assertEqual(db.name, "mongoenginetest") - connect('mongoenginetest2', 
alias='testdb') - conn = get_connection('testdb') + connect("mongoenginetest2", alias="testdb") + conn = get_connection("testdb") self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) def test_connect_disconnect_works_properly(self): class History1(Document): name = StringField() - meta = {'db_alias': 'db1'} + meta = {"db_alias": "db1"} class History2(Document): name = StringField() - meta = {'db_alias': 'db2'} + meta = {"db_alias": "db2"} - connect('db1', alias='db1') - connect('db2', alias='db2') + connect("db1", alias="db1") + connect("db2", alias="db2") History1.drop_collection() History2.drop_collection() - h = History1(name='default').save() - h1 = History2(name='db1').save() + h = History1(name="default").save() + h1 = History2(name="db1").save() - self.assertEqual(list(History1.objects().as_pymongo()), - [{'_id': h.id, 'name': 'default'}]) - self.assertEqual(list(History2.objects().as_pymongo()), - [{'_id': h1.id, 'name': 'db1'}]) + self.assertEqual( + list(History1.objects().as_pymongo()), [{"_id": h.id, "name": "default"}] + ) + self.assertEqual( + list(History2.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}] + ) - disconnect('db1') - disconnect('db2') + disconnect("db1") + disconnect("db2") with self.assertRaises(MongoEngineConnectionError): list(History1.objects().as_pymongo()) @@ -88,13 +98,15 @@ class ConnectionTest(unittest.TestCase): with self.assertRaises(MongoEngineConnectionError): list(History2.objects().as_pymongo()) - connect('db1', alias='db1') - connect('db2', alias='db2') + connect("db1", alias="db1") + connect("db2", alias="db2") - self.assertEqual(list(History1.objects().as_pymongo()), - [{'_id': h.id, 'name': 'default'}]) - self.assertEqual(list(History2.objects().as_pymongo()), - [{'_id': h1.id, 'name': 'db1'}]) + self.assertEqual( + list(History1.objects().as_pymongo()), [{"_id": h.id, "name": "default"}] + ) + self.assertEqual( + list(History2.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}] + ) def test_connect_different_documents_to_different_database(self): class History(Document): @@ -102,99 +114,110 @@ class ConnectionTest(unittest.TestCase): class History1(Document): name = StringField() - meta = {'db_alias': 'db1'} + meta = {"db_alias": "db1"} class History2(Document): name = StringField() - meta = {'db_alias': 'db2'} + meta = {"db_alias": "db2"} connect() - connect('db1', alias='db1') - connect('db2', alias='db2') + connect("db1", alias="db1") + connect("db2", alias="db2") History.drop_collection() History1.drop_collection() History2.drop_collection() - h = History(name='default').save() - h1 = History1(name='db1').save() - h2 = History2(name='db2').save() + h = History(name="default").save() + h1 = History1(name="db1").save() + h2 = History2(name="db2").save() self.assertEqual(History._collection.database.name, DEFAULT_DATABASE_NAME) - self.assertEqual(History1._collection.database.name, 'db1') - self.assertEqual(History2._collection.database.name, 'db2') + self.assertEqual(History1._collection.database.name, "db1") + self.assertEqual(History2._collection.database.name, "db2") - self.assertEqual(list(History.objects().as_pymongo()), - [{'_id': h.id, 'name': 'default'}]) - self.assertEqual(list(History1.objects().as_pymongo()), - [{'_id': h1.id, 'name': 'db1'}]) - self.assertEqual(list(History2.objects().as_pymongo()), - [{'_id': h2.id, 'name': 'db2'}]) + self.assertEqual( + list(History.objects().as_pymongo()), [{"_id": h.id, "name": "default"}] + ) + self.assertEqual( + list(History1.objects().as_pymongo()), [{"_id": h1.id, 
"name": "db1"}] + ) + self.assertEqual( + list(History2.objects().as_pymongo()), [{"_id": h2.id, "name": "db2"}] + ) def test_connect_fails_if_connect_2_times_with_default_alias(self): - connect('mongoenginetest') + connect("mongoenginetest") with self.assertRaises(MongoEngineConnectionError) as ctx_err: - connect('mongoenginetest2') - self.assertEqual("A different connection with alias `default` was already registered. Use disconnect() first", str(ctx_err.exception)) + connect("mongoenginetest2") + self.assertEqual( + "A different connection with alias `default` was already registered. Use disconnect() first", + str(ctx_err.exception), + ) def test_connect_fails_if_connect_2_times_with_custom_alias(self): - connect('mongoenginetest', alias='alias1') + connect("mongoenginetest", alias="alias1") with self.assertRaises(MongoEngineConnectionError) as ctx_err: - connect('mongoenginetest2', alias='alias1') + connect("mongoenginetest2", alias="alias1") - self.assertEqual("A different connection with alias `alias1` was already registered. Use disconnect() first", str(ctx_err.exception)) + self.assertEqual( + "A different connection with alias `alias1` was already registered. Use disconnect() first", + str(ctx_err.exception), + ) - def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way(self): + def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way( + self + ): """Intended to keep the detecton function simple but robust""" - db_name = 'mongoenginetest' - db_alias = 'alias1' - connect(db=db_name, alias=db_alias, host='localhost', port=27017) + db_name = "mongoenginetest" + db_alias = "alias1" + connect(db=db_name, alias=db_alias, host="localhost", port=27017) with self.assertRaises(MongoEngineConnectionError): - connect(host='mongodb://localhost:27017/%s' % db_name, alias=db_alias) + connect(host="mongodb://localhost:27017/%s" % db_name, alias=db_alias) def test_connect_passes_silently_connect_multiple_times_with_same_config(self): # test default connection to `test` connect() connect() self.assertEqual(len(mongoengine.connection._connections), 1) - connect('test01', alias='test01') - connect('test01', alias='test01') + connect("test01", alias="test01") + connect("test01", alias="test01") self.assertEqual(len(mongoengine.connection._connections), 2) - connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02') - connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02') + connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02") + connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02") self.assertEqual(len(mongoengine.connection._connections), 3) def test_connect_with_invalid_db_name(self): """Ensure that connect() method fails fast if db name is invalid """ with self.assertRaises(InvalidName): - connect('mongomock://localhost') + connect("mongomock://localhost") def test_connect_with_db_name_external(self): """Ensure that connect() works if db name is $external """ """Ensure that the connect() method works properly.""" - connect('$external') + connect("$external") conn = get_connection() self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, '$external') + self.assertEqual(db.name, "$external") - connect('$external', alias='testdb') - conn = get_connection('testdb') + connect("$external", alias="testdb") + conn = get_connection("testdb") self.assertIsInstance(conn, 
pymongo.mongo_client.MongoClient) def test_connect_with_invalid_db_name_type(self): """Ensure that connect() method fails fast if db name has invalid type """ with self.assertRaises(TypeError): - non_string_db_name = ['e. g. list instead of a string'] + non_string_db_name = ["e. g. list instead of a string"] connect(non_string_db_name) def test_connect_in_mocking(self): @@ -203,34 +226,47 @@ class ConnectionTest(unittest.TestCase): try: import mongomock except ImportError: - raise SkipTest('you need mongomock installed to run this testcase') + raise SkipTest("you need mongomock installed to run this testcase") - connect('mongoenginetest', host='mongomock://localhost') + connect("mongoenginetest", host="mongomock://localhost") conn = get_connection() self.assertIsInstance(conn, mongomock.MongoClient) - connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2') - conn = get_connection('testdb2') + connect("mongoenginetest2", host="mongomock://localhost", alias="testdb2") + conn = get_connection("testdb2") self.assertIsInstance(conn, mongomock.MongoClient) - connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3') - conn = get_connection('testdb3') + connect( + "mongoenginetest3", + host="mongodb://localhost", + is_mock=True, + alias="testdb3", + ) + conn = get_connection("testdb3") self.assertIsInstance(conn, mongomock.MongoClient) - connect('mongoenginetest4', is_mock=True, alias='testdb4') - conn = get_connection('testdb4') + connect("mongoenginetest4", is_mock=True, alias="testdb4") + conn = get_connection("testdb4") self.assertIsInstance(conn, mongomock.MongoClient) - connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5') - conn = get_connection('testdb5') + connect( + host="mongodb://localhost:27017/mongoenginetest5", + is_mock=True, + alias="testdb5", + ) + conn = get_connection("testdb5") self.assertIsInstance(conn, mongomock.MongoClient) - connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6') - conn = get_connection('testdb6') + connect(host="mongomock://localhost:27017/mongoenginetest6", alias="testdb6") + conn = get_connection("testdb6") self.assertIsInstance(conn, mongomock.MongoClient) - connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7') - conn = get_connection('testdb7') + connect( + host="mongomock://localhost:27017/mongoenginetest7", + is_mock=True, + alias="testdb7", + ) + conn = get_connection("testdb7") self.assertIsInstance(conn, mongomock.MongoClient) def test_connect_with_host_list(self): @@ -241,30 +277,39 @@ class ConnectionTest(unittest.TestCase): try: import mongomock except ImportError: - raise SkipTest('you need mongomock installed to run this testcase') + raise SkipTest("you need mongomock installed to run this testcase") - connect(host=['mongomock://localhost']) + connect(host=["mongomock://localhost"]) conn = get_connection() self.assertIsInstance(conn, mongomock.MongoClient) - connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2') - conn = get_connection('testdb2') + connect(host=["mongodb://localhost"], is_mock=True, alias="testdb2") + conn = get_connection("testdb2") self.assertIsInstance(conn, mongomock.MongoClient) - connect(host=['localhost'], is_mock=True, alias='testdb3') - conn = get_connection('testdb3') + connect(host=["localhost"], is_mock=True, alias="testdb3") + conn = get_connection("testdb3") self.assertIsInstance(conn, mongomock.MongoClient) - connect(host=['mongomock://localhost:27017', 
'mongomock://localhost:27018'], alias='testdb4') - conn = get_connection('testdb4') + connect( + host=["mongomock://localhost:27017", "mongomock://localhost:27018"], + alias="testdb4", + ) + conn = get_connection("testdb4") self.assertIsInstance(conn, mongomock.MongoClient) - connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5') - conn = get_connection('testdb5') + connect( + host=["mongodb://localhost:27017", "mongodb://localhost:27018"], + is_mock=True, + alias="testdb5", + ) + conn = get_connection("testdb5") self.assertIsInstance(conn, mongomock.MongoClient) - connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6') - conn = get_connection('testdb6') + connect( + host=["localhost:27017", "localhost:27018"], is_mock=True, alias="testdb6" + ) + conn = get_connection("testdb6") self.assertIsInstance(conn, mongomock.MongoClient) def test_disconnect_cleans_globals(self): @@ -273,7 +318,7 @@ class ConnectionTest(unittest.TestCase): dbs = mongoengine.connection._dbs connection_settings = mongoengine.connection._connection_settings - connect('mongoenginetest') + connect("mongoenginetest") self.assertEqual(len(connections), 1) self.assertEqual(len(dbs), 0) @@ -292,7 +337,7 @@ class ConnectionTest(unittest.TestCase): def test_disconnect_cleans_cached_collection_attribute_in_document(self): """Ensure that the disconnect() method works properly""" - conn1 = connect('mongoenginetest') + conn1 = connect("mongoenginetest") class History(Document): pass @@ -301,7 +346,7 @@ class ConnectionTest(unittest.TestCase): History.drop_collection() - History.objects.first() # will trigger the caching of _collection attribute + History.objects.first() # will trigger the caching of _collection attribute self.assertIsNotNone(History._collection) disconnect() @@ -310,15 +355,17 @@ class ConnectionTest(unittest.TestCase): with self.assertRaises(MongoEngineConnectionError) as ctx_err: History.objects.first() - self.assertEqual("You have not defined a default connection", str(ctx_err.exception)) + self.assertEqual( + "You have not defined a default connection", str(ctx_err.exception) + ) def test_connect_disconnect_works_on_same_document(self): """Ensure that the connect/disconnect works properly with a single Document""" - db1 = 'db1' - db2 = 'db2' + db1 = "db1" + db2 = "db2" # Ensure freshness of the 2 databases through pymongo - client = MongoClient('localhost', 27017) + client = MongoClient("localhost", 27017) client.drop_database(db1) client.drop_database(db2) @@ -328,44 +375,44 @@ class ConnectionTest(unittest.TestCase): class User(Document): name = StringField(required=True) - user1 = User(name='John is in db1').save() + user1 = User(name="John is in db1").save() disconnect() # Make sure save doesnt work at this stage with self.assertRaises(MongoEngineConnectionError): - User(name='Wont work').save() + User(name="Wont work").save() # Save in db2 connect(db2) - user2 = User(name='Bob is in db2').save() + user2 = User(name="Bob is in db2").save() disconnect() db1_users = list(client[db1].user.find()) - self.assertEqual(db1_users, [{'_id': user1.id, 'name': 'John is in db1'}]) + self.assertEqual(db1_users, [{"_id": user1.id, "name": "John is in db1"}]) db2_users = list(client[db2].user.find()) - self.assertEqual(db2_users, [{'_id': user2.id, 'name': 'Bob is in db2'}]) + self.assertEqual(db2_users, [{"_id": user2.id, "name": "Bob is in db2"}]) def test_disconnect_silently_pass_if_alias_does_not_exist(self): connections = 
mongoengine.connection._connections self.assertEqual(len(connections), 0) - disconnect(alias='not_exist') + disconnect(alias="not_exist") def test_disconnect_all(self): connections = mongoengine.connection._connections dbs = mongoengine.connection._dbs connection_settings = mongoengine.connection._connection_settings - connect('mongoenginetest') - connect('mongoenginetest2', alias='db1') + connect("mongoenginetest") + connect("mongoenginetest2", alias="db1") class History(Document): pass class History1(Document): name = StringField() - meta = {'db_alias': 'db1'} + meta = {"db_alias": "db1"} - History.drop_collection() # will trigger the caching of _collection attribute + History.drop_collection() # will trigger the caching of _collection attribute History.objects.first() History1.drop_collection() History1.objects.first() @@ -398,11 +445,11 @@ class ConnectionTest(unittest.TestCase): def test_sharing_connections(self): """Ensure that connections are shared when the connection settings are exactly the same """ - connect('mongoenginetests', alias='testdb1') - expected_connection = get_connection('testdb1') + connect("mongoenginetests", alias="testdb1") + expected_connection = get_connection("testdb1") - connect('mongoenginetests', alias='testdb2') - actual_connection = get_connection('testdb2') + connect("mongoenginetests", alias="testdb2") + actual_connection = get_connection("testdb2") expected_connection.server_info() @@ -410,7 +457,7 @@ class ConnectionTest(unittest.TestCase): def test_connect_uri(self): """Ensure that the connect() method works properly with URIs.""" - c = connect(db='mongoenginetest', alias='admin') + c = connect(db="mongoenginetest", alias="admin") c.admin.system.users.delete_many({}) c.mongoenginetest.system.users.delete_many({}) @@ -418,14 +465,16 @@ class ConnectionTest(unittest.TestCase): c.admin.authenticate("admin", "password") c.admin.command("createUser", "username", pwd="password", roles=["dbOwner"]) - connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') + connect( + "testdb_uri", host="mongodb://username:password@localhost/mongoenginetest" + ) conn = get_connection() self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + self.assertEqual(db.name, "mongoenginetest") c.admin.system.users.delete_many({}) c.mongoenginetest.system.users.delete_many({}) @@ -434,35 +483,35 @@ class ConnectionTest(unittest.TestCase): """Ensure connect() method works properly if the URI doesn't include a database name. """ - connect("mongoenginetest", host='mongodb://localhost/') + connect("mongoenginetest", host="mongodb://localhost/") conn = get_connection() self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + self.assertEqual(db.name, "mongoenginetest") def test_connect_uri_default_db(self): """Ensure connect() defaults to the right database name if the URI and the database_name don't explicitly specify it. 
""" - connect(host='mongodb://localhost/') + connect(host="mongodb://localhost/") conn = get_connection() self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) db = get_db() self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'test') + self.assertEqual(db.name, "test") def test_uri_without_credentials_doesnt_override_conn_settings(self): """Ensure connect() uses the username & password params if the URI doesn't explicitly specify them. """ - c = connect(host='mongodb://localhost/mongoenginetest', - username='user', - password='pass') + c = connect( + host="mongodb://localhost/mongoenginetest", username="user", password="pass" + ) # OperationFailure means that mongoengine attempted authentication # w/ the provided username/password and failed - that's the desired @@ -474,27 +523,31 @@ class ConnectionTest(unittest.TestCase): option in the URI. """ # Create users - c = connect('mongoenginetest') + c = connect("mongoenginetest") c.admin.system.users.delete_many({}) c.admin.command("createUser", "username2", pwd="password", roles=["dbOwner"]) # Authentication fails without "authSource" test_conn = connect( - 'mongoenginetest', alias='test1', - host='mongodb://username2:password@localhost/mongoenginetest' + "mongoenginetest", + alias="test1", + host="mongodb://username2:password@localhost/mongoenginetest", ) self.assertRaises(OperationFailure, test_conn.server_info) # Authentication succeeds with "authSource" authd_conn = connect( - 'mongoenginetest', alias='test2', - host=('mongodb://username2:password@localhost/' - 'mongoenginetest?authSource=admin') + "mongoenginetest", + alias="test2", + host=( + "mongodb://username2:password@localhost/" + "mongoenginetest?authSource=admin" + ), ) - db = get_db('test2') + db = get_db("test2") self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + self.assertEqual(db.name, "mongoenginetest") # Clear all users authd_conn.admin.system.users.delete_many({}) @@ -502,82 +555,86 @@ class ConnectionTest(unittest.TestCase): def test_register_connection(self): """Ensure that connections with different aliases may be registered. """ - register_connection('testdb', 'mongoenginetest2') + register_connection("testdb", "mongoenginetest2") self.assertRaises(MongoEngineConnectionError, get_connection) - conn = get_connection('testdb') + conn = get_connection("testdb") self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) - db = get_db('testdb') + db = get_db("testdb") self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest2') + self.assertEqual(db.name, "mongoenginetest2") def test_register_connection_defaults(self): """Ensure that defaults are used when the host and port are None. 
""" - register_connection('testdb', 'mongoenginetest', host=None, port=None) + register_connection("testdb", "mongoenginetest", host=None, port=None) - conn = get_connection('testdb') + conn = get_connection("testdb") self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) def test_connection_kwargs(self): """Ensure that connection kwargs get passed to pymongo.""" - connect('mongoenginetest', alias='t1', tz_aware=True) - conn = get_connection('t1') + connect("mongoenginetest", alias="t1", tz_aware=True) + conn = get_connection("t1") self.assertTrue(get_tz_awareness(conn)) - connect('mongoenginetest2', alias='t2') - conn = get_connection('t2') + connect("mongoenginetest2", alias="t2") + conn = get_connection("t2") self.assertFalse(get_tz_awareness(conn)) def test_connection_pool_via_kwarg(self): """Ensure we can specify a max connection pool size using a connection kwarg. """ - pool_size_kwargs = {'maxpoolsize': 100} + pool_size_kwargs = {"maxpoolsize": 100} - conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs) + conn = connect( + "mongoenginetest", alias="max_pool_size_via_kwarg", **pool_size_kwargs + ) self.assertEqual(conn.max_pool_size, 100) def test_connection_pool_via_uri(self): """Ensure we can specify a max connection pool size using an option in a connection URI. """ - conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri') + conn = connect( + host="mongodb://localhost/test?maxpoolsize=100", + alias="max_pool_size_via_uri", + ) self.assertEqual(conn.max_pool_size, 100) def test_write_concern(self): """Ensure write concern can be specified in connect() via a kwarg or as part of the connection URI. """ - conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true') - conn2 = connect('testing', alias='conn2', w=1, j=True) - self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True}) - self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True}) + conn1 = connect(alias="conn1", host="mongodb://localhost/testing?w=1&j=true") + conn2 = connect("testing", alias="conn2", w=1, j=True) + self.assertEqual(conn1.write_concern.document, {"w": 1, "j": True}) + self.assertEqual(conn2.write_concern.document, {"w": 1, "j": True}) def test_connect_with_replicaset_via_uri(self): """Ensure connect() works when specifying a replicaSet via the MongoDB URI. 
""" - c = connect(host='mongodb://localhost/test?replicaSet=local-rs') + c = connect(host="mongodb://localhost/test?replicaSet=local-rs") db = get_db() self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'test') + self.assertEqual(db.name, "test") def test_connect_with_replicaset_via_kwargs(self): """Ensure connect() works when specifying a replicaSet via the connection kwargs """ - c = connect(replicaset='local-rs') - self.assertEqual(c._MongoClient__options.replica_set_name, - 'local-rs') + c = connect(replicaset="local-rs") + self.assertEqual(c._MongoClient__options.replica_set_name, "local-rs") db = get_db() self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'test') + self.assertEqual(db.name, "test") def test_connect_tz_aware(self): - connect('mongoenginetest', tz_aware=True) + connect("mongoenginetest", tz_aware=True) d = datetime.datetime(2010, 5, 5, tzinfo=utc) class DateDoc(Document): @@ -590,37 +647,39 @@ class ConnectionTest(unittest.TestCase): self.assertEqual(d, date_doc.the_date) def test_read_preference_from_parse(self): - conn = connect(host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred") + conn = connect( + host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred" + ) self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED) def test_multiple_connection_settings(self): - connect('mongoenginetest', alias='t1', host="localhost") + connect("mongoenginetest", alias="t1", host="localhost") - connect('mongoenginetest2', alias='t2', host="127.0.0.1") + connect("mongoenginetest2", alias="t2", host="127.0.0.1") mongo_connections = mongoengine.connection._connections self.assertEqual(len(mongo_connections.items()), 2) - self.assertIn('t1', mongo_connections.keys()) - self.assertIn('t2', mongo_connections.keys()) + self.assertIn("t1", mongo_connections.keys()) + self.assertIn("t2", mongo_connections.keys()) # Handle PyMongo 3+ Async Connection # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. # Purposely not catching exception to fail test if thrown. 
- mongo_connections['t1'].server_info() - mongo_connections['t2'].server_info() - self.assertEqual(mongo_connections['t1'].address[0], 'localhost') - self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1') + mongo_connections["t1"].server_info() + mongo_connections["t2"].server_info() + self.assertEqual(mongo_connections["t1"].address[0], "localhost") + self.assertEqual(mongo_connections["t2"].address[0], "127.0.0.1") def test_connect_2_databases_uses_same_client_if_only_dbname_differs(self): - c1 = connect(alias='testdb1', db='testdb1') - c2 = connect(alias='testdb2', db='testdb2') + c1 = connect(alias="testdb1", db="testdb1") + c2 = connect(alias="testdb2", db="testdb2") self.assertIs(c1, c2) def test_connect_2_databases_uses_different_client_if_different_parameters(self): - c1 = connect(alias='testdb1', db='testdb1', username='u1') - c2 = connect(alias='testdb2', db='testdb2', username='u2') + c1 = connect(alias="testdb1", db="testdb1", username="u1") + c2 = connect(alias="testdb2", db="testdb2", username="u2") self.assertIsNot(c1, c2) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index 529032fe..dc9b9bf3 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -2,17 +2,20 @@ import unittest from mongoengine import * from mongoengine.connection import get_db -from mongoengine.context_managers import (switch_db, switch_collection, - no_sub_classes, no_dereference, - query_counter) +from mongoengine.context_managers import ( + switch_db, + switch_collection, + no_sub_classes, + no_dereference, + query_counter, +) from mongoengine.pymongo_support import count_documents class ContextManagersTest(unittest.TestCase): - def test_switch_db_context_manager(self): - connect('mongoenginetest') - register_connection('testdb-1', 'mongoenginetest2') + connect("mongoenginetest") + register_connection("testdb-1", "mongoenginetest2") class Group(Document): name = StringField() @@ -22,7 +25,7 @@ class ContextManagersTest(unittest.TestCase): Group(name="hello - default").save() self.assertEqual(1, Group.objects.count()) - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: self.assertEqual(0, Group.objects.count()) @@ -36,21 +39,21 @@ class ContextManagersTest(unittest.TestCase): self.assertEqual(1, Group.objects.count()) def test_switch_collection_context_manager(self): - connect('mongoenginetest') - register_connection(alias='testdb-1', db='mongoenginetest2') + connect("mongoenginetest") + register_connection(alias="testdb-1", db="mongoenginetest2") class Group(Document): name = StringField() - Group.drop_collection() # drops in default + Group.drop_collection() # drops in default - with switch_collection(Group, 'group1') as Group: - Group.drop_collection() # drops in group1 + with switch_collection(Group, "group1") as Group: + Group.drop_collection() # drops in group1 Group(name="hello - group").save() self.assertEqual(1, Group.objects.count()) - with switch_collection(Group, 'group1') as Group: + with switch_collection(Group, "group1") as Group: self.assertEqual(0, Group.objects.count()) @@ -66,7 +69,7 @@ class ContextManagersTest(unittest.TestCase): def test_no_dereference_context_manager_object_id(self): """Ensure that DBRef items in ListFields aren't dereferenced. 
""" - connect('mongoenginetest') + connect("mongoenginetest") class User(Document): name = StringField() @@ -80,14 +83,14 @@ class ContextManagersTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() user = User.objects.first() Group(ref=user, members=User.objects, generic=user).save() with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields['members']._auto_dereference) - self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + self.assertTrue(Group._fields["members"]._auto_dereference) + self.assertFalse(NoDeRefGroup._fields["members"]._auto_dereference) with no_dereference(Group) as Group: group = Group.objects.first() @@ -104,7 +107,7 @@ class ContextManagersTest(unittest.TestCase): def test_no_dereference_context_manager_dbref(self): """Ensure that DBRef items in ListFields aren't dereferenced. """ - connect('mongoenginetest') + connect("mongoenginetest") class User(Document): name = StringField() @@ -118,31 +121,29 @@ class ContextManagersTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() user = User.objects.first() Group(ref=user, members=User.objects, generic=user).save() with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields['members']._auto_dereference) - self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + self.assertTrue(Group._fields["members"]._auto_dereference) + self.assertFalse(NoDeRefGroup._fields["members"]._auto_dereference) with no_dereference(Group) as Group: group = Group.objects.first() - self.assertTrue(all([not isinstance(m, User) - for m in group.members])) + self.assertTrue(all([not isinstance(m, User) for m in group.members])) self.assertNotIsInstance(group.ref, User) self.assertNotIsInstance(group.generic, User) - self.assertTrue(all([isinstance(m, User) - for m in group.members])) + self.assertTrue(all([isinstance(m, User) for m in group.members])) self.assertIsInstance(group.ref, User) self.assertIsInstance(group.generic, User) def test_no_sub_classes(self): class A(Document): x = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class B(A): z = IntField() @@ -188,20 +189,20 @@ class ContextManagersTest(unittest.TestCase): def test_no_sub_classes_modification_to_document_class_are_temporary(self): class A(Document): x = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class B(A): z = IntField() - self.assertEqual(A._subclasses, ('A', 'A.B')) + self.assertEqual(A._subclasses, ("A", "A.B")) with no_sub_classes(A): - self.assertEqual(A._subclasses, ('A',)) - self.assertEqual(A._subclasses, ('A', 'A.B')) + self.assertEqual(A._subclasses, ("A",)) + self.assertEqual(A._subclasses, ("A", "A.B")) - self.assertEqual(B._subclasses, ('A.B',)) + self.assertEqual(B._subclasses, ("A.B",)) with no_sub_classes(B): - self.assertEqual(B._subclasses, ('A.B',)) - self.assertEqual(B._subclasses, ('A.B',)) + self.assertEqual(B._subclasses, ("A.B",)) + self.assertEqual(B._subclasses, ("A.B",)) def test_no_subclass_context_manager_does_not_swallow_exception(self): class User(Document): @@ -218,7 +219,7 @@ class ContextManagersTest(unittest.TestCase): raise TypeError() def test_query_counter_temporarily_modifies_profiling_level(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() initial_profiling_level = db.profiling_level() @@ -231,11 +232,13 @@ 
class ContextManagersTest(unittest.TestCase): self.assertEqual(db.profiling_level(), 2) self.assertEqual(db.profiling_level(), NEW_LEVEL) except Exception: - db.set_profiling_level(initial_profiling_level) # Ensures it gets reseted no matter the outcome of the test + db.set_profiling_level( + initial_profiling_level + ) # Ensures it gets reseted no matter the outcome of the test raise def test_query_counter(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() collection = db.query_counter @@ -245,7 +248,7 @@ class ContextManagersTest(unittest.TestCase): count_documents(collection, {}) def issue_1_insert_query(): - collection.insert_one({'test': 'garbage'}) + collection.insert_one({"test": "garbage"}) def issue_1_find_query(): collection.find_one() @@ -253,7 +256,9 @@ class ContextManagersTest(unittest.TestCase): counter = 0 with query_counter() as q: self.assertEqual(q, counter) - self.assertEqual(q, counter) # Ensures previous count query did not get counted + self.assertEqual( + q, counter + ) # Ensures previous count query did not get counted for _ in range(10): issue_1_insert_query() @@ -270,23 +275,25 @@ class ContextManagersTest(unittest.TestCase): counter += 1 self.assertEqual(q, counter) - self.assertEqual(int(q), counter) # test __int__ + self.assertEqual(int(q), counter) # test __int__ self.assertEqual(repr(q), str(int(q))) # test __repr__ - self.assertGreater(q, -1) # test __gt__ - self.assertGreaterEqual(q, int(q)) # test __gte__ + self.assertGreater(q, -1) # test __gt__ + self.assertGreaterEqual(q, int(q)) # test __gte__ self.assertNotEqual(q, -1) self.assertLess(q, 1000) self.assertLessEqual(q, int(q)) def test_query_counter_counts_getmore_queries(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() collection = db.query_counter collection.drop() - many_docs = [{'test': 'garbage %s' % i} for i in range(150)] - collection.insert_many(many_docs) # first batch of documents contains 101 documents + many_docs = [{"test": "garbage %s" % i} for i in range(150)] + collection.insert_many( + many_docs + ) # first batch of documents contains 101 documents with query_counter() as q: self.assertEqual(q, 0) @@ -294,24 +301,26 @@ class ContextManagersTest(unittest.TestCase): self.assertEqual(q, 2) # 1st select + 1 getmore def test_query_counter_ignores_particular_queries(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() collection = db.query_counter - collection.insert_many([{'test': 'garbage %s' % i} for i in range(10)]) + collection.insert_many([{"test": "garbage %s" % i} for i in range(10)]) with query_counter() as q: self.assertEqual(q, 0) cursor = collection.find() - self.assertEqual(q, 0) # cursor wasn't opened yet - _ = next(cursor) # opens the cursor and fires the find query + self.assertEqual(q, 0) # cursor wasn't opened yet + _ = next(cursor) # opens the cursor and fires the find query self.assertEqual(q, 1) - cursor.close() # issues a `killcursors` query that is ignored by the context + cursor.close() # issues a `killcursors` query that is ignored by the context self.assertEqual(q, 1) - _ = db.system.indexes.find_one() # queries on db.system.indexes are ignored as well + _ = ( + db.system.indexes.find_one() + ) # queries on db.system.indexes are ignored as well self.assertEqual(q, 1) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index a9ef98e7..7def2ac7 100644 --- a/tests/test_datastructures.py +++ 
b/tests/test_datastructures.py @@ -14,128 +14,129 @@ class DocumentStub(object): class TestBaseDict(unittest.TestCase): - @staticmethod def _get_basedict(dict_items): """Get a BaseList bound to a fake document instance""" fake_doc = DocumentStub() - base_list = BaseDict(dict_items, instance=None, name='my_name') - base_list._instance = fake_doc # hack to inject the mock, it does not work in the constructor + base_list = BaseDict(dict_items, instance=None, name="my_name") + base_list._instance = ( + fake_doc + ) # hack to inject the mock, it does not work in the constructor return base_list def test___init___(self): class MyDoc(Document): pass - dict_items = {'k': 'v'} + dict_items = {"k": "v"} doc = MyDoc() - base_dict = BaseDict(dict_items, instance=doc, name='my_name') + base_dict = BaseDict(dict_items, instance=doc, name="my_name") self.assertIsInstance(base_dict._instance, Document) - self.assertEqual(base_dict._name, 'my_name') + self.assertEqual(base_dict._name, "my_name") self.assertEqual(base_dict, dict_items) def test_setdefault_calls_mark_as_changed(self): base_dict = self._get_basedict({}) - base_dict.setdefault('k', 'v') + base_dict.setdefault("k", "v") self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) def test_popitems_calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) - self.assertEqual(base_dict.popitem(), ('k', 'v')) + base_dict = self._get_basedict({"k": "v"}) + self.assertEqual(base_dict.popitem(), ("k", "v")) self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) self.assertFalse(base_dict) def test_pop_calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) - self.assertEqual(base_dict.pop('k'), 'v') + base_dict = self._get_basedict({"k": "v"}) + self.assertEqual(base_dict.pop("k"), "v") self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) self.assertFalse(base_dict) def test_pop_calls_does_not_mark_as_changed_when_it_fails(self): - base_dict = self._get_basedict({'k': 'v'}) + base_dict = self._get_basedict({"k": "v"}) with self.assertRaises(KeyError): - base_dict.pop('X') + base_dict.pop("X") self.assertFalse(base_dict._instance._changed_fields) def test_clear_calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) + base_dict = self._get_basedict({"k": "v"}) base_dict.clear() - self.assertEqual(base_dict._instance._changed_fields, ['my_name']) + self.assertEqual(base_dict._instance._changed_fields, ["my_name"]) self.assertEqual(base_dict, {}) def test___delitem___calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) - del base_dict['k'] - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k']) + base_dict = self._get_basedict({"k": "v"}) + del base_dict["k"] + self.assertEqual(base_dict._instance._changed_fields, ["my_name.k"]) self.assertEqual(base_dict, {}) def test___getitem____KeyError(self): base_dict = self._get_basedict({}) with self.assertRaises(KeyError): - base_dict['new'] + base_dict["new"] def test___getitem____simple_value(self): - base_dict = self._get_basedict({'k': 'v'}) - base_dict['k'] = 'v' + base_dict = self._get_basedict({"k": "v"}) + base_dict["k"] = "v" def test___getitem____sublist_gets_converted_to_BaseList(self): - base_dict = self._get_basedict({'k': [0, 1, 2]}) - sub_list = base_dict['k'] + base_dict = self._get_basedict({"k": [0, 1, 2]}) + sub_list = base_dict["k"] self.assertEqual(sub_list, [0, 1, 2]) self.assertIsInstance(sub_list, BaseList) self.assertIs(sub_list._instance, 
base_dict._instance) - self.assertEqual(sub_list._name, 'my_name.k') + self.assertEqual(sub_list._name, "my_name.k") self.assertEqual(base_dict._instance._changed_fields, []) # Challenge mark_as_changed from sublist sub_list[1] = None - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.1']) + self.assertEqual(base_dict._instance._changed_fields, ["my_name.k.1"]) def test___getitem____subdict_gets_converted_to_BaseDict(self): - base_dict = self._get_basedict({'k': {'subk': 'subv'}}) - sub_dict = base_dict['k'] - self.assertEqual(sub_dict, {'subk': 'subv'}) + base_dict = self._get_basedict({"k": {"subk": "subv"}}) + sub_dict = base_dict["k"] + self.assertEqual(sub_dict, {"subk": "subv"}) self.assertIsInstance(sub_dict, BaseDict) self.assertIs(sub_dict._instance, base_dict._instance) - self.assertEqual(sub_dict._name, 'my_name.k') + self.assertEqual(sub_dict._name, "my_name.k") self.assertEqual(base_dict._instance._changed_fields, []) # Challenge mark_as_changed from subdict - sub_dict['subk'] = None - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.subk']) + sub_dict["subk"] = None + self.assertEqual(base_dict._instance._changed_fields, ["my_name.k.subk"]) def test_get_sublist_gets_converted_to_BaseList_just_like__getitem__(self): - base_dict = self._get_basedict({'k': [0, 1, 2]}) - sub_list = base_dict.get('k') + base_dict = self._get_basedict({"k": [0, 1, 2]}) + sub_list = base_dict.get("k") self.assertEqual(sub_list, [0, 1, 2]) self.assertIsInstance(sub_list, BaseList) def test_get_returns_the_same_as___getitem__(self): - base_dict = self._get_basedict({'k': [0, 1, 2]}) - get_ = base_dict.get('k') - getitem_ = base_dict['k'] + base_dict = self._get_basedict({"k": [0, 1, 2]}) + get_ = base_dict.get("k") + getitem_ = base_dict["k"] self.assertEqual(get_, getitem_) def test_get_default(self): base_dict = self._get_basedict({}) sentinel = object() - self.assertEqual(base_dict.get('new'), None) - self.assertIs(base_dict.get('new', sentinel), sentinel) + self.assertEqual(base_dict.get("new"), None) + self.assertIs(base_dict.get("new", sentinel), sentinel) def test___setitem___calls_mark_as_changed(self): base_dict = self._get_basedict({}) - base_dict['k'] = 'v' - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k']) - self.assertEqual(base_dict, {'k': 'v'}) + base_dict["k"] = "v" + self.assertEqual(base_dict._instance._changed_fields, ["my_name.k"]) + self.assertEqual(base_dict, {"k": "v"}) def test_update_calls_mark_as_changed(self): base_dict = self._get_basedict({}) - base_dict.update({'k': 'v'}) - self.assertEqual(base_dict._instance._changed_fields, ['my_name']) + base_dict.update({"k": "v"}) + self.assertEqual(base_dict._instance._changed_fields, ["my_name"]) def test___setattr____not_tracked_by_changes(self): base_dict = self._get_basedict({}) - base_dict.a_new_attr = 'test' + base_dict.a_new_attr = "test" self.assertEqual(base_dict._instance._changed_fields, []) def test___delattr____tracked_by_changes(self): @@ -143,19 +144,20 @@ class TestBaseDict(unittest.TestCase): # This is even bad because it could be that there is an attribute # with the same name as a key base_dict = self._get_basedict({}) - base_dict.a_new_attr = 'test' + base_dict.a_new_attr = "test" del base_dict.a_new_attr - self.assertEqual(base_dict._instance._changed_fields, ['my_name.a_new_attr']) + self.assertEqual(base_dict._instance._changed_fields, ["my_name.a_new_attr"]) class TestBaseList(unittest.TestCase): - @staticmethod def _get_baselist(list_items): """Get a 
BaseList bound to a fake document instance""" fake_doc = DocumentStub() - base_list = BaseList(list_items, instance=None, name='my_name') - base_list._instance = fake_doc # hack to inject the mock, it does not work in the constructor + base_list = BaseList(list_items, instance=None, name="my_name") + base_list._instance = ( + fake_doc + ) # hack to inject the mock, it does not work in the constructor return base_list def test___init___(self): @@ -164,19 +166,19 @@ class TestBaseList(unittest.TestCase): list_items = [True] doc = MyDoc() - base_list = BaseList(list_items, instance=doc, name='my_name') + base_list = BaseList(list_items, instance=doc, name="my_name") self.assertIsInstance(base_list._instance, Document) - self.assertEqual(base_list._name, 'my_name') + self.assertEqual(base_list._name, "my_name") self.assertEqual(base_list, list_items) def test___iter__(self): values = [True, False, True, False] - base_list = BaseList(values, instance=None, name='my_name') + base_list = BaseList(values, instance=None, name="my_name") self.assertEqual(values, list(base_list)) def test___iter___allow_modification_while_iterating_withou_error(self): # regular list allows for this, thus this subclass must comply to that - base_list = BaseList([True, False, True, False], instance=None, name='my_name') + base_list = BaseList([True, False, True, False], instance=None, name="my_name") for idx, val in enumerate(base_list): if val: base_list.pop(idx) @@ -185,7 +187,7 @@ class TestBaseList(unittest.TestCase): base_list = self._get_baselist([]) self.assertFalse(base_list._instance._changed_fields) base_list.append(True) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test_subclass_append(self): # Due to the way mark_as_changed_wrapper is implemented @@ -193,7 +195,7 @@ class TestBaseList(unittest.TestCase): class SubBaseList(BaseList): pass - base_list = SubBaseList([], instance=None, name='my_name') + base_list = SubBaseList([], instance=None, name="my_name") base_list.append(True) def test___getitem__using_simple_index(self): @@ -217,54 +219,45 @@ class TestBaseList(unittest.TestCase): self.assertEqual(base_list._instance._changed_fields, []) def test___getitem__sublist_returns_BaseList_bound_to_instance(self): - base_list = self._get_baselist( - [ - [1, 2], - [3, 4] - ] - ) + base_list = self._get_baselist([[1, 2], [3, 4]]) sub_list = base_list[0] self.assertEqual(sub_list, [1, 2]) self.assertIsInstance(sub_list, BaseList) self.assertIs(sub_list._instance, base_list._instance) - self.assertEqual(sub_list._name, 'my_name.0') + self.assertEqual(sub_list._name, "my_name.0") self.assertEqual(base_list._instance._changed_fields, []) # Challenge mark_as_changed from sublist sub_list[1] = None - self.assertEqual(base_list._instance._changed_fields, ['my_name.0.1']) + self.assertEqual(base_list._instance._changed_fields, ["my_name.0.1"]) def test___getitem__subdict_returns_BaseList_bound_to_instance(self): - base_list = self._get_baselist( - [ - {'subk': 'subv'} - ] - ) + base_list = self._get_baselist([{"subk": "subv"}]) sub_dict = base_list[0] - self.assertEqual(sub_dict, {'subk': 'subv'}) + self.assertEqual(sub_dict, {"subk": "subv"}) self.assertIsInstance(sub_dict, BaseDict) self.assertIs(sub_dict._instance, base_list._instance) - self.assertEqual(sub_dict._name, 'my_name.0') + self.assertEqual(sub_dict._name, "my_name.0") self.assertEqual(base_list._instance._changed_fields, []) # Challenge mark_as_changed from 
subdict - sub_dict['subk'] = None - self.assertEqual(base_list._instance._changed_fields, ['my_name.0.subk']) + sub_dict["subk"] = None + self.assertEqual(base_list._instance._changed_fields, ["my_name.0.subk"]) def test_extend_calls_mark_as_changed(self): base_list = self._get_baselist([]) base_list.extend([True]) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test_insert_calls_mark_as_changed(self): base_list = self._get_baselist([]) base_list.insert(0, True) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test_remove_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list.remove(True) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test_remove_not_mark_as_changed_when_it_fails(self): base_list = self._get_baselist([True]) @@ -275,70 +268,76 @@ class TestBaseList(unittest.TestCase): def test_pop_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list.pop() - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test_reverse_calls_mark_as_changed(self): base_list = self._get_baselist([True, False]) base_list.reverse() - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test___delitem___calls_mark_as_changed(self): base_list = self._get_baselist([True]) del base_list[0] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test___setitem___calls_with_full_slice_mark_as_changed(self): base_list = self._get_baselist([]) - base_list[:] = [0, 1] # Will use __setslice__ under py2 and __setitem__ under py3 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + base_list[:] = [ + 0, + 1, + ] # Will use __setslice__ under py2 and __setitem__ under py3 + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) self.assertEqual(base_list, [0, 1]) def test___setitem___calls_with_partial_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) - base_list[0:2] = [1, 0] # Will use __setslice__ under py2 and __setitem__ under py3 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + base_list[0:2] = [ + 1, + 0, + ] # Will use __setslice__ under py2 and __setitem__ under py3 + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) self.assertEqual(base_list, [1, 0, 2]) def test___setitem___calls_with_step_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) - base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3 + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) self.assertEqual(base_list, [-1, 1, -2]) def test___setitem___with_slice(self): base_list = self._get_baselist([0, 1, 2, 3, 4, 5]) base_list[0:6:2] = [None, None, None] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) self.assertEqual(base_list, [None, 1, None, 3, None, 5]) def test___setitem___item_0_calls_mark_as_changed(self): 
base_list = self._get_baselist([True]) base_list[0] = False - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) self.assertEqual(base_list, [False]) def test___setitem___item_1_calls_mark_as_changed(self): base_list = self._get_baselist([True, True]) base_list[1] = False - self.assertEqual(base_list._instance._changed_fields, ['my_name.1']) + self.assertEqual(base_list._instance._changed_fields, ["my_name.1"]) self.assertEqual(base_list, [True, False]) def test___delslice___calls_mark_as_changed(self): base_list = self._get_baselist([0, 1]) del base_list[0:1] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) self.assertEqual(base_list, [1]) def test___iadd___calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list += [False] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test___imul___calls_mark_as_changed(self): base_list = self._get_baselist([True]) self.assertEqual(base_list._instance._changed_fields, []) base_list *= 2 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test_sort_calls_not_marked_as_changed_when_it_fails(self): base_list = self._get_baselist([True]) @@ -350,7 +349,7 @@ class TestBaseList(unittest.TestCase): def test_sort_calls_mark_as_changed(self): base_list = self._get_baselist([True, False]) base_list.sort() - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + self.assertEqual(base_list._instance._changed_fields, ["my_name"]) def test_sort_calls_with_key(self): base_list = self._get_baselist([1, 2, 11]) @@ -371,7 +370,7 @@ class TestStrictDict(unittest.TestCase): def test_iterkeys(self): d = self.dtype(a=1) - self.assertEqual(list(iterkeys(d)), ['a']) + self.assertEqual(list(iterkeys(d)), ["a"]) def test_len(self): d = self.dtype(a=1) @@ -379,9 +378,9 @@ class TestStrictDict(unittest.TestCase): def test_pop(self): d = self.dtype(a=1) - self.assertIn('a', d) - d.pop('a') - self.assertNotIn('a', d) + self.assertIn("a", d) + d.pop("a") + self.assertNotIn("a", d) def test_repr(self): d = self.dtype(a=1, b=2, c=3) @@ -416,7 +415,7 @@ class TestStrictDict(unittest.TestCase): d = self.dtype() d.a = 1 self.assertEqual(d.a, 1) - self.assertRaises(AttributeError, getattr, d, 'b') + self.assertRaises(AttributeError, getattr, d, "b") def test_setattr_raises_on_nonexisting_attr(self): d = self.dtype() @@ -430,20 +429,20 @@ class TestStrictDict(unittest.TestCase): def test_get(self): d = self.dtype(a=1) - self.assertEqual(d.get('a'), 1) - self.assertEqual(d.get('b', 'bla'), 'bla') + self.assertEqual(d.get("a"), 1) + self.assertEqual(d.get("b", "bla"), "bla") def test_items(self): d = self.dtype(a=1) - self.assertEqual(d.items(), [('a', 1)]) + self.assertEqual(d.items(), [("a", 1)]) d = self.dtype(a=1, b=2) - self.assertEqual(d.items(), [('a', 1), ('b', 2)]) + self.assertEqual(d.items(), [("a", 1), ("b", 2)]) def test_mappings_protocol(self): d = self.dtype(a=1, b=2) - self.assertEqual(dict(d), {'a': 1, 'b': 2}) - self.assertEqual(dict(**d), {'a': 1, 'b': 2}) + self.assertEqual(dict(d), {"a": 1, "b": 2}) + self.assertEqual(dict(**d), {"a": 1, "b": 2}) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_dereference.py b/tests/test_dereference.py 
index 9c565810..4730e2e3 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -10,18 +10,18 @@ from mongoengine.context_managers import query_counter class FieldTest(unittest.TestCase): - @classmethod def setUpClass(cls): - cls.db = connect(db='mongoenginetest') + cls.db = connect(db="mongoenginetest") @classmethod def tearDownClass(cls): - cls.db.drop_database('mongoenginetest') + cls.db.drop_database("mongoenginetest") def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -32,7 +32,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -47,7 +47,7 @@ class FieldTest(unittest.TestCase): group_obj = Group.objects.first() self.assertEqual(q, 1) - len(group_obj._data['members']) + len(group_obj._data["members"]) self.assertEqual(q, 1) len(group_obj.members) @@ -80,6 +80,7 @@ class FieldTest(unittest.TestCase): def test_list_item_dereference_dref_false(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -90,7 +91,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -105,14 +106,14 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + self.assertTrue(group_obj._data["members"]._dereferenced) # verifies that no additional queries gets executed # if we re-iterate over the ListField once it is # dereferenced [m for m in group_obj.members] self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + self.assertTrue(group_obj._data["members"]._dereferenced) # Document select_related with query_counter() as q: @@ -136,6 +137,7 @@ class FieldTest(unittest.TestCase): def test_list_item_dereference_orphan_dbref(self): """Ensure that orphan DBRef items in ListFields are dereferenced. 
""" + class User(Document): name = StringField() @@ -146,7 +148,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -164,14 +166,14 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + self.assertTrue(group_obj._data["members"]._dereferenced) # verifies that no additional queries gets executed # if we re-iterate over the ListField once it is # dereferenced [m for m in group_obj.members] self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + self.assertTrue(group_obj._data["members"]._dereferenced) User.drop_collection() Group.drop_collection() @@ -179,6 +181,7 @@ class FieldTest(unittest.TestCase): def test_list_item_dereference_dref_false_stores_as_type(self): """Ensure that DBRef items are stored as their type """ + class User(Document): my_id = IntField(primary_key=True) name = StringField() @@ -189,17 +192,18 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - user = User(my_id=1, name='user 1').save() + user = User(my_id=1, name="user 1").save() Group(members=User.objects).save() group = Group.objects.first() - self.assertEqual(Group._get_collection().find_one()['members'], [1]) + self.assertEqual(Group._get_collection().find_one()["members"], [1]) self.assertEqual(group.members, [user]) def test_handle_old_style_references(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -210,7 +214,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 26): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -227,8 +231,8 @@ class FieldTest(unittest.TestCase): group.save() group = Group.objects.first() - self.assertEqual(group.members[0].name, 'user 1') - self.assertEqual(group.members[-1].name, 'String!') + self.assertEqual(group.members[0].name, "user 1") + self.assertEqual(group.members[-1].name, "String!") def test_migrate_references(self): """Example of migrating ReferenceField storage @@ -249,8 +253,8 @@ class FieldTest(unittest.TestCase): group = Group(author=user, members=[user]).save() raw_data = Group._get_collection().find_one() - self.assertIsInstance(raw_data['author'], DBRef) - self.assertIsInstance(raw_data['members'][0], DBRef) + self.assertIsInstance(raw_data["author"], DBRef) + self.assertIsInstance(raw_data["members"][0], DBRef) group = Group.objects.first() self.assertEqual(group.author, user) @@ -264,8 +268,8 @@ class FieldTest(unittest.TestCase): # Migrate the data for g in Group.objects(): # Explicitly mark as changed so resets - g._mark_as_changed('author') - g._mark_as_changed('members') + g._mark_as_changed("author") + g._mark_as_changed("members") g.save() group = Group.objects.first() @@ -273,35 +277,36 @@ class FieldTest(unittest.TestCase): self.assertEqual(group.members, [user]) raw_data = Group._get_collection().find_one() - self.assertIsInstance(raw_data['author'], ObjectId) - self.assertIsInstance(raw_data['members'][0], ObjectId) + self.assertIsInstance(raw_data["author"], ObjectId) + self.assertIsInstance(raw_data["members"][0], ObjectId) def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. 
""" + class Employee(Document): name = StringField() - boss = ReferenceField('self') - friends = ListField(ReferenceField('self')) + boss = ReferenceField("self") + friends = ListField(ReferenceField("self")) Employee.drop_collection() - bill = Employee(name='Bill Lumbergh') + bill = Employee(name="Bill Lumbergh") bill.save() - michael = Employee(name='Michael Bolton') + michael = Employee(name="Michael Bolton") michael.save() - samir = Employee(name='Samir Nagheenanajar') + samir = Employee(name="Samir Nagheenanajar") samir.save() friends = [michael, samir] - peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) + peter = Employee(name="Peter Gibbons", boss=bill, friends=friends) peter.save() - Employee(name='Funky Gibbon', boss=bill, friends=friends).save() - Employee(name='Funky Gibbon', boss=bill, friends=friends).save() - Employee(name='Funky Gibbon', boss=bill, friends=friends).save() + Employee(name="Funky Gibbon", boss=bill, friends=friends).save() + Employee(name="Funky Gibbon", boss=bill, friends=friends).save() + Employee(name="Funky Gibbon", boss=bill, friends=friends).save() with query_counter() as q: self.assertEqual(q, 0) @@ -343,7 +348,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) def test_list_of_lists_of_references(self): - class User(Document): name = StringField() @@ -357,9 +361,9 @@ class FieldTest(unittest.TestCase): Post.drop_collection() SimpleList.drop_collection() - u1 = User.objects.create(name='u1') - u2 = User.objects.create(name='u2') - u3 = User.objects.create(name='u3') + u1 = User.objects.create(name="u1") + u2 = User.objects.create(name="u2") + u3 = User.objects.create(name="u3") SimpleList.objects.create(users=[u1, u2, u3]) self.assertEqual(SimpleList.objects.all()[0].users, [u1, u2, u3]) @@ -370,13 +374,14 @@ class FieldTest(unittest.TestCase): def test_circular_reference(self): """Ensure you can handle circular references """ + class Relation(EmbeddedDocument): name = StringField() - person = ReferenceField('Person') + person = ReferenceField("Person") class Person(Document): name = StringField() - relations = ListField(EmbeddedDocumentField('Relation')) + relations = ListField(EmbeddedDocumentField("Relation")) def __repr__(self): return "" % self.name @@ -398,14 +403,17 @@ class FieldTest(unittest.TestCase): daughter.relations.append(self_rel) daughter.save() - self.assertEqual("[, ]", "%s" % Person.objects()) + self.assertEqual( + "[, ]", "%s" % Person.objects() + ) def test_circular_reference_on_self(self): """Ensure you can handle circular references """ + class Person(Document): name = StringField() - relations = ListField(ReferenceField('self')) + relations = ListField(ReferenceField("self")) def __repr__(self): return "" % self.name @@ -424,14 +432,17 @@ class FieldTest(unittest.TestCase): daughter.relations.append(daughter) daughter.save() - self.assertEqual("[, ]", "%s" % Person.objects()) + self.assertEqual( + "[, ]", "%s" % Person.objects() + ) def test_circular_tree_reference(self): """Ensure you can handle circular references with more than one level """ + class Other(EmbeddedDocument): name = StringField() - friends = ListField(ReferenceField('Person')) + friends = ListField(ReferenceField("Person")) class Person(Document): name = StringField() @@ -443,8 +454,8 @@ class FieldTest(unittest.TestCase): Person.drop_collection() paul = Person(name="Paul").save() maria = Person(name="Maria").save() - julia = Person(name='Julia').save() - anna = Person(name='Anna').save() + julia = Person(name="Julia").save() + 
anna = Person(name="Anna").save() paul.other.friends = [maria, julia, anna] paul.other.name = "Paul's friends" @@ -464,11 +475,10 @@ class FieldTest(unittest.TestCase): self.assertEqual( "[, , , ]", - "%s" % Person.objects() + "%s" % Person.objects(), ) def test_generic_reference(self): - class UserA(Document): name = StringField() @@ -488,13 +498,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -518,7 +528,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Document select_related with query_counter() as q: @@ -534,7 +544,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -551,8 +561,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) - + self.assertIn("User", m.__class__.__name__) def test_generic_reference_orphan_dbref(self): """Ensure that generic orphan DBRef items in ListFields are dereferenced. @@ -577,13 +586,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -602,11 +611,11 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 4) - self.assertTrue(group_obj._data['members']._dereferenced) + self.assertTrue(group_obj._data["members"]._dereferenced) [m for m in group_obj.members] self.assertEqual(q, 4) - self.assertTrue(group_obj._data['members']._dereferenced) + self.assertTrue(group_obj._data["members"]._dereferenced) UserA.drop_collection() UserB.drop_collection() @@ -614,7 +623,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_list_field_complex(self): - class UserA(Document): name = StringField() @@ -634,13 +642,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -664,7 +672,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Document select_related with query_counter() as q: @@ -680,7 +688,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -697,7 +705,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) UserA.drop_collection() 
UserB.drop_collection() @@ -705,7 +713,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_map_field_reference(self): - class User(Document): name = StringField() @@ -717,7 +724,7 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() members.append(user) @@ -752,7 +759,7 @@ class FieldTest(unittest.TestCase): for k, m in iteritems(group_obj.members): self.assertIsInstance(m, User) - # Queryset select_related + # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) @@ -770,7 +777,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_dict_field(self): - class UserA(Document): name = StringField() @@ -790,13 +796,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -819,7 +825,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Document select_related with query_counter() as q: @@ -835,7 +841,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -852,7 +858,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) Group.objects.delete() Group().save() @@ -873,10 +879,9 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_dict_field_no_field_inheritance(self): - class UserA(Document): name = StringField() - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Group(Document): members = DictField() @@ -886,7 +891,7 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() members += [a] @@ -949,7 +954,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_generic_reference_map_field(self): - class UserA(Document): name = StringField() @@ -969,13 +973,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -998,7 +1002,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Document select_related with query_counter() as q: @@ -1014,7 +1018,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) # Queryset select_related with query_counter() as q: @@ -1031,7 +1035,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 4) for k, m in iteritems(group_obj.members): - 
self.assertIn('User', m.__class__.__name__) + self.assertIn("User", m.__class__.__name__) Group.objects.delete() Group().save() @@ -1051,7 +1055,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_multidirectional_lists(self): - class Asset(Document): name = StringField(max_length=250, required=True) path = StringField() @@ -1062,10 +1065,10 @@ class FieldTest(unittest.TestCase): Asset.drop_collection() - root = Asset(name='', path="/", title="Site Root") + root = Asset(name="", path="/", title="Site Root") root.save() - company = Asset(name='company', title='Company', parent=root, parents=[root]) + company = Asset(name="company", title="Company", parent=root, parents=[root]) company.save() root.children = [company] @@ -1076,7 +1079,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(company.parents, [root]) def test_dict_in_dbref_instance(self): - class Person(Document): name = StringField(max_length=250, required=True) @@ -1087,34 +1089,35 @@ class FieldTest(unittest.TestCase): Person.drop_collection() Room.drop_collection() - bob = Person.objects.create(name='Bob') + bob = Person.objects.create(name="Bob") bob.save() - sarah = Person.objects.create(name='Sarah') + sarah = Person.objects.create(name="Sarah") sarah.save() room_101 = Room.objects.create(number="101") room_101.staffs_with_position = [ - {'position_key': 'window', 'staff': sarah}, - {'position_key': 'door', 'staff': bob.to_dbref()}] + {"position_key": "window", "staff": sarah}, + {"position_key": "door", "staff": bob.to_dbref()}, + ] room_101.save() room = Room.objects.first().select_related() - self.assertEqual(room.staffs_with_position[0]['staff'], sarah) - self.assertEqual(room.staffs_with_position[1]['staff'], bob) + self.assertEqual(room.staffs_with_position[0]["staff"], sarah) + self.assertEqual(room.staffs_with_position[1]["staff"], bob) def test_document_reload_no_inheritance(self): class Foo(Document): - meta = {'allow_inheritance': False} - bar = ReferenceField('Bar') - baz = ReferenceField('Baz') + meta = {"allow_inheritance": False} + bar = ReferenceField("Bar") + baz = ReferenceField("Baz") class Bar(Document): - meta = {'allow_inheritance': False} - msg = StringField(required=True, default='Blammo!') + meta = {"allow_inheritance": False} + msg = StringField(required=True, default="Blammo!") class Baz(Document): - meta = {'allow_inheritance': False} - msg = StringField(required=True, default='Kaboom!') + meta = {"allow_inheritance": False} + msg = StringField(required=True, default="Kaboom!") Foo.drop_collection() Bar.drop_collection() @@ -1138,11 +1141,14 @@ class FieldTest(unittest.TestCase): Ensure reloading a document with multiple similar id in different collections doesn't mix them. 
""" + class Topic(Document): id = IntField(primary_key=True) + class User(Document): id = IntField(primary_key=True) name = StringField() + class Message(Document): id = IntField(primary_key=True) topic = ReferenceField(Topic) @@ -1154,23 +1160,24 @@ class FieldTest(unittest.TestCase): # All objects share the same id, but each in a different collection topic = Topic(id=1).save() - user = User(id=1, name='user-name').save() + user = User(id=1, name="user-name").save() Message(id=1, topic=topic, author=user).save() concurrent_change_user = User.objects.get(id=1) - concurrent_change_user.name = 'new-name' + concurrent_change_user.name = "new-name" concurrent_change_user.save() - self.assertNotEqual(user.name, 'new-name') + self.assertNotEqual(user.name, "new-name") msg = Message.objects.get(id=1) msg.reload() self.assertEqual(msg.topic, topic) self.assertEqual(msg.author, user) - self.assertEqual(msg.author.name, 'new-name') + self.assertEqual(msg.author.name, "new-name") def test_list_lookup_not_checked_in_map(self): """Ensure we dereference list data correctly """ + class Comment(Document): id = IntField(primary_key=True) text = StringField() @@ -1182,8 +1189,8 @@ class FieldTest(unittest.TestCase): Comment.drop_collection() Message.drop_collection() - c1 = Comment(id=0, text='zero').save() - c2 = Comment(id=1, text='one').save() + c1 = Comment(id=0, text="zero").save() + c2 = Comment(id=1, text="one").save() Message(id=1, comments=[c1, c2]).save() msg = Message.objects.get(id=1) @@ -1193,6 +1200,7 @@ class FieldTest(unittest.TestCase): def test_list_item_dereference_dref_false_save_doesnt_cause_extra_queries(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -1204,7 +1212,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() Group(name="Test", members=User.objects).save() @@ -1222,6 +1230,7 @@ class FieldTest(unittest.TestCase): def test_list_item_dereference_dref_true_save_doesnt_cause_extra_queries(self): """Ensure that DBRef items in ListFields are dereferenced. 
""" + class User(Document): name = StringField() @@ -1233,7 +1242,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() Group(name="Test", members=User.objects).save() @@ -1249,7 +1258,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) def test_generic_reference_save_doesnt_cause_extra_queries(self): - class UserA(Document): name = StringField() @@ -1270,9 +1278,9 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i).save() - b = UserB(name='User B %s' % i).save() - c = UserC(name='User C %s' % i).save() + a = UserA(name="User A %s" % i).save() + b = UserB(name="User B %s" % i).save() + c = UserC(name="User C %s" % i).save() members += [a, b, c] @@ -1292,7 +1300,7 @@ class FieldTest(unittest.TestCase): def test_objectid_reference_across_databases(self): # mongoenginetest - Is default connection alias from setUp() # Register Aliases - register_connection('testdb-1', 'mongoenginetest2') + register_connection("testdb-1", "mongoenginetest2") class User(Document): name = StringField() @@ -1311,16 +1319,17 @@ class FieldTest(unittest.TestCase): # Can't use query_counter across databases - so test the _data object book = Book.objects.first() - self.assertNotIsInstance(book._data['author'], User) + self.assertNotIsInstance(book._data["author"], User) book.select_related() - self.assertIsInstance(book._data['author'], User) + self.assertIsInstance(book._data["author"], User) def test_non_ascii_pk(self): """ Ensure that dbref conversion to string does not fail when non-ascii characters are used in primary key """ + class Brand(Document): title = StringField(max_length=255, primary_key=True) @@ -1341,7 +1350,7 @@ class FieldTest(unittest.TestCase): def test_dereferencing_embedded_listfield_referencefield(self): class Tag(Document): - meta = {'collection': 'tags'} + meta = {"collection": "tags"} name = StringField() class Post(EmbeddedDocument): @@ -1349,22 +1358,21 @@ class FieldTest(unittest.TestCase): tags = ListField(ReferenceField("Tag", dbref=True)) class Page(Document): - meta = {'collection': 'pages'} + meta = {"collection": "pages"} tags = ListField(ReferenceField("Tag", dbref=True)) posts = ListField(EmbeddedDocumentField(Post)) Tag.drop_collection() Page.drop_collection() - tag = Tag(name='test').save() - post = Post(body='test body', tags=[tag]) + tag = Tag(name="test").save() + post = Post(body="test body", tags=[tag]) Page(tags=[tag], posts=[post]).save() page = Page.objects.first() self.assertEqual(page.tags[0], page.posts[0].tags[0]) def test_select_related_follows_embedded_referencefields(self): - class Song(Document): title = StringField() @@ -1390,5 +1398,5 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py index cacdce8b..5e3aa493 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -12,7 +12,6 @@ READ_PREF = ReadPreference.SECONDARY class ConnectionTest(unittest.TestCase): - def setUp(self): mongoengine.connection._connection_settings = {} mongoengine.connection._connections = {} @@ -28,9 +27,11 @@ class ConnectionTest(unittest.TestCase): """ try: - conn = mongoengine.connect(db='mongoenginetest', - host="mongodb://localhost/mongoenginetest?replicaSet=rs", - read_preference=READ_PREF) + conn = 
mongoengine.connect( + db="mongoenginetest", + host="mongodb://localhost/mongoenginetest?replicaSet=rs", + read_preference=READ_PREF, + ) except MongoEngineConnectionError as e: return @@ -41,5 +42,5 @@ class ConnectionTest(unittest.TestCase): self.assertEqual(conn.read_preference, READ_PREF) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_signals.py b/tests/test_signals.py index 34cb43c3..1d0607d7 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -20,7 +20,7 @@ class SignalTests(unittest.TestCase): return signal_output def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") class Author(Document): # Make the id deterministic for easier testing @@ -32,60 +32,63 @@ class SignalTests(unittest.TestCase): @classmethod def pre_init(cls, sender, document, *args, **kwargs): - signal_output.append('pre_init signal, %s' % cls.__name__) - signal_output.append(kwargs['values']) + signal_output.append("pre_init signal, %s" % cls.__name__) + signal_output.append(kwargs["values"]) @classmethod def post_init(cls, sender, document, **kwargs): - signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created)) + signal_output.append( + "post_init signal, %s, document._created = %s" + % (document, document._created) + ) @classmethod def pre_save(cls, sender, document, **kwargs): - signal_output.append('pre_save signal, %s' % document) + signal_output.append("pre_save signal, %s" % document) signal_output.append(kwargs) @classmethod def pre_save_post_validation(cls, sender, document, **kwargs): - signal_output.append('pre_save_post_validation signal, %s' % document) - if kwargs.pop('created', False): - signal_output.append('Is created') + signal_output.append("pre_save_post_validation signal, %s" % document) + if kwargs.pop("created", False): + signal_output.append("Is created") else: - signal_output.append('Is updated') + signal_output.append("Is updated") signal_output.append(kwargs) @classmethod def post_save(cls, sender, document, **kwargs): dirty_keys = document._delta()[0].keys() + document._delta()[1].keys() - signal_output.append('post_save signal, %s' % document) - signal_output.append('post_save dirty keys, %s' % dirty_keys) - if kwargs.pop('created', False): - signal_output.append('Is created') + signal_output.append("post_save signal, %s" % document) + signal_output.append("post_save dirty keys, %s" % dirty_keys) + if kwargs.pop("created", False): + signal_output.append("Is created") else: - signal_output.append('Is updated') + signal_output.append("Is updated") signal_output.append(kwargs) @classmethod def pre_delete(cls, sender, document, **kwargs): - signal_output.append('pre_delete signal, %s' % document) + signal_output.append("pre_delete signal, %s" % document) signal_output.append(kwargs) @classmethod def post_delete(cls, sender, document, **kwargs): - signal_output.append('post_delete signal, %s' % document) + signal_output.append("post_delete signal, %s" % document) signal_output.append(kwargs) @classmethod def pre_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('pre_bulk_insert signal, %s' % documents) + signal_output.append("pre_bulk_insert signal, %s" % documents) signal_output.append(kwargs) @classmethod def post_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('post_bulk_insert signal, %s' % documents) - if kwargs.pop('loaded', False): - signal_output.append('Is loaded') + signal_output.append("post_bulk_insert signal, 
%s" % documents) + if kwargs.pop("loaded", False): + signal_output.append("Is loaded") else: - signal_output.append('Not loaded') + signal_output.append("Not loaded") signal_output.append(kwargs) self.Author = Author @@ -101,12 +104,12 @@ class SignalTests(unittest.TestCase): @classmethod def pre_delete(cls, sender, document, **kwargs): - signal_output.append('pre_delete signal, %s' % document) + signal_output.append("pre_delete signal, %s" % document) signal_output.append(kwargs) @classmethod def post_delete(cls, sender, document, **kwargs): - signal_output.append('post_delete signal, %s' % document) + signal_output.append("post_delete signal, %s" % document) signal_output.append(kwargs) self.Another = Another @@ -117,11 +120,11 @@ class SignalTests(unittest.TestCase): @classmethod def post_save(cls, sender, document, **kwargs): - if 'created' in kwargs: - if kwargs['created']: - signal_output.append('Is created') + if "created" in kwargs: + if kwargs["created"]: + signal_output.append("Is created") else: - signal_output.append('Is updated') + signal_output.append("Is updated") self.ExplicitId = ExplicitId ExplicitId.drop_collection() @@ -136,9 +139,13 @@ class SignalTests(unittest.TestCase): @classmethod def pre_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('pre_bulk_insert signal, %s' % - [(doc, {'active': documents[n].active}) - for n, doc in enumerate(documents)]) + signal_output.append( + "pre_bulk_insert signal, %s" + % [ + (doc, {"active": documents[n].active}) + for n, doc in enumerate(documents) + ] + ) # make changes here, this is just an example - # it could be anything that needs pre-validation or looks-ups before bulk bulk inserting @@ -149,13 +156,17 @@ class SignalTests(unittest.TestCase): @classmethod def post_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('post_bulk_insert signal, %s' % - [(doc, {'active': documents[n].active}) - for n, doc in enumerate(documents)]) - if kwargs.pop('loaded', False): - signal_output.append('Is loaded') + signal_output.append( + "post_bulk_insert signal, %s" + % [ + (doc, {"active": documents[n].active}) + for n, doc in enumerate(documents) + ] + ) + if kwargs.pop("loaded", False): + signal_output.append("Is loaded") else: - signal_output.append('Not loaded') + signal_output.append("Not loaded") signal_output.append(kwargs) self.Post = Post @@ -178,7 +189,9 @@ class SignalTests(unittest.TestCase): signals.pre_init.connect(Author.pre_init, sender=Author) signals.post_init.connect(Author.post_init, sender=Author) signals.pre_save.connect(Author.pre_save, sender=Author) - signals.pre_save_post_validation.connect(Author.pre_save_post_validation, sender=Author) + signals.pre_save_post_validation.connect( + Author.pre_save_post_validation, sender=Author + ) signals.post_save.connect(Author.post_save, sender=Author) signals.pre_delete.connect(Author.pre_delete, sender=Author) signals.post_delete.connect(Author.post_delete, sender=Author) @@ -199,7 +212,9 @@ class SignalTests(unittest.TestCase): signals.post_delete.disconnect(self.Author.post_delete) signals.pre_delete.disconnect(self.Author.pre_delete) signals.post_save.disconnect(self.Author.post_save) - signals.pre_save_post_validation.disconnect(self.Author.pre_save_post_validation) + signals.pre_save_post_validation.disconnect( + self.Author.pre_save_post_validation + ) signals.pre_save.disconnect(self.Author.pre_save) signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) 
@@ -236,203 +251,236 @@ class SignalTests(unittest.TestCase): """ Model saves should throw some signals. """ def create_author(): - self.Author(name='Bill Shakespeare') + self.Author(name="Bill Shakespeare") def bulk_create_author_with_load(): - a1 = self.Author(name='Bill Shakespeare') + a1 = self.Author(name="Bill Shakespeare") self.Author.objects.insert([a1], load_bulk=True) def bulk_create_author_without_load(): - a1 = self.Author(name='Bill Shakespeare') + a1 = self.Author(name="Bill Shakespeare") self.Author.objects.insert([a1], load_bulk=False) def load_existing_author(): - a = self.Author(name='Bill Shakespeare') + a = self.Author(name="Bill Shakespeare") a.save() self.get_signal_output(lambda: None) # eliminate signal output - a1 = self.Author.objects(name='Bill Shakespeare')[0] + a1 = self.Author.objects(name="Bill Shakespeare")[0] - self.assertEqual(self.get_signal_output(create_author), [ - "pre_init signal, Author", - {'name': 'Bill Shakespeare'}, - "post_init signal, Bill Shakespeare, document._created = True", - ]) + self.assertEqual( + self.get_signal_output(create_author), + [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + ], + ) - a1 = self.Author(name='Bill Shakespeare') - self.assertEqual(self.get_signal_output(a1.save), [ - "pre_save signal, Bill Shakespeare", - {}, - "pre_save_post_validation signal, Bill Shakespeare", - "Is created", - {}, - "post_save signal, Bill Shakespeare", - "post_save dirty keys, ['name']", - "Is created", - {} - ]) + a1 = self.Author(name="Bill Shakespeare") + self.assertEqual( + self.get_signal_output(a1.save), + [ + "pre_save signal, Bill Shakespeare", + {}, + "pre_save_post_validation signal, Bill Shakespeare", + "Is created", + {}, + "post_save signal, Bill Shakespeare", + "post_save dirty keys, ['name']", + "Is created", + {}, + ], + ) a1.reload() - a1.name = 'William Shakespeare' - self.assertEqual(self.get_signal_output(a1.save), [ - "pre_save signal, William Shakespeare", - {}, - "pre_save_post_validation signal, William Shakespeare", - "Is updated", - {}, - "post_save signal, William Shakespeare", - "post_save dirty keys, ['name']", - "Is updated", - {} - ]) + a1.name = "William Shakespeare" + self.assertEqual( + self.get_signal_output(a1.save), + [ + "pre_save signal, William Shakespeare", + {}, + "pre_save_post_validation signal, William Shakespeare", + "Is updated", + {}, + "post_save signal, William Shakespeare", + "post_save dirty keys, ['name']", + "Is updated", + {}, + ], + ) - self.assertEqual(self.get_signal_output(a1.delete), [ - 'pre_delete signal, William Shakespeare', - {}, - 'post_delete signal, William Shakespeare', - {} - ]) + self.assertEqual( + self.get_signal_output(a1.delete), + [ + "pre_delete signal, William Shakespeare", + {}, + "post_delete signal, William Shakespeare", + {}, + ], + ) - self.assertEqual(self.get_signal_output(load_existing_author), [ - "pre_init signal, Author", - {'id': 2, 'name': 'Bill Shakespeare'}, - "post_init signal, Bill Shakespeare, document._created = False" - ]) + self.assertEqual( + self.get_signal_output(load_existing_author), + [ + "pre_init signal, Author", + {"id": 2, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + ], + ) - self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [ - 'pre_init signal, Author', - {'name': 'Bill Shakespeare'}, - 'post_init signal, Bill Shakespeare, document._created = True', - 'pre_bulk_insert signal, 
[]', - {}, - 'pre_init signal, Author', - {'id': 3, 'name': 'Bill Shakespeare'}, - 'post_init signal, Bill Shakespeare, document._created = False', - 'post_bulk_insert signal, []', - 'Is loaded', - {} - ]) + self.assertEqual( + self.get_signal_output(bulk_create_author_with_load), + [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {}, + "pre_init signal, Author", + {"id": 3, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + "post_bulk_insert signal, []", + "Is loaded", + {}, + ], + ) - self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [ - "pre_init signal, Author", - {'name': 'Bill Shakespeare'}, - "post_init signal, Bill Shakespeare, document._created = True", - "pre_bulk_insert signal, []", - {}, - "post_bulk_insert signal, []", - "Not loaded", - {} - ]) + self.assertEqual( + self.get_signal_output(bulk_create_author_without_load), + [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {}, + "post_bulk_insert signal, []", + "Not loaded", + {}, + ], + ) def test_signal_kwargs(self): """ Make sure signal_kwargs is passed to signals calls. """ def live_and_let_die(): - a = self.Author(name='Bill Shakespeare') - a.save(signal_kwargs={'live': True, 'die': False}) - a.delete(signal_kwargs={'live': False, 'die': True}) + a = self.Author(name="Bill Shakespeare") + a.save(signal_kwargs={"live": True, "die": False}) + a.delete(signal_kwargs={"live": False, "die": True}) - self.assertEqual(self.get_signal_output(live_and_let_die), [ - "pre_init signal, Author", - {'name': 'Bill Shakespeare'}, - "post_init signal, Bill Shakespeare, document._created = True", - "pre_save signal, Bill Shakespeare", - {'die': False, 'live': True}, - "pre_save_post_validation signal, Bill Shakespeare", - "Is created", - {'die': False, 'live': True}, - "post_save signal, Bill Shakespeare", - "post_save dirty keys, ['name']", - "Is created", - {'die': False, 'live': True}, - 'pre_delete signal, Bill Shakespeare', - {'die': True, 'live': False}, - 'post_delete signal, Bill Shakespeare', - {'die': True, 'live': False} - ]) + self.assertEqual( + self.get_signal_output(live_and_let_die), + [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_save signal, Bill Shakespeare", + {"die": False, "live": True}, + "pre_save_post_validation signal, Bill Shakespeare", + "Is created", + {"die": False, "live": True}, + "post_save signal, Bill Shakespeare", + "post_save dirty keys, ['name']", + "Is created", + {"die": False, "live": True}, + "pre_delete signal, Bill Shakespeare", + {"die": True, "live": False}, + "post_delete signal, Bill Shakespeare", + {"die": True, "live": False}, + ], + ) def bulk_create_author(): - a1 = self.Author(name='Bill Shakespeare') - self.Author.objects.insert([a1], signal_kwargs={'key': True}) + a1 = self.Author(name="Bill Shakespeare") + self.Author.objects.insert([a1], signal_kwargs={"key": True}) - self.assertEqual(self.get_signal_output(bulk_create_author), [ - 'pre_init signal, Author', - {'name': 'Bill Shakespeare'}, - 'post_init signal, Bill Shakespeare, document._created = True', - 'pre_bulk_insert signal, []', - {'key': True}, - 'pre_init signal, Author', - {'id': 2, 'name': 'Bill Shakespeare'}, - 'post_init signal, Bill 
Shakespeare, document._created = False', - 'post_bulk_insert signal, []', - 'Is loaded', - {'key': True} - ]) + self.assertEqual( + self.get_signal_output(bulk_create_author), + [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {"key": True}, + "pre_init signal, Author", + {"id": 2, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + "post_bulk_insert signal, []", + "Is loaded", + {"key": True}, + ], + ) def test_queryset_delete_signals(self): """ Queryset delete should throw some signals. """ - self.Another(name='Bill Shakespeare').save() - self.assertEqual(self.get_signal_output(self.Another.objects.delete), [ - 'pre_delete signal, Bill Shakespeare', - {}, - 'post_delete signal, Bill Shakespeare', - {} - ]) + self.Another(name="Bill Shakespeare").save() + self.assertEqual( + self.get_signal_output(self.Another.objects.delete), + [ + "pre_delete signal, Bill Shakespeare", + {}, + "post_delete signal, Bill Shakespeare", + {}, + ], + ) def test_signals_with_explicit_doc_ids(self): """ Model saves must have a created flag the first time.""" ei = self.ExplicitId(id=123) # post save must received the created flag, even if there's already # an object id present - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) # second time, it must be an update - self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) + self.assertEqual(self.get_signal_output(ei.save), ["Is updated"]) def test_signals_with_switch_collection(self): ei = self.ExplicitId(id=123) ei.switch_collection("explicit__1") - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) ei.switch_collection("explicit__1") - self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) + self.assertEqual(self.get_signal_output(ei.save), ["Is updated"]) ei.switch_collection("explicit__1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) ei.switch_collection("explicit__1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) def test_signals_with_switch_db(self): - connect('mongoenginetest') - register_connection('testdb-1', 'mongoenginetest2') + connect("mongoenginetest") + register_connection("testdb-1", "mongoenginetest2") ei = self.ExplicitId(id=123) ei.switch_db("testdb-1") - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) ei.switch_db("testdb-1") - self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) + self.assertEqual(self.get_signal_output(ei.save), ["Is updated"]) ei.switch_db("testdb-1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) ei.switch_db("testdb-1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) def test_signals_bulk_insert(self): def bulk_set_active_post(): posts = [ - self.Post(title='Post 1'), - self.Post(title='Post 2'), - self.Post(title='Post 3') + self.Post(title="Post 1"), + 
self.Post(title="Post 2"), + self.Post(title="Post 3"), ] self.Post.objects.insert(posts) results = self.get_signal_output(bulk_set_active_post) - self.assertEqual(results, [ - "pre_bulk_insert signal, [(, {'active': False}), (, {'active': False}), (, {'active': False})]", - {}, - "post_bulk_insert signal, [(, {'active': True}), (, {'active': True}), (, {'active': True})]", - 'Is loaded', - {} - ]) + self.assertEqual( + results, + [ + "pre_bulk_insert signal, [(, {'active': False}), (, {'active': False}), (, {'active': False})]", + {}, + "post_bulk_insert signal, [(, {'active': True}), (, {'active': True}), (, {'active': True})]", + "Is loaded", + {}, + ], + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_utils.py b/tests/test_utils.py index 562cc1ff..2d1e8b00 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -7,32 +7,33 @@ signal_output = [] class LazyRegexCompilerTest(unittest.TestCase): - def test_lazy_regex_compiler_verify_laziness_of_descriptor(self): class UserEmail(object): - EMAIL_REGEX = LazyRegexCompiler('@', flags=32) + EMAIL_REGEX = LazyRegexCompiler("@", flags=32) - descriptor = UserEmail.__dict__['EMAIL_REGEX'] + descriptor = UserEmail.__dict__["EMAIL_REGEX"] self.assertIsNone(descriptor._compiled_regex) regex = UserEmail.EMAIL_REGEX - self.assertEqual(regex, re.compile('@', flags=32)) - self.assertEqual(regex.search('user@domain.com').group(), '@') + self.assertEqual(regex, re.compile("@", flags=32)) + self.assertEqual(regex.search("user@domain.com").group(), "@") user_email = UserEmail() self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX) def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self): class UserEmail(object): - EMAIL_REGEX = LazyRegexCompiler('@') + EMAIL_REGEX = LazyRegexCompiler("@") user_email = UserEmail() with self.assertRaises(AttributeError): - user_email.EMAIL_REGEX = re.compile('@') + user_email.EMAIL_REGEX = re.compile("@") def test_lazy_regex_compiler_verify_can_override_class_attr(self): class UserEmail(object): - EMAIL_REGEX = LazyRegexCompiler('@') + EMAIL_REGEX = LazyRegexCompiler("@") - UserEmail.EMAIL_REGEX = re.compile('cookies') - self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies') + UserEmail.EMAIL_REGEX = re.compile("cookies") + self.assertEqual( + UserEmail.EMAIL_REGEX.search("Cake & cookies").group(), "cookies" + ) diff --git a/tests/utils.py b/tests/utils.py index 27d5ada7..eb3f016f 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,7 +8,7 @@ from mongoengine.connection import get_db, disconnect_all from mongoengine.mongodb_support import get_mongodb_version -MONGO_TEST_DB = 'mongoenginetest' # standard name for the test database +MONGO_TEST_DB = "mongoenginetest" # standard name for the test database class MongoDBTestCase(unittest.TestCase): @@ -53,12 +53,15 @@ def _decorated_with_ver_requirement(func, mongo_version_req, oper): :param mongo_version_req: The mongodb version requirement (tuple(int, int)) :param oper: The operator to apply (e.g: operator.ge) """ + def _inner(*args, **kwargs): mongodb_v = get_mongodb_version() if oper(mongodb_v, mongo_version_req): return func(*args, **kwargs) - raise SkipTest('Needs MongoDB v{}+'.format('.'.join(str(n) for n in mongo_version_req))) + raise SkipTest( + "Needs MongoDB v{}+".format(".".join(str(n) for n in mongo_version_req)) + ) _inner.__name__ = func.__name__ _inner.__doc__ = func.__doc__ From 82f0eb1cbc7b068b643df690680cd1dd5424f529 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Thu, 27 Jun 2019 15:07:02 +0200 Subject: [PATCH 018/216] Add a max_length param to the ListField (#2107) This is similar to the `max_length` param of a `StringField`. Sometimes you don't want your lists to be able to grow indefinitely. --- mongoengine/fields.py | 15 ++++++++++++++- tests/fields/fields.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 7ab2276d..9b9fef6e 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -916,8 +916,9 @@ class ListField(ComplexBaseField): Required means it cannot be empty - as the default for ListFields is [] """ - def __init__(self, field=None, **kwargs): + def __init__(self, field=None, max_length=None, **kwargs): self.field = field + self.max_length = max_length kwargs.setdefault("default", lambda: []) super(ListField, self).__init__(**kwargs) @@ -939,9 +940,21 @@ class ListField(ComplexBaseField): """Make sure that a list of valid fields is being used.""" if not isinstance(value, (list, tuple, BaseQuerySet)): self.error("Only lists and tuples may be used in a list field") + + # Validate that max_length is not exceeded. + # NOTE It's still possible to bypass this enforcement by using $push. + # However, if the document is reloaded after $push and then re-saved, + # the validation error will be raised. + if self.max_length is not None and len(value) > self.max_length: + self.error("List is too long") + super(ListField, self).validate(value) def prepare_query_value(self, op, value): + # Validate that the `set` operator doesn't contain more items than `max_length`. + if op == "set" and self.max_length is not None and len(value) > self.max_length: + self.error("List is too long") + if self.field: # If the value is iterable and it's not a string nor a diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 87acf27f..b77ba753 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1010,6 +1010,38 @@ class FieldTest(MongoDBTestCase): e.mapping = ["abc"] e.save() + def test_list_field_max_length(self): + """Ensure ListField's max_length is respected.""" + + class Foo(Document): + items = ListField(IntField(), max_length=5) + + foo = Foo() + for i in range(1, 7): + foo.items.append(i) + if i < 6: + foo.save() + else: + with self.assertRaises(ValidationError) as cm: + foo.save() + self.assertIn("List is too long", str(cm.exception)) + + def test_list_field_max_length(self): + """Ensure ListField's max_length is respected.""" + + class Foo(Document): + items = ListField(IntField(), max_length=5) + + foo = Foo() + for i in range(1, 7): + foo.items.append(i) + if i < 6: + foo.save() + else: + with self.assertRaises(ValidationError) as cm: + foo.save() + self.assertIn("List is too long", str(cm.exception)) + def test_list_field_rejects_strings(self): """Strings aren't valid list field data types.""" From 609f50d26191c1f2541c241de341985cc85427ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Thu, 27 Jun 2019 16:45:31 +0200 Subject: [PATCH 019/216] Fix the duplicate ListField max_length test (#2110) This is a follow-up after #2107. Not sure what happened here, but in https://github.com/MongoEngine/mongoengine/pull/2107/commits/87194856ecc5076bec2a186bae60c5d5b25c01ed I committed a copy-paste of the same test instead of a test validating the max_length behavior along with a "set" operator. 
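For readers skimming the series, here is a minimal usage sketch of the `max_length` behaviour introduced in #2107 and exercised by the corrected test below. The document class, field name, and database name are illustrative only (they do not appear in these patches), and the snippet assumes a local MongoDB instance is reachable through `connect()`:

```python
# Hypothetical model using the new ListField(max_length=...) option.
from mongoengine import Document, IntField, ListField, ValidationError, connect

connect("maxlength_demo")  # assumes a running local mongod


class ShoppingCart(Document):
    item_ids = ListField(IntField(), max_length=3)


cart = ShoppingCart.objects.create(item_ids=[1, 2, 3])  # within the limit, saves fine

cart.item_ids.append(4)
try:
    cart.save()  # document-level validation: 4 items exceed max_length=3
except ValidationError as exc:
    print(exc)  # message contains "List is too long"
cart.item_ids.pop()  # restore a valid local state

try:
    # The "set" operator is validated as well -- the case fixed by #2110.
    cart.modify(set__item_ids=[1, 2, 3, 4])
except ValidationError as exc:
    print(exc)
```

Both failure paths raise the same "List is too long" validation error, which is what the corrected test below asserts.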
--- tests/fields/fields.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/tests/fields/fields.py b/tests/fields/fields.py index b77ba753..49e9508c 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1026,21 +1026,16 @@ class FieldTest(MongoDBTestCase): foo.save() self.assertIn("List is too long", str(cm.exception)) - def test_list_field_max_length(self): - """Ensure ListField's max_length is respected.""" + def test_list_field_max_length_set_operator(self): + """Ensure ListField's max_length is respected for a "set" operator.""" class Foo(Document): - items = ListField(IntField(), max_length=5) + items = ListField(IntField(), max_length=3) - foo = Foo() - for i in range(1, 7): - foo.items.append(i) - if i < 6: - foo.save() - else: - with self.assertRaises(ValidationError) as cm: - foo.save() - self.assertIn("List is too long", str(cm.exception)) + foo = Foo.objects.create(items=[1, 2, 3]) + with self.assertRaises(ValidationError) as cm: + foo.modify(set__items=[1, 2, 3, 4]) + self.assertIn("List is too long", str(cm.exception)) def test_list_field_rejects_strings(self): """Strings aren't valid list field data types.""" From 2769967e1e3390d1c99bd666f846a3fbaa0061c2 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Thu, 27 Jun 2019 17:41:29 +0200 Subject: [PATCH 020/216] Update the changelog [ci skip] --- docs/changelog.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b30bd52f..d053a8db 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,8 +6,10 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). -- BREAKING CHANGE: Drop support for positional arguments when instantiating a document. #2103 +- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - From now on keyword arguments (e.g. `Doc(field_name=value)`) are required. +- The codebase is now formatted using `black`. #2109 +- `ListField` now accepts an optional `max_length` parameter. #2110 Changes in 0.18.2 ================= From 9170eea784e7d7ac344779f818672a572a7258aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Sun, 30 Jun 2019 09:23:32 +0200 Subject: [PATCH 021/216] Rename MongoEngineConnectionError to ConnectionFailure (#2111) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I originally changed the exception name from `ConnectionError` to `MongoEngineConnectionError` in https://github.com/MongoEngine/mongoengine/pull/1428/commits/b02904ee750a30f8e2246a326376b40358543101, inspired by landscape.io's package health report, which argued that `ConnectionError` is already a built-in exception in Python 3 (which it is: https://docs.python.org/3/library/exceptions.html#ConnectionError). I do agree that we shouldn't override built-in exceptions. [0] That said, it’s silly to add a "MongoEngine" prefix to any class within the `mongoengine` module (and *especially* to *just one* exception class out of many). I've decided to do what PyMongo does ( https://github.com/mongodb/mongo-python-driver/blob/8855a510a80a30268ffd4b90be65fb26929648e2/pymongo/errors.py#L59) and call this exception `ConnectionFailure`. Note that this is a breaking change and people will need to rename `MongoEngineConnectionError`s in their code to `ConnectionFailure`. 
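To make the migration concrete, here is a minimal sketch of what the rename looks like in downstream code; only the import and the exception name change, and the surrounding logic is hypothetical rather than taken from this patch (it assumes no default connection has been registered yet):

```python
# Code that previously caught MongoEngineConnectionError now catches
# ConnectionFailure instead; nothing else needs to change.
from mongoengine.connection import ConnectionFailure, get_connection
# Previously: from mongoengine.connection import MongoEngineConnectionError

try:
    get_connection()  # no default connection registered yet
except ConnectionFailure as exc:
    print("Cannot talk to MongoDB: %s" % exc)
```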
Moreover, if they use PyMongo's `ConnectionFailure` for anything, they'll need to take extra care to avoid conflicts, e.g. by using: ``` from mongoengine import ConnectionFailure as MongoEngineConnectionFailure ``` [0] Note that some popular packages still overwrite `ConnectionError`, e.g. https://github.com/kennethreitz/requests/blob/4983a9bde39c6320aa4f3e34e50dac6e263dab6f/requests/exceptions.py#L32 or https://github.com/andymccurdy/redis-py/blob/0be4d2920684345eb52115c7142c39d65356e7d4/redis/exceptions.py#L8 --- docs/changelog.rst | 2 ++ mongoengine/connection.py | 16 +++++++--------- tests/test_connection.py | 22 +++++++++++----------- tests/test_replicaset_connection.py | 4 ++-- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d053a8db..7eac72df 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,8 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- BREAKING CHANGE: Renamed `MongoEngineConnectionError` to `ConnectionFailure` #2111 + - If you catch/use `MongoEngineConnectionError` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - From now on keyword arguments (e.g. `Doc(field_name=value)`) are required. - The codebase is now formatted using `black`. #2109 diff --git a/mongoengine/connection.py b/mongoengine/connection.py index ef0dd27c..b1e12a96 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -5,7 +5,7 @@ import six __all__ = [ "DEFAULT_CONNECTION_NAME", "DEFAULT_DATABASE_NAME", - "MongoEngineConnectionError", + "ConnectionFailure", "connect", "disconnect", "disconnect_all", @@ -27,7 +27,7 @@ _dbs = {} READ_PREFERENCE = ReadPreference.PRIMARY -class MongoEngineConnectionError(Exception): +class ConnectionFailure(Exception): """Error raised when the database connection can't be established or when a connection with a requested alias can't be retrieved. """ @@ -252,13 +252,13 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): return _connections[alias] # Validate that the requested alias exists in the _connection_settings. - # Raise MongoEngineConnectionError if it doesn't. + # Raise ConnectionFailure if it doesn't. if alias not in _connection_settings: if alias == DEFAULT_CONNECTION_NAME: msg = "You have not defined a default connection" else: msg = 'Connection with alias "%s" has not been defined' % alias - raise MongoEngineConnectionError(msg) + raise ConnectionFailure(msg) def _clean_settings(settings_dict): irrelevant_fields_set = { @@ -305,14 +305,12 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): def _create_connection(alias, connection_class, **connection_settings): """ Create the new connection for this alias. Raise - MongoEngineConnectionError if it can't be established. + ConnectionFailure if it can't be established. """ try: return connection_class(**connection_settings) except Exception as e: - raise MongoEngineConnectionError( - "Cannot connect to database %s :\n%s" % (alias, e) - ) + raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e)) def _find_existing_connection(connection_settings): @@ -393,7 +391,7 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): u"A different connection with alias `{}` was already " u"registered. 
Use disconnect() first" ).format(alias) - raise MongoEngineConnectionError(err_msg) + raise ConnectionFailure(err_msg) else: register_connection(alias, db, **kwargs) diff --git a/tests/test_connection.py b/tests/test_connection.py index 25007132..f9c9d098 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -23,7 +23,7 @@ from mongoengine import ( ) import mongoengine.connection from mongoengine.connection import ( - MongoEngineConnectionError, + ConnectionFailure, get_db, get_connection, disconnect, @@ -92,10 +92,10 @@ class ConnectionTest(unittest.TestCase): disconnect("db1") disconnect("db2") - with self.assertRaises(MongoEngineConnectionError): + with self.assertRaises(ConnectionFailure): list(History1.objects().as_pymongo()) - with self.assertRaises(MongoEngineConnectionError): + with self.assertRaises(ConnectionFailure): list(History2.objects().as_pymongo()) connect("db1", alias="db1") @@ -149,7 +149,7 @@ class ConnectionTest(unittest.TestCase): def test_connect_fails_if_connect_2_times_with_default_alias(self): connect("mongoenginetest") - with self.assertRaises(MongoEngineConnectionError) as ctx_err: + with self.assertRaises(ConnectionFailure) as ctx_err: connect("mongoenginetest2") self.assertEqual( "A different connection with alias `default` was already registered. Use disconnect() first", @@ -159,7 +159,7 @@ class ConnectionTest(unittest.TestCase): def test_connect_fails_if_connect_2_times_with_custom_alias(self): connect("mongoenginetest", alias="alias1") - with self.assertRaises(MongoEngineConnectionError) as ctx_err: + with self.assertRaises(ConnectionFailure) as ctx_err: connect("mongoenginetest2", alias="alias1") self.assertEqual( @@ -175,7 +175,7 @@ class ConnectionTest(unittest.TestCase): db_alias = "alias1" connect(db=db_name, alias=db_alias, host="localhost", port=27017) - with self.assertRaises(MongoEngineConnectionError): + with self.assertRaises(ConnectionFailure): connect(host="mongodb://localhost:27017/%s" % db_name, alias=db_alias) def test_connect_passes_silently_connect_multiple_times_with_same_config(self): @@ -353,7 +353,7 @@ class ConnectionTest(unittest.TestCase): self.assertIsNone(History._collection) - with self.assertRaises(MongoEngineConnectionError) as ctx_err: + with self.assertRaises(ConnectionFailure) as ctx_err: History.objects.first() self.assertEqual( "You have not defined a default connection", str(ctx_err.exception) @@ -379,7 +379,7 @@ class ConnectionTest(unittest.TestCase): disconnect() # Make sure save doesnt work at this stage - with self.assertRaises(MongoEngineConnectionError): + with self.assertRaises(ConnectionFailure): User(name="Wont work").save() # Save in db2 @@ -433,10 +433,10 @@ class ConnectionTest(unittest.TestCase): self.assertEqual(len(dbs), 0) self.assertEqual(len(connection_settings), 0) - with self.assertRaises(MongoEngineConnectionError): + with self.assertRaises(ConnectionFailure): History.objects.first() - with self.assertRaises(MongoEngineConnectionError): + with self.assertRaises(ConnectionFailure): History1.objects.first() def test_disconnect_all_silently_pass_if_no_connection_exist(self): @@ -557,7 +557,7 @@ class ConnectionTest(unittest.TestCase): """ register_connection("testdb", "mongoenginetest2") - self.assertRaises(MongoEngineConnectionError, get_connection) + self.assertRaises(ConnectionFailure, get_connection) conn = get_connection("testdb") self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py 
index 5e3aa493..6dfab407 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -4,7 +4,7 @@ from pymongo import ReadPreference from pymongo import MongoClient import mongoengine -from mongoengine.connection import MongoEngineConnectionError +from mongoengine.connection import ConnectionFailure CONN_CLASS = MongoClient @@ -32,7 +32,7 @@ class ConnectionTest(unittest.TestCase): host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=READ_PREF, ) - except MongoEngineConnectionError as e: + except ConnectionFailure as e: return if not isinstance(conn, CONN_CLASS): From 4d5eba317eb77eacc2a0974ee0995acbcec6d6aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 30 Jun 2019 20:55:19 +0200 Subject: [PATCH 022/216] convert travis test on latest python/mongo/pymongo from python 3.6 to 3.7 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index cf66da6a..9d0b5b4b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -41,7 +41,7 @@ matrix: include: - python: 2.7 env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x - - python: 3.6 + - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=3.x From 06a21e038addd4ac05e36acb5f2bfa810cf59209 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 30 Jun 2019 21:03:13 +0200 Subject: [PATCH 023/216] Use global constants for mongo/pymongo in travis.yml --- .travis.yml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8af73c6b..0aba2ae8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -30,8 +30,10 @@ env: global: - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 + - PYMONGO_3_X=3.x + - PYMONGO_3_4=3.4.x matrix: - - MONGODB=${MONGODB_3_4} PYMONGO=3.x + - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_X} matrix: @@ -40,11 +42,11 @@ matrix: include: - python: 2.7 - env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x + env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} - python: 3.6 - env: MONGODB=${MONGODB_3_6} PYMONGO=3.x + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_X} - python: 3.7 - env: MONGODB=${MONGODB_3_6} PYMONGO=3.x + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_X} install: @@ -105,5 +107,5 @@ deploy: on: tags: true repo: MongoEngine/mongoengine - condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4.17) + condition: ($PYMONGO = ${PYMONGO_3_X}) && ($MONGODB = ${MONGODB_3_4}) python: 2.7 From d0b87f7f82579c28e19e6f665839b597e9a037a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Mon, 1 Jul 2019 10:18:47 +0200 Subject: [PATCH 024/216] Drop the deprecated "format" param from BaseQuerySet.explain (#2113) That option was pretty useless. You can very easily do: ``` import pprint (...) plan = SomeDoc.objects(...).explain() pprint.pformat(plan) ``` --- docs/changelog.rst | 1 + mongoengine/queryset/base.py | 19 ++----------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7eac72df..b9114eb5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,7 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- BREAKING CHANGE: Removed the deprecated `format` param from `QuerySet.explain` #2113 - BREAKING CHANGE: Renamed `MongoEngineConnectionError` to `ConnectionFailure` #2111 - If you catch/use `MongoEngineConnectionError` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. 
#2103 diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 78e85399..42c4b927 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -2,7 +2,6 @@ from __future__ import absolute_import import copy import itertools -import pprint import re import warnings @@ -1109,25 +1108,11 @@ class BaseQuerySet(object): """ return self._chainable_method("comment", text) - def explain(self, format=False): + def explain(self): """Return an explain plan record for the :class:`~mongoengine.queryset.QuerySet`\ 's cursor. - - :param format: format the plan before returning it """ - plan = self._cursor.explain() - - # TODO remove this option completely - it's useless. If somebody - # wants to pretty-print the output, they easily can. - if format: - msg = ( - '"format" param of BaseQuerySet.explain has been ' - "deprecated and will be removed in future versions." - ) - warnings.warn(msg, DeprecationWarning) - plan = pprint.pformat(plan) - - return plan + return self._cursor.explain() # DEPRECATED. Has no more impact on PyMongo 3+ def snapshot(self, enabled): From de80f0ccff487f32b0ce171093f33c67e3a7d019 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 2 Jul 2019 18:26:48 +0200 Subject: [PATCH 025/216] Clean up the changelog [ci skip] Mostly making sure that code is formatted using backticks and that wording and punctuation are consistent. --- docs/changelog.rst | 294 ++++++++++++++++++++++----------------------- 1 file changed, 144 insertions(+), 150 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b9114eb5..4dcb7298 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -16,210 +16,204 @@ Development Changes in 0.18.2 ================= -- Replace some of the deprecated PyMongo v2.x methods with their v3.x equivalents #2097 -- Various code clarity and documentation improvements +- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the `SequenceField` #2097 +- Various code clarity and documentation improvements. Changes in 0.18.1 ================= -- Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields - instead of updating only the modified fields. This bug only occurs when using custom pk #2082 -- Add Python 3.7 in travis #2058 +- Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 +- Add Python 3.7 to Travis CI. #2058 Changes in 0.18.0 ================= - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. -- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6 (#2017 #2066). -- Improve performance by avoiding a call to `to_mongo` in `Document.save()` #2049 +- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066 +- Improve performance by avoiding a call to `to_mongo` in `Document.save()`. 
#2049 - Connection/disconnection improvements: - - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all` - - Fix disconnecting #566 #1599 #605 #607 #1213 #565 - - Improve documentation of `connect`/`disconnect` - - Fix issue when using multiple connections to the same mongo with different credentials #2047 - - `connect` fails immediately when db name contains invalid characters #2031 #1718 -- Fix the default write concern of `Document.save` that was overwriting the connection write concern #568 -- Fix querying on `List(EmbeddedDocument)` subclasses fields #1961 #1492 -- Fix querying on `(Generic)EmbeddedDocument` subclasses fields #475 -- Fix `QuerySet.aggregate` so that it takes limit and skip value into account #2029 -- Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields` #2020 -- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning True/False #2050 -- BREAKING CHANGES (associated with connect/disconnect fixes): + - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all`. + - Fix disconnecting. #566 #1599 #605 #607 #1213 #565 + - Improve documentation of `connect`/`disconnect`. + - Fix issue when using multiple connections to the same mongo with different credentials. #2047 + - `connect` fails immediately when db name contains invalid characters. #2031 #1718 +- Fix the default write concern of `Document.save` that was overwriting the connection write concern. #568 +- Fix querying on `List(EmbeddedDocument)` subclasses fields. #1961 #1492 +- Fix querying on `(Generic)EmbeddedDocument` subclasses fields. #475 +- Fix `QuerySet.aggregate` so that it takes limit and skip value into account. #2029 +- Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields`. #2020 +- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning `True`/`False`. #2050 +- BREAKING CHANGES (associated with connection/disconnection fixes): - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first). - `disconnect` now clears `mongoengine.connection._connection_settings`. - `disconnect` now clears the cached attribute `Document._collection`. -- BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` is no longier exist #1552 +- BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` is no longer exist. #1552 Changes in 0.17.0 ================= -- Fix .only() working improperly after using .count() of the same instance of QuerySet -- Fix batch_size that was not copied when cloning a queryset object #2011 -- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (_cls, _id) when using `QuerySet.as_pymongo` #1976 -- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time (#1995) -- Fix InvalidStringData error when using modify on a BinaryField #1127 -- DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of mongoengine #1552 -- Fix test suite and CI to support MongoDB 3.4 #1445 -- Fix reference fields querying the database on each access if value contains orphan DBRefs +- Fix `.only()` working improperly after using `.count()` of the same instance of a `QuerySet`. 
+- Fix `batch_size` that was not copied when cloning a `QuerySet` object. #2011 +- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (`_cls`, `_id`) when using `QuerySet.as_pymongo`. #1976 +- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 +- Fix `InvalidStringData` error when using `modify` on a `BinaryField`. #1127 +- DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 +- Fix test suite and CI to support MongoDB v3.4. #1445 +- Fix reference fields querying the database on each access if value contains orphan DBRefs. ================= Changes in 0.16.3 ================= -- Fix $push with $position operator not working with lists in embedded document #1965 +- Fix `$push` with the `$position` operator not working with lists in embedded documents. #1965 ================= Changes in 0.16.2 ================= -- Fix .save() that fails when called with write_concern=None (regression of 0.16.1) #1958 +- Fix `.save()` that fails when called with `write_concern=None` (regression of 0.16.1). #1958 ================= Changes in 0.16.1 ================= -- Fix `_cls` that is not set properly in Document constructor (regression) #1950 -- Fix bug in _delta method - Update of a ListField depends on an unrelated dynamic field update #1733 -- Remove deprecated `save()` method and used `insert_one()` #1899 +- Fix `_cls` that is not set properly in the Document constructor (regression). #1950 +- Fix a bug in the `_delta` method - update of a ListField depends on an unrelated dynamic field update. #1733 +- Remove PyMongo's deprecated `save()` method and use `insert_one()` instead. #1899 ================= Changes in 0.16.0 ================= -- Various improvements to the doc -- Improvement to code quality - POTENTIAL BREAKING CHANGES: - - EmbeddedDocumentField will no longer accept references to Document classes in its constructor #1661 - - Get rid of the `basecls` parameter from the DictField constructor (dead code) #1876 - - default value of ComplexDateTime is now None (and no longer the current datetime) #1368 -- Fix unhashable TypeError when referencing a Document with a compound key in an EmbeddedDocument #1685 -- Fix bug where an EmbeddedDocument with the same id as its parent would not be tracked for changes #1768 -- Fix the fact that bulk `insert()` was not setting primary keys of inserted documents instances #1919 -- Fix bug when referencing the abstract class in a ReferenceField #1920 -- Allow modification to the document made in pre_save_post_validation to be taken into account #1202 -- Replaced MongoDB 2.4 tests in CI by MongoDB 3.2 #1903 -- Fix side effects of using queryset.`no_dereference` on other documents #1677 -- Fix TypeError when using lazy django translation objects as translated choices #1879 -- Improve 2-3 codebase compatibility #1889 -- Fix the support for changing the default value of ComplexDateTime #1368 -- Improves error message in case an EmbeddedDocumentListField receives an EmbeddedDocument instance - instead of a list #1877 -- Fix the Decimal operator inc/dec #1517 #1320 -- Ignore killcursors queries in `query_counter` context manager #1869 -- Fix the fact that `query_counter` was modifying the initial profiling_level in case it was != 0 #1870 -- Repaired the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions #1865 -- Fix index creation error that was swallowed by hasattr under python2 #1688 -- 
QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611 -- bulk insert updates the ids of the input documents instances #1919 -- Fix an harmless bug related to GenericReferenceField where modifications in the generic-referenced document - were tracked in the parent #1934 -- Improve validator of BinaryField #273 -- Implemented lazy regex compiling in Field classes to improve 'import mongoengine' performance #1806 -- Updated GridFSProxy.__str__ so that it would always print both the filename and grid_id #710 -- Add __repr__ to Q and QCombination #1843 -- fix bug in BaseList.__iter__ operator (was occuring when modifying a BaseList while iterating over it) #1676 -- Added field `DateField`#513 + - `EmbeddedDocumentField` will no longer accept references to Document classes in its constructor. #1661 + - Get rid of the `basecls` parameter from the `DictField` constructor (dead code). #1876 + - Default value of the `ComplexDateTime` field is now `None` (and no longer the current datetime). #1368 +- Fix an unhashable `TypeError` when referencing a `Document` with a compound key in an `EmbeddedDocument`. #1685 +- Fix a bug where an `EmbeddedDocument` with the same id as its parent would not be tracked for changes. #1768 +- Fix the fact that bulk `insert()` was not setting primary keys of inserted document instances. #1919 +- Fix a bug when referencing the abstract class in a `ReferenceField`. #1920 +- Allow modification to the document made in `pre_save_post_validation` to be taken into account. #1202 +- Replaced MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 +- Fix side effects of using `QuerySet.no_dereference` on other documents. #1677 +- Fix `TypeError` when using lazy django translation objects as translated choices. #1879 +- Improve Python 2-3 codebase compatibility. #1889 +- Fix the support for changing the default value of the `ComplexDateTime` field. #1368 +- Improve error message in case an `EmbeddedDocumentListField` receives an `EmbeddedDocument` instance instead of a list. #1877 +- Fix the Decimal operator inc/dec. #1517 #1320 +- Ignore `killcursors` queries in `query_counter` context manager. #1869 +- Fix the fact that `query_counter` was modifying the initial profiling level in case it was != 0. #1870 +- Repair the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions. #1865 +- Fix index creation error that was swallowed by `hasattr` under Python 2. #1688 +- `QuerySet.limit` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 +- Bulk insert updates the IDs of the input documents instances. #1919 +- Fix a harmless bug related to `GenericReferenceField` where modifications in the generic-referenced document were tracked in the parent. #1934 +- Improve validation of the `BinaryField`. #273 +- Implement lazy regex compiling in Field classes to improve `import mongoengine` performance. #1806 +- Update `GridFSProxy.__str__` so that it would always print both the filename and grid_id. #710 +- Add `__repr__` to `Q` and `QCombination` classes. #1843 +- Fix bug in the `BaseList.__iter__` operator (was occuring when modifying a BaseList while iterating over it). #1676 +- Added a `DateField`. #513 +- Various improvements to the documentation. +- Various code quality improvements. 
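For illustration of the `DateField` entry above, a minimal sketch of how the new field can be used; the `Person` class and its field names are placeholders rather than anything defined in this patch, and only the usual document-definition and `validate()` API is assumed::

    import datetime

    from mongoengine import DateField, Document, StringField

    class Person(Document):
        name = StringField()
        birth_date = DateField()  # intended for calendar dates, unlike DateTimeField

    # Field validation runs locally, so no database connection is needed here.
    Person(name="Ada", birth_date=datetime.date(1815, 12, 10)).validate()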
Changes in 0.15.3 ================= -- BREAKING CHANGES: `Queryset.update/update_one` methods now returns an UpdateResult when `full_result=True` is provided and no longer a dict (relates to #1491) -- Subfield resolve error in generic_emdedded_document query #1651 #1652 -- use each modifier only with $position #1673 #1675 -- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 -- Fix validation error instance in GenericEmbeddedDocumentField #1067 -- Update cached fields when fields argument is given #1712 -- Add a db parameter to register_connection for compatibility with connect -- Use insert_one, insert_many in Document.insert #1491 -- Use new update_one, update_many on document/queryset update #1491 -- Use insert_one, insert_many in Document.insert #1491 -- Fix reload(fields) affect changed fields #1371 -- Fix Read-only access to database fails when trying to create indexes #1338 +- BREAKING CHANGES: `Queryset.update/update_one` methods now return an `UpdateResult` when `full_result=True` is provided and no longer a dict. #1491 +- Fix the subfield resolve error in `generic_emdedded_document` query. #1651 #1652 +- Use each modifier only with `$position`. #1673 #1675 +- Improve `LazyReferenceField` and `GenericLazyReferenceField` with nested fields. #1704 +- Fix validation error instance in `GenericEmbeddedDocumentField`. #1067 +- Update cached fields when fields argument is given. #1712 +- Add a db parameter to `register_connection` for compatibility with `connect`. +- Use PyMongo v3.x's `insert_one` and `insert_many` in `Document.insert`. #1491 +- Use PyMongo v3.x's `update_one` and `update_many` in `Document.update` and `QuerySet.update`. #1491 +- Fix how `reload(fields)` affects changed fields. #1371 +- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 Changes in 0.15.0 ================= -- Add LazyReferenceField and GenericLazyReferenceField to address #1230 +- Add `LazyReferenceField` and `GenericLazyReferenceField`. #1230 Changes in 0.14.1 ================= -- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630 -- Added support for the `$position` param in the `$push` operator #1566 -- Fixed `DateTimeField` interpreting an empty string as today #1533 -- Added a missing `__ne__` method to the `GridFSProxy` class #1632 -- Fixed `BaseQuerySet._fields_to_db_fields` #1553 +- Remove `SemiStrictDict` and start using a regular dict for `BaseDocument._data`. #1630 +- Add support for the `$position` param in the `$push` operator. #1566 +- Fix `DateTimeField` interpreting an empty string as today. #1533 +- Add a missing `__ne__` method to the `GridFSProxy` class. #1632 +- Fix `BaseQuerySet._fields_to_db_fields`. #1553 Changes in 0.14.0 ================= -- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549 -- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528 -- Improved code quality #1531, #1540, #1541, #1547 +- BREAKING CHANGE: Remov the `coerce_types` param from `QuerySet.as_pymongo`. #1549 +- POTENTIAL BREAKING CHANGE: Make `EmbeddedDocument` not hashable by default. #1528 +- Improv code quality. #1531, #1540, #1541, #1547 Changes in 0.13.0 ================= -- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see - docs/upgrade.rst for details. +- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see docs/upgrade.rst for details. 
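To make the `EmailField` entry above concrete, here is a minimal sketch of the validation behaviour it refers to; `Subscriber` and the sample addresses are placeholders, and docs/upgrade.rst remains the authority on the exact Unicode-related options::

    from mongoengine import Document, EmailField, ValidationError

    class Subscriber(Document):
        email = EmailField()  # Unicode-aware validation; opt-in flags are described in docs/upgrade.rst

    Subscriber(email="user@example.com").validate()  # a well-formed address passes

    try:
        Subscriber(email="not-an-email").validate()
    except ValidationError:
        pass  # malformed addresses are rejected at validation time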
Changes in 0.12.0 ================= -- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 -- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 -- Fixed the way `Document.objects.create` works with duplicate IDs #1485 -- Fixed connecting to a replica set with PyMongo 2.x #1436 -- Fixed using sets in field choices #1481 -- Fixed deleting items from a `ListField` #1318 -- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 -- Fixed behavior of a `dec` update operator #1450 -- Added a `rename` update operator #1454 -- Added validation for the `db_field` parameter #1448 -- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440 -- Fixed the error message displayed when validating unicode URLs #1486 -- Raise an error when trying to save an abstract document #1449 +- POTENTIAL BREAKING CHANGE: Fix `limit`/`skip`/`hint`/`batch_size` chaining. #1476 +- POTENTIAL BREAKING CHANGE: Change a public `QuerySet.clone_into` method to a private `QuerySet._clone_into`. #1476 +- Fix the way `Document.objects.create` works with duplicate IDs. #1485 +- Fix connecting to a replica set with PyMongo 2.x. #1436 +- Fix using sets in field choices. #1481 +- Fix deleting items from a `ListField`. #1318 +- Fix an obscure error message when filtering by `field__in=non_iterable`. #1237 +- Fix behavior of a `dec` update operator. #1450 +- Add a `rename` update operator. #1454 +- Add validation for the `db_field` parameter. #1448 +- Fix the error message displayed when querying an `EmbeddedDocumentField` by an invalid value. #1440 +- Fix the error message displayed when validating Unicode URLs. #1486 +- Raise an error when trying to save an abstract document. #1449 Changes in 0.11.0 ================= -- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 -- BREAKING CHANGE: Dropped Python 2.6 support. #1428 +- BREAKING CHANGE: Rename `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 +- BREAKING CHANGE: Drop Python v2.6 support. #1428 - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 - BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 - Fixed absent rounding for DecimalField when `force_string` is set. #1103 Changes in 0.10.8 ================= -- Added support for QuerySet.batch_size (#1426) -- Fixed query set iteration within iteration #1427 -- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421 -- Added ability to filter the generic reference field by ObjectId and DBRef #1425 -- Fixed delete cascade for models with a custom primary key field #1247 -- Added ability to specify an authentication mechanism (e.g. X.509) #1333 -- Added support for falsey primary keys (e.g. doc.pk = 0) #1354 -- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417 -- Fixed filtering by embedded_doc=None #1422 -- Added support for cursor.comment #1420 -- Fixed doc.get__display #1419 -- Fixed __repr__ method of the StrictDict #1424 -- Added a deprecation warning for Python 2.6 +- Add support for `QuerySet.batch_size`. 
(#1426) +- Fix a query set iteration within an iteration. #1427 +- Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421 +- Add an ability to filter the `GenericReferenceField` by an `ObjectId` and a `DBRef`. #1425 +- Fix cascading deletes for models with a custom primary key field. #1247 +- Add ability to specify an authentication mechanism (e.g. X.509). #1333 +- Add support for falsy primary keys (e.g. `doc.pk = 0`). #1354 +- Fix `QuerySet.sum/average` for fields w/ an explicit `db_field`. #1417 +- Fix filtering by `embedded_doc=None`. #1422 +- Add support for `Cursor.comment`. #1420 +- Fix `doc.get__display` methods. #1419 +- Fix the `__repr__` method of the `StrictDict` #1424 +- Add a deprecation warning for Python v2.6. Changes in 0.10.7 ================= -- Dropped Python 3.2 support #1390 -- Fixed the bug where dynamic doc has index inside a dict field #1278 -- Fixed: ListField minus index assignment does not work #1128 -- Fixed cascade delete mixing among collections #1224 -- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206 +- Drop Python 3.2 support #1390 +- Fix a bug where a dynamic doc has an index inside a dict field. #1278 +- Fix: `ListField` minus index assignment does not work. #1128 +- Fix cascade delete mixing among collections. #1224 +- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls. #1206 - Raise `OperationError` when trying to do a `drop_collection` on document with no collection set. -- count on ListField of EmbeddedDocumentField fails. #1187 -- Fixed long fields stored as int32 in Python 3. #1253 -- MapField now handles unicodes keys correctly. #1267 -- ListField now handles negative indicies correctly. #1270 -- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681 -- Fixed no_cursor_timeout error with pymongo 3.0+ #1304 -- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336 -- Fixed support for `__` to escape field names that match operators names in `update` #1351 -- Fixed BaseDocument#_mark_as_changed #1369 -- Added support for pickling QuerySet instances. #1397 -- Fixed connecting to a list of hosts #1389 -- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334 -- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218 -- Improvements to the dictionary fields docs #1383 +- Fix a bug where a count on `ListField` of `EmbeddedDocumentField` fails. #1187 +- Fix `LongField` values stored as int32 in Python 3. #1253 +- `MapField` now handles unicode keys correctly. #1267 +- `ListField` now handles negative indicies correctly. #1270 +- Fix an `AttributeError` when initializing an `EmbeddedDocument` with positional args. #681 +- Fix a `no_cursor_timeout` error with PyMongo v3.x. #1304 +- Replace map-reduce based `QuerySet.sum/average` with aggregation-based implementations. #1336 +- Fix support for `__` to escape field names that match operators' names in `update`. #1351 +- Fix `BaseDocument._mark_as_changed`. #1369 +- Add support for pickling `QuerySet` instances. #1397 +- Fix connecting to a list of hosts. #1389 +- Fix a bug where accessing broken references wouldn't raise a `DoesNotExist` error. #1334 +- Fix not being able to specify `use_db_field=False` on `ListField(EmbeddedDocumentField)` instances. 
#1218 +- Improvements to the dictionary field's docs. #1383 Changes in 0.10.6 ================= - Add support for mocking MongoEngine based on mongomock. #1151 -- Fixed not being able to run tests on Windows. #1153 +- Fix not being able to run tests on Windows. #1153 - Allow creation of sparse compound indexes. #1114 -- count on ListField of EmbeddedDocumentField fails. #1187 Changes in 0.10.5 ================= @@ -227,12 +221,12 @@ Changes in 0.10.5 Changes in 0.10.4 ================= -- SaveConditionError is now importable from the top level package. #1165 -- upsert_one method added. #1157 +- `SaveConditionError` is now importable from the top level package. #1165 +- Add a `QuerySet.upsert_one` method. #1157 Changes in 0.10.3 ================= -- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042 +- Fix `read_preference` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 Changes in 0.10.2 ================= @@ -242,16 +236,16 @@ Changes in 0.10.2 Changes in 0.10.1 ================= -- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046 -- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047 -- Fix ignored chained options #842 -- Document save's save_condition error raises `SaveConditionError` exception #1070 -- Fix Document.reload for DynamicDocument. #1050 -- StrictDict & SemiStrictDict are shadowed at init time. #1105 -- Fix ListField minus index assignment does not work. #1119 -- Remove code that marks field as changed when the field has default but not existed in database #1126 -- Remove test dependencies (nose and rednose) from install dependencies list. #1079 -- Recursively build query when using elemMatch operator. #1130 +- Fix infinite recursion with cascade delete rules under specific conditions. #1046 +- Fix `CachedReferenceField` bug when loading cached docs as `DBRef` but failing to save them. #1047 +- Fix ignored chained options. #842 +- `Document.save`'s `save_condition` error raises a `SaveConditionError` exception. #1070 +- Fix `Document.reload` for the `DynamicDocument`. #1050 +- `StrictDict` & `SemiStrictDict` are shadowed at init time. #1105 +- Fix `ListField` negative index assignment not working. #1119 +- Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126 +- Remove test dependencies (nose and rednose) from install dependencies. #1079 +- Recursively build a query when using the `elemMatch` operator. #1130 - Fix instance back references for lists of embedded documents. #1131 Changes in 0.10.0 From 483c840fc8247c2c52dec44eb9f73486a70c3fff Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 2 Jul 2019 18:29:28 +0200 Subject: [PATCH 026/216] One more changelog tweak [ci skip] --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4dcb7298..052364a9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -45,7 +45,7 @@ Changes in 0.18.0 - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first). - `disconnect` now clears `mongoengine.connection._connection_settings`. - `disconnect` now clears the cached attribute `Document._collection`. -- BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` is no longer exist. 
#1552 +- BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` no longer exist. #1552 Changes in 0.17.0 ================= From b593764ded7245b12722d931f599c4a962146e21 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 2 Jul 2019 18:35:29 +0200 Subject: [PATCH 027/216] One more changelog tweak [ci skip] --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 052364a9..e4d38b24 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -168,7 +168,7 @@ Changes in 0.11.0 - BREAKING CHANGE: Drop Python v2.6 support. #1428 - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 - BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 -- Fixed absent rounding for DecimalField when `force_string` is set. #1103 +- Fix absent rounding for the `DecimalField` when `force_string` is set. #1103 Changes in 0.10.8 ================= From bbed312bdd6fd1bf991cff9bf5289ac7f94ab77f Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Tue, 2 Jul 2019 18:42:50 +0200 Subject: [PATCH 028/216] Final changelog tweaks [ci skip] --- docs/changelog.rst | 52 +++++++++++++++++++++++----------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e4d38b24..a78c9e13 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -49,12 +49,12 @@ Changes in 0.18.0 Changes in 0.17.0 ================= -- Fix `.only()` working improperly after using `.count()` of the same instance of a `QuerySet`. -- Fix `batch_size` that was not copied when cloning a `QuerySet` object. #2011 - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (`_cls`, `_id`) when using `QuerySet.as_pymongo`. #1976 - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 -- Fix `InvalidStringData` error when using `modify` on a `BinaryField`. #1127 - DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 +- Fix `.only()` working improperly after using `.count()` of the same instance of a `QuerySet`. +- Fix `batch_size` that was not copied when cloning a `QuerySet` object. #2011 +- Fix `InvalidStringData` error when using `modify` on a `BinaryField`. #1127 - Fix test suite and CI to support MongoDB v3.4. #1445 - Fix reference fields querying the database on each access if value contains orphan DBRefs. @@ -71,8 +71,8 @@ Changes in 0.16.2 ================= Changes in 0.16.1 ================= -- Fix `_cls` that is not set properly in the Document constructor (regression). #1950 -- Fix a bug in the `_delta` method - update of a ListField depends on an unrelated dynamic field update. #1733 +- Fix `_cls` that is not set properly in the `Document` constructor (regression). #1950 +- Fix a bug in the `_delta` method - update of a `ListField` depends on an unrelated dynamic field update. #1733 - Remove PyMongo's deprecated `save()` method and use `insert_one()` instead. #1899 ================= @@ -84,16 +84,16 @@ Changes in 0.16.0 - Default value of the `ComplexDateTime` field is now `None` (and no longer the current datetime). #1368 - Fix an unhashable `TypeError` when referencing a `Document` with a compound key in an `EmbeddedDocument`. 
#1685 - Fix a bug where an `EmbeddedDocument` with the same id as its parent would not be tracked for changes. #1768 -- Fix the fact that bulk `insert()` was not setting primary keys of inserted document instances. #1919 -- Fix a bug when referencing the abstract class in a `ReferenceField`. #1920 -- Allow modification to the document made in `pre_save_post_validation` to be taken into account. #1202 -- Replaced MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 +- Fix the fact that a bulk `QuerySet.insert` was not setting primary keys of inserted document instances. #1919 +- Fix a bug when referencing an abstract class in a `ReferenceField`. #1920 +- Allow modifications to the document made in `pre_save_post_validation` to be taken into account. #1202 +- Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 - Fix side effects of using `QuerySet.no_dereference` on other documents. #1677 -- Fix `TypeError` when using lazy django translation objects as translated choices. #1879 +- Fix `TypeError` when using lazy Django translation objects as translated choices. #1879 - Improve Python 2-3 codebase compatibility. #1889 -- Fix the support for changing the default value of the `ComplexDateTime` field. #1368 +- Fix support for changing the default value of the `ComplexDateTime` field. #1368 - Improve error message in case an `EmbeddedDocumentListField` receives an `EmbeddedDocument` instance instead of a list. #1877 -- Fix the Decimal operator inc/dec. #1517 #1320 +- Fix the `inc` and `dec` operators for the `DecimalField`. #1517 #1320 - Ignore `killcursors` queries in `query_counter` context manager. #1869 - Fix the fact that `query_counter` was modifying the initial profiling level in case it was != 0. #1870 - Repair the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions. #1865 @@ -106,23 +106,23 @@ Changes in 0.16.0 - Update `GridFSProxy.__str__` so that it would always print both the filename and grid_id. #710 - Add `__repr__` to `Q` and `QCombination` classes. #1843 - Fix bug in the `BaseList.__iter__` operator (was occuring when modifying a BaseList while iterating over it). #1676 -- Added a `DateField`. #513 +- Add a `DateField`. #513 - Various improvements to the documentation. - Various code quality improvements. Changes in 0.15.3 ================= -- BREAKING CHANGES: `Queryset.update/update_one` methods now return an `UpdateResult` when `full_result=True` is provided and no longer a dict. #1491 -- Fix the subfield resolve error in `generic_emdedded_document` query. #1651 #1652 -- Use each modifier only with `$position`. #1673 #1675 -- Improve `LazyReferenceField` and `GenericLazyReferenceField` with nested fields. #1704 -- Fix validation error instance in `GenericEmbeddedDocumentField`. #1067 -- Update cached fields when fields argument is given. #1712 -- Add a db parameter to `register_connection` for compatibility with `connect`. -- Use PyMongo v3.x's `insert_one` and `insert_many` in `Document.insert`. #1491 -- Use PyMongo v3.x's `update_one` and `update_many` in `Document.update` and `QuerySet.update`. #1491 -- Fix how `reload(fields)` affects changed fields. #1371 -- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 +- `Queryset.update/update_one` methods now return an `UpdateResult` when `full_result=True` is provided and no longer a dict. #1491 +- Improve `LazyReferenceField` and `GenericLazyReferenceField` with nested fields. 
#1704 +- Fix the subfield resolve error in `generic_emdedded_document` query. #1651 #1652 +- Use each modifier only with `$position`. #1673 #1675 +- Fix validation errors in the `GenericEmbeddedDocumentField`. #1067 +- Update cached fields when a `fields` argument is given. #1712 +- Add a `db` parameter to `register_connection` for compatibility with `connect`. +- Use PyMongo v3.x's `insert_one` and `insert_many` in `Document.insert`. #1491 +- Use PyMongo v3.x's `update_one` and `update_many` in `Document.update` and `QuerySet.update`. #1491 +- Fix how `reload(fields)` affects changed fields. #1371 +- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 Changes in 0.15.0 ================= @@ -138,9 +138,9 @@ Changes in 0.14.1 Changes in 0.14.0 ================= -- BREAKING CHANGE: Remov the `coerce_types` param from `QuerySet.as_pymongo`. #1549 +- BREAKING CHANGE: Remove the `coerce_types` param from `QuerySet.as_pymongo`. #1549 - POTENTIAL BREAKING CHANGE: Make `EmbeddedDocument` not hashable by default. #1528 -- Improv code quality. #1531, #1540, #1541, #1547 +- Improve code quality. #1531, #1540, #1541, #1547 Changes in 0.13.0 ================= From e9400446030ecc29fede088a7abe2c501a737b8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 2 Jul 2019 23:06:31 +0200 Subject: [PATCH 029/216] fixes in .travis.yml and tox.ini based on PR review --- .travis.yml | 8 ++++---- tox.ini | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4021cb8f..34acd952 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,10 +32,10 @@ env: global: - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 - - PYMONGO_3_X=3.x + - PYMONGO_3_6=3.6 - PYMONGO_3_4=3.4.x matrix: - - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_X} + - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_6} matrix: @@ -46,7 +46,7 @@ matrix: - python: 2.7 env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} - python: 3.7 - env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_X} + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} install: @@ -107,5 +107,5 @@ deploy: on: tags: true repo: MongoEngine/mongoengine - condition: ($PYMONGO = ${PYMONGO_3_X}) && ($MONGODB = ${MONGODB_3_4}) + condition: ($PYMONGO = ${PYMONGO_3_6}) && ($MONGODB = ${MONGODB_3_4}) python: 2.7 diff --git a/tox.ini b/tox.ini index 40bcea8a..5e62e58b 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ commands = python setup.py nosetests {posargs} deps = nose - mg34x: PyMongo>=3.4,<3.5 - mg3x: PyMongo>=3.0,<3.7 + mg34: pymongo>=3.4,<3.5 + mg36: pymongo>=3.6,<3.7 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs From 951a532a9f30693903fa41ad3ec9cbe2cf0f7790 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 3 Jul 2019 09:15:28 +0200 Subject: [PATCH 030/216] additional fix in travis/tox --- .travis.yml | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 34acd952..54a6befd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,7 +33,7 @@ env: - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 - PYMONGO_3_6=3.6 - - PYMONGO_3_4=3.4.x + - PYMONGO_3_4=3.4 matrix: - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_6} diff --git a/tox.ini b/tox.ini index 5e62e58b..a1ae8444 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,pypy,pypy3}-{mg35,mg3x} +envlist = {py27,py35,pypy,pypy3}-{mg34,mg36} [testenv] commands = From 50555ec73e8fdd978ca755eadef948321d2ef73c Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Wed, 3 Jul 2019 11:07:55 +0200 Subject: [PATCH 031/216] Better management of the automatic "_cls" field (#2112) * Rename BaseQuerySet._initial_query to BaseQuerySet._cls_query This new name more accurately reflects the purpose of the dict. It is either empty for documents that don't use inheritance or it contains a `{"_cls": ...}` shape query. There was nothing "initial" about it per se. * Drop read_preference as a kwarg on BaseQuerySet.__call__/filter It was a poor design choice to offer two different ways to do the same thing: 1. `SomeDoc.objects(foo=bar, bar=baz).read_preference(...)` 2. `SomeDoc.objects(foo=bar, bar=baz, read_preference=...)` Option 1 is good because it's immediately obvious which part defines the query to be used and which part defines the read preference. Option 2 is bad because you don't immediately know whether `read_preference` is a query kwarg or a reserved keyword with some special behavior. If you wanted to be particularly cruel, you could even write `SomeDoc.objects(foo=bar, read_preference=..., bar=baz)`. THIS IS A BREAKING CHANGE. From now on you have to use the `BaseQuerySet.read_preference(...)` method. * Add a BaseQuerySet.clear_cls_query method + get rid of the class_check kwarg This is similar to what the previous commit did to read preference except that in this case we were still missing a `BaseQuerySet` method for clearing the `_cls` query. Now, instead of the undocumented, untested, and confusing interface: `ParentDoc.objects(foo=bar, bar=baz, class_check=False)` We do: `ParentDoc.objects(foo=bar, bar=baz).clear_cls_query()` --- docs/changelog.rst | 4 +++ mongoengine/queryset/base.py | 57 ++++++++++++++++---------------- tests/queryset/queryset.py | 63 +++++++++++++++++++++++++++--------- 3 files changed, 80 insertions(+), 44 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index a78c9e13..6512c1d3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,10 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- BREAKING CHANGE: `class_check` and `read_preference` keyword arguments are no longer available when filtering a `QuerySet`. #2112 + - Instead of `Doc.objects(foo=bar, read_preference=...)` use `Doc.objects(foo=bar).read_preference(...)`. + - Instead of `Doc.objects(foo=bar, class_check=False)` use `Doc.objects(foo=bar).clear_cls_query(...)`. + - This change also renames the private `QuerySet._initial_query` attribute to `_cls_query`. - BREAKING CHANGE: Removed the deprecated `format` param from `QuerySet.explain` #2113 - BREAKING CHANGE: Renamed `MongoEngineConnectionError` to `ConnectionFailure` #2111 - If you catch/use `MongoEngineConnectionError` in your code, you'll have to rename it. 
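Before the queryset changes below, a short sketch of the call-style change described by these entries; `Article`, its `title` field, and the `example_db` connection are placeholders, and a running local MongoDB instance is assumed for the `connect()` call::

    from pymongo import ReadPreference

    from mongoengine import Document, StringField, connect

    connect("example_db")  # placeholder database name; assumes a local mongod

    class Article(Document):
        title = StringField()
        meta = {"allow_inheritance": True}

    # No longer supported after this change:
    #   Article.objects(title="news", read_preference=ReadPreference.SECONDARY_PREFERRED)
    #   Article.objects(title="news", class_check=False)

    # Chain the dedicated queryset methods instead:
    qs = Article.objects(title="news").read_preference(ReadPreference.SECONDARY_PREFERRED)
    qs_all_classes = Article.objects(title="news").clear_cls_query()

Chaining keeps the filter arguments separate from the read-preference and class-check concerns, which is the motivation given in the commit message above.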
diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 42c4b927..ba3ac95a 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -53,13 +53,12 @@ class BaseQuerySet(object): self._collection_obj = collection self._mongo_query = None self._query_obj = Q() - self._initial_query = {} + self._cls_query = {} self._where_clause = None self._loaded_fields = QueryFieldList() self._ordering = None self._snapshot = False self._timeout = True - self._class_check = True self._slave_okay = False self._read_preference = None self._iter = False @@ -72,9 +71,9 @@ class BaseQuerySet(object): # subclasses of the class being used if document._meta.get("allow_inheritance") is True: if len(self._document._subclasses) == 1: - self._initial_query = {"_cls": self._document._subclasses[0]} + self._cls_query = {"_cls": self._document._subclasses[0]} else: - self._initial_query = {"_cls": {"$in": self._document._subclasses}} + self._cls_query = {"_cls": {"$in": self._document._subclasses}} self._loaded_fields = QueryFieldList(always_include=["_cls"]) self._cursor_obj = None @@ -86,23 +85,19 @@ class BaseQuerySet(object): self._max_time_ms = None self._comment = None - def __call__(self, q_obj=None, class_check=True, read_preference=None, **query): + def __call__(self, q_obj=None, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in the query; the :class:`~mongoengine.queryset.QuerySet` is filtered multiple times with different :class:`~mongoengine.queryset.Q` - objects, only the last one will be used - :param class_check: If set to False bypass class name check when - querying collection - :param read_preference: if set, overrides connection-level - read_preference from `ReplicaSetConnection`. - :param query: Django-style query keyword arguments + objects, only the last one will be used. + :param query: Django-style query keyword arguments. """ query = Q(**query) if q_obj: - # make sure proper query object is passed + # Make sure proper query object is passed. if not isinstance(q_obj, QNode): msg = ( "Not a query object: %s. " @@ -111,16 +106,10 @@ class BaseQuerySet(object): raise InvalidQueryError(msg) query &= q_obj - if read_preference is None: - queryset = self.clone() - else: - # Use the clone provided when setting read_preference - queryset = self.read_preference(read_preference) - + queryset = self.clone() queryset._query_obj &= query queryset._mongo_query = None queryset._cursor_obj = None - queryset._class_check = class_check return queryset @@ -222,8 +211,7 @@ class BaseQuerySet(object): return self.__call__() def filter(self, *q_objs, **query): - """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` - """ + """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__`""" return self.__call__(*q_objs, **query) def search_text(self, text, language=None): @@ -743,7 +731,7 @@ class BaseQuerySet(object): Do NOT return any inherited documents. 
""" if self._document._meta.get("allow_inheritance") is True: - self._initial_query = {"_cls": self._document._class_name} + self._cls_query = {"_cls": self._document._class_name} return self @@ -777,7 +765,7 @@ class BaseQuerySet(object): copy_props = ( "_mongo_query", - "_initial_query", + "_cls_query", "_none", "_query_obj", "_where_clause", @@ -785,7 +773,6 @@ class BaseQuerySet(object): "_ordering", "_snapshot", "_timeout", - "_class_check", "_slave_okay", "_read_preference", "_iter", @@ -1100,6 +1087,20 @@ class BaseQuerySet(object): return queryset + def clear_cls_query(self): + """Clear the default "_cls" query. + + By default, all queries generated for documents that allow inheritance + include an extra "_cls" clause. In most cases this is desirable, but + sometimes you might achieve better performance if you clear that + default query. + + Scan the code for `_cls_query` to get more details. + """ + queryset = self.clone() + queryset._cls_query = {} + return queryset + def comment(self, text): """Add a comment to the query. @@ -1651,13 +1652,11 @@ class BaseQuerySet(object): def _query(self): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) - if self._class_check and self._initial_query: + if self._cls_query: if "_cls" in self._mongo_query: - self._mongo_query = { - "$and": [self._initial_query, self._mongo_query] - } + self._mongo_query = {"$and": [self._cls_query, self._mongo_query]} else: - self._mongo_query.update(self._initial_query) + self._mongo_query.update(self._cls_query) return self._mongo_query @property diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 21f35012..9dc68f2e 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -4588,6 +4588,44 @@ class QuerySetTest(unittest.TestCase): doc.save() self.assertEqual(MyDoc.objects.only("test__47").get().test["47"], 1) + def test_clear_cls_query(self): + class Parent(Document): + name = StringField() + meta = {"allow_inheritance": True} + + class Child(Parent): + age = IntField() + + Parent.drop_collection() + + # Default query includes the "_cls" check. + self.assertEqual( + Parent.objects._query, {"_cls": {"$in": ("Parent", "Parent.Child")}} + ) + + # Clearing the "_cls" query should work. + self.assertEqual(Parent.objects.clear_cls_query()._query, {}) + + # Clearing the "_cls" query should not persist across queryset instances. + self.assertEqual( + Parent.objects._query, {"_cls": {"$in": ("Parent", "Parent.Child")}} + ) + + # The rest of the query should not be cleared. + self.assertEqual( + Parent.objects.filter(name="xyz").clear_cls_query()._query, {"name": "xyz"} + ) + + Parent.objects.create(name="foo") + Child.objects.create(name="bar", age=1) + self.assertEqual(Parent.objects.clear_cls_query().count(), 2) + self.assertEqual(Parent.objects.count(), 2) + self.assertEqual(Child.objects().count(), 1) + + # XXX This isn't really how you'd want to use `clear_cls_query()`, but + # it's a decent test to validate its behavior nonetheless. 
+ self.assertEqual(Child.objects.clear_cls_query().count(), 2) + def test_read_preference(self): class Bar(Document): txt = StringField() @@ -4595,40 +4633,35 @@ class QuerySetTest(unittest.TestCase): meta = {"indexes": ["txt"]} Bar.drop_collection() - bars = list(Bar.objects(read_preference=ReadPreference.PRIMARY)) - self.assertEqual([], bars) + bar = Bar.objects.create(txt="xyz") - self.assertRaises(TypeError, Bar.objects, read_preference="Primary") + bars = list(Bar.objects.read_preference(ReadPreference.PRIMARY)) + self.assertEqual(bars, [bar]) - # read_preference as a kwarg - bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) - self.assertEqual( - bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED - ) - - # read_preference as a query set method bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED) self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) self.assertEqual( bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED ) - # read_preference after skip + # Make sure that `.read_preference(...)` does not accept string values. + self.assertRaises(TypeError, Bar.objects.read_preference, "Primary") + + # Make sure read preference is respected after a `.skip(...)`. bars = Bar.objects.skip(1).read_preference(ReadPreference.SECONDARY_PREFERRED) self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) self.assertEqual( bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED ) - # read_preference after limit + # Make sure read preference is respected after a `.limit(...)`. bars = Bar.objects.limit(1).read_preference(ReadPreference.SECONDARY_PREFERRED) self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) self.assertEqual( bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED ) - # read_preference after order_by + # Make sure read preference is respected after an `.order_by(...)`. bars = Bar.objects.order_by("txt").read_preference( ReadPreference.SECONDARY_PREFERRED ) @@ -4637,7 +4670,7 @@ class QuerySetTest(unittest.TestCase): bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED ) - # read_preference after hint + # Make sure read preference is respected after a `.hint(...)`. bars = Bar.objects.hint([("txt", 1)]).read_preference( ReadPreference.SECONDARY_PREFERRED ) From 8fdf6649683a32702c1c1af0a82925fe55aacd20 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Wed, 3 Jul 2019 11:08:51 +0200 Subject: [PATCH 032/216] Changelog tweaks [ci skip] --- docs/changelog.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6512c1d3..4e2aaa66 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,8 +10,8 @@ Development - Instead of `Doc.objects(foo=bar, read_preference=...)` use `Doc.objects(foo=bar).read_preference(...)`. - Instead of `Doc.objects(foo=bar, class_check=False)` use `Doc.objects(foo=bar).clear_cls_query(...)`. - This change also renames the private `QuerySet._initial_query` attribute to `_cls_query`. -- BREAKING CHANGE: Removed the deprecated `format` param from `QuerySet.explain` #2113 -- BREAKING CHANGE: Renamed `MongoEngineConnectionError` to `ConnectionFailure` #2111 +- BREAKING CHANGE: Removed the deprecated `format` param from `QuerySet.explain`. #2113 +- BREAKING CHANGE: Renamed `MongoEngineConnectionError` to `ConnectionFailure`.
#2111 - If you catch/use `MongoEngineConnectionError` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - From now on keyword arguments (e.g. `Doc(field_name=value)`) are required. @@ -20,7 +20,7 @@ Development Changes in 0.18.2 ================= -- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the `SequenceField` #2097 +- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the `SequenceField`. #2097 - Various code clarity and documentation improvements. Changes in 0.18.1 From 058203a0ec0c73c20e9df7953f6659be550edd8c Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Wed, 3 Jul 2019 11:12:25 +0200 Subject: [PATCH 033/216] More changelog tweaks [ci skip] The main change is that we're now using double backticks (``), which are the correct way to format code in an RST file. --- docs/changelog.rst | 285 ++++++++++++++++++++++----------------------- 1 file changed, 141 insertions(+), 144 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4e2aaa66..ecdbf381 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,211 +6,211 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). -- BREAKING CHANGE: `class_check` and `read_preference` keyword arguments are no longer available when filtering a `QuerySet`. #2112 - - Instead of `Doc.objects(foo=bar, read_preference=...)` use `Doc.objects(foo=bar).read_preference(...)`. - - Instead of `Doc.objects(foo=bar, class_check=False)` use `Doc.objects(foo=bar).clear_cls_query(...)`. - - This change also renames the private `QuerySet._initial_query` attribute to `_cls_query`. -- BREAKING CHANGE: Removed the deprecated `format` param from `QuerySet.explain`. #2113 -- BREAKING CHANGE: Renamed `MongoEngineConnectionError` to `ConnectionFailure`. #2111 - - If you catch/use `MongoEngineConnectionError` in your code, you'll have to rename it. +- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 + - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. + - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. + - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. +- BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113 +- BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111 + - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - - From now on keyword arguments (e.g. `Doc(field_name=value)`) are required. -- The codebase is now formatted using `black`. #2109 -- `ListField` now accepts an optional `max_length` parameter. #2110 + - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. +- The codebase is now formatted using ``black``. #2109 +- ``ListField`` now accepts an optional ``max_length`` parameter. #2110 Changes in 0.18.2 ================= -- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the `SequenceField`. #2097 +- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 - Various code clarity and documentation improvements. 
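As a small illustration of the "keyword arguments are required" entry above; `City` and its `name` field are placeholder names::

    from mongoengine import Document, StringField

    class City(Document):
        name = StringField()

    # City("Paris")           # positional arguments are no longer supported
    city = City(name="Paris")  # keyword arguments are now required when instantiating documents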
Changes in 0.18.1 ================= -- Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 +- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 - Add Python 3.7 to Travis CI. #2058 Changes in 0.18.0 ================= - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066 -- Improve performance by avoiding a call to `to_mongo` in `Document.save()`. #2049 +- Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049 - Connection/disconnection improvements: - - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all`. + - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``. - Fix disconnecting. #566 #1599 #605 #607 #1213 #565 - - Improve documentation of `connect`/`disconnect`. + - Improve documentation of ``connect``/``disconnect``. - Fix issue when using multiple connections to the same mongo with different credentials. #2047 - - `connect` fails immediately when db name contains invalid characters. #2031 #1718 -- Fix the default write concern of `Document.save` that was overwriting the connection write concern. #568 -- Fix querying on `List(EmbeddedDocument)` subclasses fields. #1961 #1492 -- Fix querying on `(Generic)EmbeddedDocument` subclasses fields. #475 -- Fix `QuerySet.aggregate` so that it takes limit and skip value into account. #2029 -- Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields`. #2020 -- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning `True`/`False`. #2050 + - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718 +- Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568 +- Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492 +- Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475 +- Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029 +- Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020 +- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050 - BREAKING CHANGES (associated with connection/disconnection fixes): - - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first). - - `disconnect` now clears `mongoengine.connection._connection_settings`. - - `disconnect` now clears the cached attribute `Document._collection`. -- BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` no longer exist. #1552 + - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first). + - ``disconnect`` now clears ``mongoengine.connection._connection_settings``. + - ``disconnect`` now clears the cached attribute ``Document._collection``. 
+- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 Changes in 0.17.0 ================= -- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (`_cls`, `_id`) when using `QuerySet.as_pymongo`. #1976 +- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976 - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 -- DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 -- Fix `.only()` working improperly after using `.count()` of the same instance of a `QuerySet`. -- Fix `batch_size` that was not copied when cloning a `QuerySet` object. #2011 -- Fix `InvalidStringData` error when using `modify` on a `BinaryField`. #1127 +- DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 +- Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``. +- Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011 +- Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127 - Fix test suite and CI to support MongoDB v3.4. #1445 - Fix reference fields querying the database on each access if value contains orphan DBRefs. ================= Changes in 0.16.3 ================= -- Fix `$push` with the `$position` operator not working with lists in embedded documents. #1965 +- Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965 ================= Changes in 0.16.2 ================= -- Fix `.save()` that fails when called with `write_concern=None` (regression of 0.16.1). #1958 +- Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958 ================= Changes in 0.16.1 ================= -- Fix `_cls` that is not set properly in the `Document` constructor (regression). #1950 -- Fix a bug in the `_delta` method - update of a `ListField` depends on an unrelated dynamic field update. #1733 -- Remove PyMongo's deprecated `save()` method and use `insert_one()` instead. #1899 +- Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950 +- Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733 +- Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899 ================= Changes in 0.16.0 ================= - POTENTIAL BREAKING CHANGES: - - `EmbeddedDocumentField` will no longer accept references to Document classes in its constructor. #1661 - - Get rid of the `basecls` parameter from the `DictField` constructor (dead code). #1876 - - Default value of the `ComplexDateTime` field is now `None` (and no longer the current datetime). #1368 -- Fix an unhashable `TypeError` when referencing a `Document` with a compound key in an `EmbeddedDocument`. #1685 -- Fix a bug where an `EmbeddedDocument` with the same id as its parent would not be tracked for changes. #1768 -- Fix the fact that a bulk `QuerySet.insert` was not setting primary keys of inserted document instances. #1919 -- Fix a bug when referencing an abstract class in a `ReferenceField`. #1920 -- Allow modifications to the document made in `pre_save_post_validation` to be taken into account. 
#1202 + - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661 + - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876 + - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368 +- Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685 +- Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768 +- Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919 +- Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920 +- Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202 - Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 -- Fix side effects of using `QuerySet.no_dereference` on other documents. #1677 -- Fix `TypeError` when using lazy Django translation objects as translated choices. #1879 +- Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677 +- Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879 - Improve Python 2-3 codebase compatibility. #1889 -- Fix support for changing the default value of the `ComplexDateTime` field. #1368 -- Improve error message in case an `EmbeddedDocumentListField` receives an `EmbeddedDocument` instance instead of a list. #1877 -- Fix the `inc` and `dec` operators for the `DecimalField`. #1517 #1320 -- Ignore `killcursors` queries in `query_counter` context manager. #1869 -- Fix the fact that `query_counter` was modifying the initial profiling level in case it was != 0. #1870 -- Repair the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions. #1865 -- Fix index creation error that was swallowed by `hasattr` under Python 2. #1688 -- `QuerySet.limit` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 +- Fix support for changing the default value of the ``ComplexDateTime`` field. #1368 +- Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877 +- Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320 +- Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869 +- Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870 +- Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865 +- Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688 +- ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 - Bulk insert updates the IDs of the input documents instances. #1919 -- Fix a harmless bug related to `GenericReferenceField` where modifications in the generic-referenced document were tracked in the parent. #1934 -- Improve validation of the `BinaryField`. #273 -- Implement lazy regex compiling in Field classes to improve `import mongoengine` performance. #1806 -- Update `GridFSProxy.__str__` so that it would always print both the filename and grid_id. #710 -- Add `__repr__` to `Q` and `QCombination` classes. #1843 -- Fix bug in the `BaseList.__iter__` operator (was occuring when modifying a BaseList while iterating over it). #1676 -- Add a `DateField`. 
#513 +- Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934 +- Improve validation of the ``BinaryField``. #273 +- Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806 +- Update ``GridFSProxy.__str__`` so that it would always print both the filename and grid_id. #710 +- Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843 +- Fix bug in the ``BaseList.__iter__`` operator (was occuring when modifying a BaseList while iterating over it). #1676 +- Add a ``DateField``. #513 - Various improvements to the documentation. - Various code quality improvements. Changes in 0.15.3 ================= -- `Queryset.update/update_one` methods now return an `UpdateResult` when `full_result=True` is provided and no longer a dict. #1491 -- Improve `LazyReferenceField` and `GenericLazyReferenceField` with nested fields. #1704 -- Fix the subfield resolve error in `generic_emdedded_document` query. #1651 #1652 -- Use each modifier only with `$position`. #1673 #1675 -- Fix validation errors in the `GenericEmbeddedDocumentField`. #1067 -- Update cached fields when a `fields` argument is given. #1712 -- Add a `db` parameter to `register_connection` for compatibility with `connect`. -- Use PyMongo v3.x's `insert_one` and `insert_many` in `Document.insert`. #1491 -- Use PyMongo v3.x's `update_one` and `update_many` in `Document.update` and `QuerySet.update`. #1491 -- Fix how `reload(fields)` affects changed fields. #1371 +- ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491 +- Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704 +- Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652 +- Use each modifier only with ``$position``. #1673 #1675 +- Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067 +- Update cached fields when a ``fields`` argument is given. #1712 +- Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``. +- Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491 +- Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491 +- Fix how ``reload(fields)`` affects changed fields. #1371 - Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 Changes in 0.15.0 ================= -- Add `LazyReferenceField` and `GenericLazyReferenceField`. #1230 +- Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230 Changes in 0.14.1 ================= -- Remove `SemiStrictDict` and start using a regular dict for `BaseDocument._data`. #1630 -- Add support for the `$position` param in the `$push` operator. #1566 -- Fix `DateTimeField` interpreting an empty string as today. #1533 -- Add a missing `__ne__` method to the `GridFSProxy` class. #1632 -- Fix `BaseQuerySet._fields_to_db_fields`. #1553 +- Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630 +- Add support for the ``$position`` param in the ``$push`` operator. #1566 +- Fix ``DateTimeField`` interpreting an empty string as today. #1533 +- Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632 +- Fix ``BaseQuerySet._fields_to_db_fields``. 
#1553 Changes in 0.14.0 ================= -- BREAKING CHANGE: Remove the `coerce_types` param from `QuerySet.as_pymongo`. #1549 -- POTENTIAL BREAKING CHANGE: Make `EmbeddedDocument` not hashable by default. #1528 +- BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549 +- POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528 - Improve code quality. #1531, #1540, #1541, #1547 Changes in 0.13.0 ================= -- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see docs/upgrade.rst for details. +- POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details. Changes in 0.12.0 ================= -- POTENTIAL BREAKING CHANGE: Fix `limit`/`skip`/`hint`/`batch_size` chaining. #1476 -- POTENTIAL BREAKING CHANGE: Change a public `QuerySet.clone_into` method to a private `QuerySet._clone_into`. #1476 -- Fix the way `Document.objects.create` works with duplicate IDs. #1485 +- POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476 +- POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476 +- Fix the way ``Document.objects.create`` works with duplicate IDs. #1485 - Fix connecting to a replica set with PyMongo 2.x. #1436 - Fix using sets in field choices. #1481 -- Fix deleting items from a `ListField`. #1318 -- Fix an obscure error message when filtering by `field__in=non_iterable`. #1237 -- Fix behavior of a `dec` update operator. #1450 -- Add a `rename` update operator. #1454 -- Add validation for the `db_field` parameter. #1448 -- Fix the error message displayed when querying an `EmbeddedDocumentField` by an invalid value. #1440 +- Fix deleting items from a ``ListField``. #1318 +- Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237 +- Fix behavior of a ``dec`` update operator. #1450 +- Add a ``rename`` update operator. #1454 +- Add validation for the ``db_field`` parameter. #1448 +- Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440 - Fix the error message displayed when validating Unicode URLs. #1486 - Raise an error when trying to save an abstract document. #1449 Changes in 0.11.0 ================= -- BREAKING CHANGE: Rename `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 +- BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428 - BREAKING CHANGE: Drop Python v2.6 support. #1428 -- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 -- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 -- Fix absent rounding for the `DecimalField` when `force_string` is set. #1103 +- BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428 +- BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334 +- Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. 
#1103 Changes in 0.10.8 ================= -- Add support for `QuerySet.batch_size`. (#1426) +- Add support for ``QuerySet.batch_size``. (#1426) - Fix a query set iteration within an iteration. #1427 - Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421 -- Add an ability to filter the `GenericReferenceField` by an `ObjectId` and a `DBRef`. #1425 +- Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425 - Fix cascading deletes for models with a custom primary key field. #1247 - Add ability to specify an authentication mechanism (e.g. X.509). #1333 -- Add support for falsy primary keys (e.g. `doc.pk = 0`). #1354 -- Fix `QuerySet.sum/average` for fields w/ an explicit `db_field`. #1417 -- Fix filtering by `embedded_doc=None`. #1422 -- Add support for `Cursor.comment`. #1420 -- Fix `doc.get__display` methods. #1419 -- Fix the `__repr__` method of the `StrictDict` #1424 +- Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354 +- Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417 +- Fix filtering by ``embedded_doc=None``. #1422 +- Add support for ``Cursor.comment``. #1420 +- Fix ``doc.get__display`` methods. #1419 +- Fix the ``__repr__`` method of the ``StrictDict`` #1424 - Add a deprecation warning for Python v2.6. Changes in 0.10.7 ================= - Drop Python 3.2 support #1390 - Fix a bug where a dynamic doc has an index inside a dict field. #1278 -- Fix: `ListField` minus index assignment does not work. #1128 +- Fix: ``ListField`` minus index assignment does not work. #1128 - Fix cascade delete mixing among collections. #1224 -- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls. #1206 -- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set. -- Fix a bug where a count on `ListField` of `EmbeddedDocumentField` fails. #1187 -- Fix `LongField` values stored as int32 in Python 3. #1253 -- `MapField` now handles unicode keys correctly. #1267 -- `ListField` now handles negative indicies correctly. #1270 -- Fix an `AttributeError` when initializing an `EmbeddedDocument` with positional args. #681 -- Fix a `no_cursor_timeout` error with PyMongo v3.x. #1304 -- Replace map-reduce based `QuerySet.sum/average` with aggregation-based implementations. #1336 -- Fix support for `__` to escape field names that match operators' names in `update`. #1351 -- Fix `BaseDocument._mark_as_changed`. #1369 -- Add support for pickling `QuerySet` instances. #1397 +- Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206 +- Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set. +- Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187 +- Fix ``LongField`` values stored as int32 in Python 3. #1253 +- ``MapField`` now handles unicode keys correctly. #1267 +- ``ListField`` now handles negative indicies correctly. #1270 +- Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681 +- Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304 +- Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336 +- Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351 +- Fix ``BaseDocument._mark_as_changed``. 
#1369 +- Add support for pickling ``QuerySet`` instances. #1397 - Fix connecting to a list of hosts. #1389 -- Fix a bug where accessing broken references wouldn't raise a `DoesNotExist` error. #1334 -- Fix not being able to specify `use_db_field=False` on `ListField(EmbeddedDocumentField)` instances. #1218 +- Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334 +- Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218 - Improvements to the dictionary field's docs. #1383 Changes in 0.10.6 @@ -225,12 +225,12 @@ Changes in 0.10.5 Changes in 0.10.4 ================= -- `SaveConditionError` is now importable from the top level package. #1165 -- Add a `QuerySet.upsert_one` method. #1157 +- ``SaveConditionError`` is now importable from the top level package. #1165 +- Add a ``QuerySet.upsert_one`` method. #1157 Changes in 0.10.3 ================= -- Fix `read_preference` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 +- Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 Changes in 0.10.2 ================= @@ -241,15 +241,15 @@ Changes in 0.10.2 Changes in 0.10.1 ================= - Fix infinite recursion with cascade delete rules under specific conditions. #1046 -- Fix `CachedReferenceField` bug when loading cached docs as `DBRef` but failing to save them. #1047 +- Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047 - Fix ignored chained options. #842 -- `Document.save`'s `save_condition` error raises a `SaveConditionError` exception. #1070 -- Fix `Document.reload` for the `DynamicDocument`. #1050 -- `StrictDict` & `SemiStrictDict` are shadowed at init time. #1105 -- Fix `ListField` negative index assignment not working. #1119 +- ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070 +- Fix ``Document.reload`` for the ``DynamicDocument``. #1050 +- ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105 +- Fix ``ListField`` negative index assignment not working. #1119 - Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126 - Remove test dependencies (nose and rednose) from install dependencies. #1079 -- Recursively build a query when using the `elemMatch` operator. #1130 +- Recursively build a query when using the ``elemMatch`` operator. #1130 - Fix instance back references for lists of embedded documents. #1131 Changes in 0.10.0 @@ -260,7 +260,7 @@ Changes in 0.10.0 - Removed get_or_create() deprecated since 0.8.0. #300 - Improve Document._created status when switch collection and db #1020 - Queryset update doesn't go through field validation #453 -- Added support for specifying authentication source as option `authSource` in URI. #967 +- Added support for specifying authentication source as option ``authSource`` in URI. #967 - Fixed mark_as_changed to handle higher/lower level fields changed. #927 - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 - Support += and *= for ListField #595 @@ -276,7 +276,7 @@ Changes in 0.10.0 - Fixes some internal _id handling issue. #961 - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 - Capped collection multiple of 256. 
#1011 -- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods. +- Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods. - Fix for delete with write_concern {'w': 0}. #1008 - Allow dynamic lookup for more than two parts. #882 - Added support for min_distance on geo queries. #831 @@ -285,10 +285,10 @@ Changes in 0.10.0 Changes in 0.9.0 ================ - Update FileField when creating a new file #714 -- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826 +- Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826 - ComplexDateTimeField should fall back to None when null=True #864 - Request Support for $min, $max Field update operators #863 -- `BaseDict` does not follow `setdefault` #866 +- ``BaseDict`` does not follow ``setdefault`` #866 - Add support for $type operator # 766 - Fix tests for pymongo 2.8+ #877 - No module named 'django.utils.importlib' (Django dev) #872 @@ -309,13 +309,13 @@ Changes in 0.9.0 - Stop ensure_indexes running on a secondaries unless connection is through mongos #746 - Not overriding default values when loading a subset of fields #399 - Saving document doesn't create new fields in existing collection #620 -- Added `Queryset.aggregate` wrapper to aggregation framework #703 +- Added ``Queryset.aggregate`` wrapper to aggregation framework #703 - Added support to show original model fields on to_json calls instead of db_field #697 - Added Queryset.search_text to Text indexes searchs #700 - Fixed tests for Django 1.7 #696 - Follow ReferenceFields in EmbeddedDocuments with select_related #690 - Added preliminary support for text indexes #680 -- Added `elemMatch` operator as well - `match` is too obscure #653 +- Added ``elemMatch`` operator as well - ``match`` is too obscure #653 - Added support for progressive JPEG #486 #548 - Allow strings to be used in index creation #675 - Fixed EmbeddedDoc weakref proxy issue #592 @@ -351,11 +351,11 @@ Changes in 0.9.0 - Increase email field length to accommodate new TLDs #726 - index_cls is ignored when deciding to set _cls as index prefix #733 - Make 'db' argument to connection optional #737 -- Allow atomic update for the entire `DictField` #742 +- Allow atomic update for the entire ``DictField`` #742 - Added MultiPointField, MultiLineField, MultiPolygonField - Fix multiple connections aliases being rewritten #748 - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 -- Make `in_bulk()` respect `no_dereference()` #775 +- Make ``in_bulk()`` respect ``no_dereference()`` #775 - Handle None from model __str__; Fixes #753 #754 - _get_changed_fields fix for embedded documents with id field. 
#925 @@ -409,18 +409,18 @@ Changes in 0.8.4 Changes in 0.8.3 ================ -- Fixed EmbeddedDocuments with `id` also storing `_id` (#402) +- Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402) - Added get_proxy_object helper to filefields (#391) - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) - Fixed sum and average mapreduce dot notation support (#375, #376, #393) - Fixed as_pymongo to return the id (#386) -- Document.select_related() now respects `db_alias` (#377) +- Document.select_related() now respects ``db_alias`` (#377) - Reload uses shard_key if applicable (#384) - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 -- Fixed pickling dynamic documents `_dynamic_fields` (#387) +- Fixed pickling dynamic documents ``_dynamic_fields`` (#387) - Fixed ListField setslice and delslice dirty tracking (#390) - Added Django 1.5 PY3 support (#392) - Added match ($elemMatch) support for EmbeddedDocuments (#379) @@ -461,7 +461,7 @@ Changes in 0.8.0 ================ - Fixed querying ReferenceField custom_id (#317) - Fixed pickle issues with collections (#316) -- Added `get_next_value` preview for SequenceFields (#319) +- Added ``get_next_value`` preview for SequenceFields (#319) - Added no_sub_classes context manager and queryset helper (#312) - Querysets now utilises a local cache - Changed __len__ behaviour in the queryset (#247, #311) @@ -490,7 +490,7 @@ Changes in 0.8.0 - Updated connection to use MongoClient (#262, #274) - Fixed db_alias and inherited Documents (#143) - Documentation update for document errors (#124) -- Deprecated `get_or_create` (#35) +- Deprecated ``get_or_create`` (#35) - Updated inheritable objects created by upsert now contain _cls (#118) - Added support for creating documents with embedded documents in a single operation (#6) - Added to_json and from_json to Document (#1) @@ -611,7 +611,7 @@ Changes in 0.7.0 - Fixed UnboundLocalError in composite index with pk field (#88) - Updated ReferenceField's to optionally store ObjectId strings this will become the default in 0.8 (#89) -- Added FutureWarning - save will default to `cascade=False` in 0.8 +- Added FutureWarning - save will default to ``cascade=False`` in 0.8 - Added example of indexing embedded document fields (#75) - Fixed ImageField resizing when forcing size (#80) - Add flexibility for fields handling bad data (#78) @@ -707,7 +707,7 @@ Changes in 0.6.8 ================ - Fixed FileField losing reference when no default set - Removed possible race condition from FileField (grid_file) -- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` +- Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()`` - Added support for pull operations on nested EmbeddedDocuments - Added support for choices with GenericReferenceFields - Added support for choices with GenericEmbeddedDocumentFields @@ -722,7 +722,7 @@ Changes in 0.6.7 - Fixed indexing on '_id' or 'pk' or 'id' - Invalid data from the DB now raises a InvalidDocumentError - Cleaned up the Validation Error - docs and code -- Added meta `auto_create_index` so you can disable index creation +- Added meta ``auto_create_index`` so you can disable index creation - Added write concern options to inserts - Fixed typo in meta for index options - Bug fix Read preference now passed correctly @@ -763,7 +763,6 @@ Changes in 0.6.1 Changes in 0.6 ============== - - Added FutureWarning to 
inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 - Added support for covered indexes when inheritance is off - No longer always upsert on save for items with a '_id' @@ -988,7 +987,6 @@ Changes in v0.1.3 querying takes place - A few minor bugfixes - Changes in v0.1.2 ================= - Query values may be processed before before being used in queries @@ -997,7 +995,6 @@ Changes in v0.1.2 - Added ``BooleanField`` - Added ``Document.reload()`` method - Changes in v0.1.1 ================= - Documents may now use capped collections From 1338839b52dfefe3a63dd78bd4006900a6784a17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Wed, 3 Jul 2019 11:18:56 +0200 Subject: [PATCH 034/216] Update changelog.rst --- docs/changelog.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ecdbf381..e9f342e1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,14 +7,14 @@ Development =========== - (Fill this out as you fix issues and develop your features). - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 - - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. - - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. - - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. + - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. + - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. + - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. - BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113 - BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111 - - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. + - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. + - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. - The codebase is now formatted using ``black``. #2109 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 From abe8070c36bbbd1598b4752bd5fb3b082b33c955 Mon Sep 17 00:00:00 2001 From: Andreas Doll Date: Mon, 8 Jul 2019 14:16:09 +0200 Subject: [PATCH 035/216] Document register_connection arguments in correct order (#2121) Put the documentation of the parameter 'name' of the function 'register_connection' in order of appearance in the function signature. 
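For reference, a minimal usage sketch of register_connection following the argument order this patch documents (the alias, database name, host and port below are placeholder values, not taken from the patch, and the function accepts further options omitted here):

    from mongoengine.connection import register_connection

    # Register an extra connection under an alias; `db` mirrors the `db`
    # argument of connect(), while `name` names the specific database.
    # All values here are placeholders for illustration only.
    register_connection(
        alias="reporting",
        db="reporting_db",
        host="localhost",
        port=27017,
    )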
--- mongoengine/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index b1e12a96..8aa95daa 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -178,8 +178,8 @@ def register_connection( : param alias: the name that will be used to refer to this connection throughout MongoEngine - : param name: the name of the specific database to use : param db: the name of the database to use, for compatibility with connect + : param name: the name of the specific database to use : param host: the host name of the: program: `mongod` instance to connect to : param port: the port that the: program: `mongod` instance is running on : param read_preference: The read preference for the collection From aa76ccdd25d58c6563a0548ea13f42ffbe46d2fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Tue, 9 Jul 2019 12:08:26 +0200 Subject: [PATCH 036/216] Fix Document._object_key (#2125) Previous implementation of `Document._object_key` was *pretending* to work on MongoEngine-level fields (e.g. using "pk" instead of "_id" and separating nested field parts by "__" instead of "."), but then it was also attempting to transform field names from the `shard_key` into DB-level fields. This, expectedly, didn't really work well. Most of the test cases added in this commit were failing prior to the code fixes. --- docs/changelog.rst | 3 +- mongoengine/base/document.py | 1 + mongoengine/document.py | 16 ++++---- tests/document/instance.py | 76 +++++++++++++++++++++++++++++++++--- 4 files changed, 82 insertions(+), 14 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e9f342e1..7cf74d66 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -15,8 +15,9 @@ Development - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. -- The codebase is now formatted using ``black``. #2109 +- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 +- The codebase is now formatted using ``black``. #2109 Changes in 0.18.2 ================= diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 928a00c2..f5109b44 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -202,6 +202,7 @@ class BaseDocument(object): self__initialised = self._initialised except AttributeError: self__initialised = False + # Check if the user has created a new instance of a class if ( self._is_document diff --git a/mongoengine/document.py b/mongoengine/document.py index 41166df4..23968f17 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -575,22 +575,24 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): @property def _object_key(self): - """Get the query dict that can be used to fetch this object from - the database. + """Return a query dict that can be used to fetch this document. Most of the time the dict is a simple PK lookup, but in case of a sharded collection with a compound shard key, it can contain a more complex query. + + Note that the dict returned by this method uses MongoEngine field + names instead of PyMongo field names (e.g. 
"pk" instead of "_id", + "some__nested__field" instead of "some.nested.field", etc.). """ select_dict = {"pk": self.pk} shard_key = self.__class__._meta.get("shard_key", tuple()) for k in shard_key: - path = self._lookup_field(k.split(".")) - actual_key = [p.db_field for p in path] val = self - for ak in actual_key: - val = getattr(val, ak) - select_dict["__".join(actual_key)] = val + field_parts = k.split(".") + for part in field_parts: + val = getattr(val, part) + select_dict["__".join(field_parts)] = val return select_dict def update(self, **kwargs): diff --git a/tests/document/instance.py b/tests/document/instance.py index 49606cff..9c854f8d 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -466,21 +466,33 @@ class InstanceTest(MongoDBTestCase): meta = {"shard_key": ("superphylum",)} Animal.drop_collection() - doc = Animal(superphylum="Deuterostomia") - doc.save() + doc = Animal.objects.create(superphylum="Deuterostomia") mongo_db = get_mongodb_version() CMD_QUERY_KEY = "command" if mongo_db >= MONGODB_36 else "query" - with query_counter() as q: doc.reload() query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] self.assertEqual( - set(query_op[CMD_QUERY_KEY]["filter"].keys()), - set(["_id", "superphylum"]), + set(query_op[CMD_QUERY_KEY]["filter"].keys()), {"_id", "superphylum"} ) - Animal.drop_collection() + def test_reload_sharded_with_db_field(self): + class Person(Document): + nationality = StringField(db_field="country") + meta = {"shard_key": ("nationality",)} + + Person.drop_collection() + doc = Person.objects.create(nationality="Poland") + + mongo_db = get_mongodb_version() + CMD_QUERY_KEY = "command" if mongo_db >= MONGODB_36 else "query" + with query_counter() as q: + doc.reload() + query_op = q.db.system.profile.find({"ns": "mongoenginetest.person"})[0] + self.assertEqual( + set(query_op[CMD_QUERY_KEY]["filter"].keys()), {"_id", "country"} + ) def test_reload_sharded_nested(self): class SuperPhylum(EmbeddedDocument): @@ -3616,5 +3628,57 @@ class InstanceTest(MongoDBTestCase): User.objects().select_related() +class ObjectKeyTestCase(MongoDBTestCase): + def test_object_key_simple_document(self): + class Book(Document): + title = StringField() + + book = Book(title="Whatever") + self.assertEqual(book._object_key, {"pk": None}) + + book.pk = ObjectId() + self.assertEqual(book._object_key, {"pk": book.pk}) + + def test_object_key_with_custom_primary_key(self): + class Book(Document): + isbn = StringField(primary_key=True) + title = StringField() + + book = Book(title="Sapiens") + self.assertEqual(book._object_key, {"pk": None}) + + book = Book(pk="0062316117") + self.assertEqual(book._object_key, {"pk": "0062316117"}) + + def test_object_key_in_a_sharded_collection(self): + class Book(Document): + title = StringField() + meta = {"shard_key": ("pk", "title")} + + book = Book() + self.assertEqual(book._object_key, {"pk": None, "title": None}) + book = Book(pk=ObjectId(), title="Sapiens") + self.assertEqual(book._object_key, {"pk": book.pk, "title": "Sapiens"}) + + def test_object_key_with_custom_db_field(self): + class Book(Document): + author = StringField(db_field="creator") + meta = {"shard_key": ("pk", "author")} + + book = Book(pk=ObjectId(), author="Author") + self.assertEqual(book._object_key, {"pk": book.pk, "author": "Author"}) + + def test_object_key_with_nested_shard_key(self): + class Author(EmbeddedDocument): + name = StringField() + + class Book(Document): + author = EmbeddedDocumentField(Author) + meta = {"shard_key": 
("pk", "author.name")} + + book = Book(pk=ObjectId(), author=Author(name="Author")) + self.assertEqual(book._object_key, {"pk": book.pk, "author__name": "Author"}) + + if __name__ == "__main__": unittest.main() From 290d9df3eb6491f22a8feea0f2e9e909bc94b621 Mon Sep 17 00:00:00 2001 From: Nuno Rodrigues Date: Fri, 12 Jul 2019 13:33:19 +0100 Subject: [PATCH 037/216] Set default database when using mongomock. When passing host `mongomock://localhost/some-default-database` to `connect` the default database was `"test"` instead of `"some-default-database"`. Fixes: #2130 --- mongoengine/connection.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 8aa95daa..01e9a7af 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -103,7 +103,13 @@ def _get_connection_settings( if entity.startswith("mongomock://"): conn_settings["is_mock"] = True # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` - resolved_hosts.append(entity.replace("mongomock://", "mongodb://", 1)) + new_entity = entity.replace("mongomock://", "mongodb://", 1) + resolved_hosts.append(new_entity) + + uri_dict = uri_parser.parse_uri(new_entity) + + if uri_dict.get("database"): + conn_settings["name"] = uri_dict.get("database") # Handle URI style connections, only updating connection params which # were explicitly specified in the URI. From 1aba145bc6f8f944cd66a5710a7d7d46bd4fe841 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 15 Jul 2019 09:32:26 +0200 Subject: [PATCH 038/216] Split requirements into requirements.txt and requirements-lint.txt I'm doing this because it makes sense to separate which requirements are needed to develop the package and which are purely needed for static code analysis. That said, the trigger for this commit was that ReadTheDocs automatically tries to install everything that's in requirements.txt and, since `black` isn't available on Python 2.7, it was failing. See https://readthedocs.org/projects/mongoengine-odm/builds/9371765/. Refs #2105 --- requirements-lint.txt | 3 +++ requirements.txt | 3 --- 2 files changed, 3 insertions(+), 3 deletions(-) create mode 100644 requirements-lint.txt diff --git a/requirements-lint.txt b/requirements-lint.txt new file mode 100644 index 00000000..9dc6123b --- /dev/null +++ b/requirements-lint.txt @@ -0,0 +1,3 @@ +black +flake8 +flake8-import-order diff --git a/requirements.txt b/requirements.txt index 62ad8766..46eabac3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,5 @@ -black nose pymongo>=3.4 six==1.10.0 -flake8 -flake8-import-order Sphinx==1.5.5 sphinx-rtd-theme==0.2.4 From 921c1fa412c81e2c01fb529d7192a1b141ab48da Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 15 Jul 2019 09:50:29 +0200 Subject: [PATCH 039/216] Dummy commit to confirm that the new RTD webhook works From 79454b5eed0cda6e66c939234a548695a0209c83 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 15 Jul 2019 09:54:18 +0200 Subject: [PATCH 040/216] Fix formatting of the changelog RTD didn't render it properly previously. Refs #2105 --- docs/changelog.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7cf74d66..d3b547ee 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,6 +3,7 @@ Changelog ========= +=========== Development =========== - (Fill this out as you fix issues and develop your features). 
@@ -19,16 +20,19 @@ Development - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 - The codebase is now formatted using ``black``. #2109 +================= Changes in 0.18.2 ================= - Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 - Various code clarity and documentation improvements. +================= Changes in 0.18.1 ================= - Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 - Add Python 3.7 to Travis CI. #2058 +================= Changes in 0.18.0 ================= - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. @@ -52,6 +56,7 @@ Changes in 0.18.0 - ``disconnect`` now clears the cached attribute ``Document._collection``. - BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 +================= Changes in 0.17.0 ================= - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976 From d09af430e89618736cd5e3594b9b3e2a9e1b5649 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 15 Jul 2019 09:57:48 +0200 Subject: [PATCH 041/216] =?UTF-8?q?Fix=20formatting=20of=20the=20changelog?= =?UTF-8?q?=20=E2=80=93=20part=202?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit RTD didn't render it properly previously. Refs #2105 --- docs/changelog.rst | 9 --------- 1 file changed, 9 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d3b547ee..55fa4b25 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,7 +3,6 @@ Changelog ========= -=========== Development =========== - (Fill this out as you fix issues and develop your features). @@ -20,19 +19,16 @@ Development - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 - The codebase is now formatted using ``black``. #2109 -================= Changes in 0.18.2 ================= - Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 - Various code clarity and documentation improvements. -================= Changes in 0.18.1 ================= - Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 - Add Python 3.7 to Travis CI. #2058 -================= Changes in 0.18.0 ================= - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. @@ -56,7 +52,6 @@ Changes in 0.18.0 - ``disconnect`` now clears the cached attribute ``Document._collection``. - BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 -================= Changes in 0.17.0 ================= - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976 @@ -68,24 +63,20 @@ Changes in 0.17.0 - Fix test suite and CI to support MongoDB v3.4. #1445 - Fix reference fields querying the database on each access if value contains orphan DBRefs. -================= Changes in 0.16.3 ================= - Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965 -================= Changes in 0.16.2 ================= - Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). 
#1958 -================= Changes in 0.16.1 ================= - Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950 - Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733 - Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899 -================= Changes in 0.16.0 ================= - POTENTIAL BREAKING CHANGES: From ac416aeeb3e30716b1da2f21465191ab81263eed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20W=C3=B3jcik?= Date: Mon, 15 Jul 2019 12:15:55 +0200 Subject: [PATCH 042/216] Improve BaseDocument.from_json tests and documentation (#2127) --- mongoengine/base/document.py | 20 ++++-- tests/document/instance.py | 121 ++++++++++++++++++++++------------- 2 files changed, 90 insertions(+), 51 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index f5109b44..a962a82b 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -463,13 +463,21 @@ class BaseDocument(object): def from_json(cls, json_data, created=False): """Converts json data to a Document instance - :param json_data: The json data to load into the Document - :param created: If True, the document will be considered as a brand new document - If False and an id is provided, it will consider that the data being - loaded corresponds to what's already in the database (This has an impact of subsequent call to .save()) - If False and no id is provided, it will consider the data as a new document - (default ``False``) + :param str json_data: The json data to load into the Document + :param bool created: Boolean defining whether to consider the newly + instantiated document as brand new or as persisted already: + * If True, consider the document as brand new, no matter what data + it's loaded with (i.e. even if an ID is loaded). + * If False and an ID is NOT provided, consider the document as + brand new. + * If False and an ID is provided, assume that the object has + already been persisted (this has an impact on the subsequent + call to .save()). + * Defaults to ``False``. """ + # TODO should `created` default to False? If the object already exists + # in the DB, you would likely retrieve it from MongoDB itself through + # a query, not load it from JSON data. return cls._from_son(json_util.loads(json_data), created=created) def __expand_dynamic_values(self, name, value): diff --git a/tests/document/instance.py b/tests/document/instance.py index 9c854f8d..d8841a40 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -3429,83 +3429,114 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(Person.objects(height=189).count(), 1) - def test_from_son(self): - # 771 - class MyPerson(self.Person): - meta = dict(shard_key=["id"]) + def test_shard_key_mutability_after_from_json(self): + """Ensure that a document ID can be modified after from_json. - p = MyPerson.from_json('{"name": "name", "age": 27}', created=True) - self.assertEqual(p.id, None) - p.id = ( - "12345" - ) # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here - p = MyPerson._from_son({"name": "name", "age": 27}, created=True) - self.assertEqual(p.id, None) - p.id = ( - "12345" - ) # in case it is not working: "OperationError: Shard Keys are immutable..." 
will be raised here + If you instantiate a document by using from_json/_from_son and you + indicate that this should be considered a new document (vs a doc that + already exists in the database), then you should be able to modify + fields that are part of its shard key (note that this is not permitted + on docs that are already persisted). - def test_from_son_created_False_without_id(self): - class MyPerson(Document): + See https://github.com/mongoengine/mongoengine/issues/771 for details. + """ + + class Person(Document): + name = StringField() + age = IntField() + meta = {"shard_key": ("id", "name")} + + p = Person.from_json('{"name": "name", "age": 27}', created=True) + self.assertEqual(p._created, True) + p.name = "new name" + p.id = "12345" + self.assertEqual(p.name, "new name") + self.assertEqual(p.id, "12345") + + def test_shard_key_mutability_after_from_son(self): + """Ensure that a document ID can be modified after _from_son. + + See `test_shard_key_mutability_after_from_json` above for more details. + """ + + class Person(Document): + name = StringField() + age = IntField() + meta = {"shard_key": ("id", "name")} + + p = Person._from_son({"name": "name", "age": 27}, created=True) + self.assertEqual(p._created, True) + p.name = "new name" + p.id = "12345" + self.assertEqual(p.name, "new name") + self.assertEqual(p.id, "12345") + + def test_from_json_created_false_without_an_id(self): + class Person(Document): name = StringField() - MyPerson.objects.delete() + Person.objects.delete() - p = MyPerson.from_json('{"name": "a_fancy_name"}', created=False) - self.assertFalse(p._created) - self.assertIsNone(p.id) + p = Person.from_json('{"name": "name"}', created=False) + self.assertEqual(p._created, False) + self.assertEqual(p.id, None) + + # Make sure the document is subsequently persisted correctly. p.save() - self.assertIsNotNone(p.id) - saved_p = MyPerson.objects.get(id=p.id) - self.assertEqual(saved_p.name, "a_fancy_name") + self.assertTrue(p.id is not None) + saved_p = Person.objects.get(id=p.id) + self.assertEqual(saved_p.name, "name") - def test_from_son_created_False_with_id(self): - # 1854 - class MyPerson(Document): + def test_from_json_created_false_with_an_id(self): + """See https://github.com/mongoengine/mongoengine/issues/1854""" + + class Person(Document): name = StringField() - MyPerson.objects.delete() + Person.objects.delete() - p = MyPerson.from_json( - '{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False + p = Person.from_json( + '{"_id": "5b85a8b04ec5dc2da388296e", "name": "name"}', created=False ) - self.assertFalse(p._created) + self.assertEqual(p._created, False) self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, "a_fancy_name") + self.assertEqual(p.name, "name") self.assertEqual(p.id, ObjectId("5b85a8b04ec5dc2da388296e")) p.save() with self.assertRaises(DoesNotExist): - # Since created=False and we gave an id in the json and _changed_fields is empty - # mongoengine assumes that the document exits with that structure already - # and calling .save() didn't save anything - MyPerson.objects.get(id=p.id) + # Since the object is considered as already persisted (thanks to + # `created=False` and an existing ID), and we haven't changed any + # fields (i.e. `_changed_fields` is empty), the document is + # considered unchanged and hence the `save()` call above did + # nothing. 
+ Person.objects.get(id=p.id) self.assertFalse(p._created) - p.name = "a new fancy name" + p.name = "a new name" self.assertEqual(p._changed_fields, ["name"]) p.save() - saved_p = MyPerson.objects.get(id=p.id) + saved_p = Person.objects.get(id=p.id) self.assertEqual(saved_p.name, p.name) - def test_from_son_created_True_with_an_id(self): - class MyPerson(Document): + def test_from_json_created_true_with_an_id(self): + class Person(Document): name = StringField() - MyPerson.objects.delete() + Person.objects.delete() - p = MyPerson.from_json( - '{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True + p = Person.from_json( + '{"_id": "5b85a8b04ec5dc2da388296e", "name": "name"}', created=True ) self.assertTrue(p._created) self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, "a_fancy_name") + self.assertEqual(p.name, "name") self.assertEqual(p.id, ObjectId("5b85a8b04ec5dc2da388296e")) p.save() - saved_p = MyPerson.objects.get(id=p.id) + saved_p = Person.objects.get(id=p.id) self.assertEqual(saved_p, p) - self.assertEqual(p.name, "a_fancy_name") + self.assertEqual(saved_p.name, "name") def test_null_field(self): # 734 From 2d02551d0a41405664dc26604e90669f6e65aca6 Mon Sep 17 00:00:00 2001 From: Nuno Rodrigues Date: Mon, 15 Jul 2019 16:53:24 +0000 Subject: [PATCH 043/216] Add test --- tests/test_connection.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/test_connection.py b/tests/test_connection.py index f9c9d098..78f7e52f 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -4,6 +4,8 @@ from pymongo import MongoClient from pymongo.errors import OperationFailure, InvalidName from pymongo import ReadPreference +from mongoengine import Document + try: import unittest2 as unittest except ImportError: @@ -269,6 +271,14 @@ class ConnectionTest(unittest.TestCase): conn = get_connection("testdb7") self.assertIsInstance(conn, mongomock.MongoClient) + disconnect_all() + class SomeDocument(Document): pass + conn = connect(host="mongomock://localhost:27017/mongoenginetest8") + some_document = SomeDocument() + some_document.save() + self.assertEqual(conn.get_default_database().name, "mongoenginetest8") + self.assertEqual(conn.database_names()[0], "mongoenginetest8") + def test_connect_with_host_list(self): """Ensure that the connect() method works when host is a list From 2424ece0c52e2253fbfec45a3b11cbda9e4a5b45 Mon Sep 17 00:00:00 2001 From: Nuno Rodrigues Date: Tue, 16 Jul 2019 13:40:46 +0000 Subject: [PATCH 044/216] Fix a linting error --- tests/test_connection.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_connection.py b/tests/test_connection.py index 78f7e52f..57141958 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -272,7 +272,10 @@ class ConnectionTest(unittest.TestCase): self.assertIsInstance(conn, mongomock.MongoClient) disconnect_all() - class SomeDocument(Document): pass + + class SomeDocument(Document): + pass + conn = connect(host="mongomock://localhost:27017/mongoenginetest8") some_document = SomeDocument() some_document.save() From 05a22d5a54b8bd7a6a411732db90464fbbb29371 Mon Sep 17 00:00:00 2001 From: Nuno Rodrigues Date: Mon, 22 Jul 2019 08:42:17 +0000 Subject: [PATCH 045/216] Extract test to its own method --- tests/test_connection.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/tests/test_connection.py b/tests/test_connection.py index 57141958..b7dc9268 100644 --- a/tests/test_connection.py +++ 
b/tests/test_connection.py @@ -271,16 +271,25 @@ class ConnectionTest(unittest.TestCase): conn = get_connection("testdb7") self.assertIsInstance(conn, mongomock.MongoClient) + def test_default_database_with_mocking(self): + """Ensure that the default database is correctly set when using mongomock. + """ + try: + import mongomock + except ImportError: + raise SkipTest("you need mongomock installed to run this testcase") + disconnect_all() class SomeDocument(Document): pass - conn = connect(host="mongomock://localhost:27017/mongoenginetest8") + conn = connect(host="mongomock://localhost:27017/mongoenginetest") some_document = SomeDocument() + # database won't exist until we save a document some_document.save() - self.assertEqual(conn.get_default_database().name, "mongoenginetest8") - self.assertEqual(conn.database_names()[0], "mongoenginetest8") + self.assertEqual(conn.get_default_database().name, "mongoenginetest") + self.assertEqual(conn.database_names()[0], "mongoenginetest") def test_connect_with_host_list(self): """Ensure that the connect() method works when host is a list From f4a06ad65d4bcec40384391d1f3fcf94a70f5111 Mon Sep 17 00:00:00 2001 From: Nuno Rodrigues Date: Mon, 22 Jul 2019 08:45:35 +0000 Subject: [PATCH 046/216] Optimize database name getter calls --- mongoengine/connection.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 01e9a7af..4e0c60b0 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -108,8 +108,9 @@ def _get_connection_settings( uri_dict = uri_parser.parse_uri(new_entity) - if uri_dict.get("database"): - conn_settings["name"] = uri_dict.get("database") + database = uri_dict.get("database") + if database: + conn_settings["name"] = database # Handle URI style connections, only updating connection params which # were explicitly specified in the URI. 
@@ -117,8 +118,9 @@ def _get_connection_settings( uri_dict = uri_parser.parse_uri(entity) resolved_hosts.append(entity) - if uri_dict.get("database"): - conn_settings["name"] = uri_dict.get("database") + database = uri_dict.get("database") + if database: + conn_settings["name"] = database for param in ("read_preference", "username", "password"): if uri_dict.get(param): From c68e3e1238bf77997dbe65415935abe8528ffef8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 24 Jul 2019 21:37:16 +0200 Subject: [PATCH 047/216] Add test case for list update by negative index --- tests/document/instance.py | 49 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index d8841a40..d0193b60 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -39,10 +39,10 @@ from tests.utils import MongoDBTestCase, get_as_pymongo TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "../fields/mongoengine.png") -__all__ = ("InstanceTest",) +__all__ = ("TestDocumentInstance",) -class InstanceTest(MongoDBTestCase): +class TestDocumentInstance(MongoDBTestCase): def setUp(self): class Job(EmbeddedDocument): name = StringField() @@ -3599,6 +3599,51 @@ class InstanceTest(MongoDBTestCase): self.assertEqual(b._instance, a) self.assertEqual(idx, 2) + def test_updating_listfield_manipulate_list(self): + class Company(Document): + name = StringField() + employees = ListField(field=DictField()) + + Company.drop_collection() + + comp = Company(name="BigBank", employees=[{"name": "John"}]) + comp.save() + comp.employees.append({"name": "Bill"}) + comp.save() + + stored_comp = get_as_pymongo(comp) + self.assertEqual( + stored_comp, + { + "_id": comp.id, + "employees": [{"name": "John"}, {"name": "Bill"}], + "name": "BigBank", + }, + ) + + comp = comp.reload() + comp.employees[0]["color"] = "red" + comp.employees[-1]["color"] = "blue" + comp.employees[-1].update({"size": "xl"}) + comp.save() + + assert len(comp.employees) == 2 + assert comp.employees[0] == {"name": "John", "color": "red"} + assert comp.employees[1] == {"name": "Bill", "size": "xl", "color": "blue"} + + stored_comp = get_as_pymongo(comp) + self.assertEqual( + stored_comp, + { + "_id": comp.id, + "employees": [ + {"name": "John", "color": "red"}, + {"size": "xl", "color": "blue", "name": "Bill"}, + ], + "name": "BigBank", + }, + ) + def test_falsey_pk(self): """Ensure that we can create and update a document with Falsey PK.""" From b887ea96236c62a04a6962dabc747dda14eb1057 Mon Sep 17 00:00:00 2001 From: otrofimov Date: Thu, 8 Aug 2019 11:55:45 +0300 Subject: [PATCH 048/216] Implement collation for queryset --- mongoengine/queryset/base.py | 31 +++++++++++++++++++++++++++++++ tests/document/indexes.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index ba3ac95a..b0e1bff2 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -80,6 +80,7 @@ class BaseQuerySet(object): self._limit = None self._skip = None self._hint = -1 # Using -1 as None is a valid value for hint + self._collation = None self._batch_size = None self.only_fields = [] self._max_time_ms = None @@ -781,6 +782,7 @@ class BaseQuerySet(object): "_limit", "_skip", "_hint", + "_collation", "_auto_dereference", "_search_text", "only_fields", @@ -863,6 +865,32 @@ class BaseQuerySet(object): return queryset + def collation(self, collation=None): + 
""" + Collation allows users to specify language-specific rules for string + comparison, such as rules for lettercase and accent marks. + :param collation: `~pymongo.collation.Collation` or dict with + following fields: + { + locale: str, + caseLevel: bool, + caseFirst: str, + strength: int, + numericOrdering: bool, + alternate: str, + maxVariable: str, + backwards: str + } + Collation should be added to indexes like in test example + """ + queryset = self.clone() + queryset._collation = collation + + if queryset._cursor_obj: + queryset._cursor_obj.collation(collation) + + return queryset + def batch_size(self, size): """Limit the number of documents returned in a single batch (each batch requires a round trip to the server). @@ -1636,6 +1664,9 @@ class BaseQuerySet(object): if self._hint != -1: self._cursor_obj.hint(self._hint) + if self._collation is not None: + self._cursor_obj.collation(self._collation) + if self._batch_size is not None: self._cursor_obj.batch_size(self._batch_size) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 570e619e..0bc23d1c 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -539,6 +539,35 @@ class IndexesTest(unittest.TestCase): with self.assertRaises(ValueError): BlogPost.objects.hint(("tags", 1)).count() + def test_collation(self): + base = {'locale': "en", 'strength': 2} + + class BlogPost(Document): + name = StringField() + meta = {"indexes": [ + {"fields": ["name"], "name": 'name_index', + 'collation': base} + ]} + + BlogPost.drop_collection() + + names = tuple("%sag %i" % ('t' if n % 2 == 0 else 'T', n) for n in range(10)) + for name in names: + BlogPost(name=name).save() + + query_result = BlogPost.objects.collation(base).order_by('name') + self.assertEqual([x.name for x in query_result], + sorted(names, key=lambda x: x.lower())) + self.assertEqual(10, query_result.count()) + + incorrect_collation = {'arndom': 'wrdo'} + with self.assertRaises(OperationFailure): + BlogPost.objects.collation(incorrect_collation).count() + + query_result = BlogPost.objects.collation({}).order_by('name') + self.assertEqual([x.name for x in query_result], + sorted(names)) + def test_unique(self): """Ensure that uniqueness constraints are applied to fields. 
""" From fbb3bf869c9cdea0b6c5060e2f57fabf5b8c5e5d Mon Sep 17 00:00:00 2001 From: otrofimov Date: Thu, 8 Aug 2019 15:56:20 +0300 Subject: [PATCH 049/216] compatibility with black --- tests/document/indexes.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 0bc23d1c..fa3d1706 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -540,33 +540,34 @@ class IndexesTest(unittest.TestCase): BlogPost.objects.hint(("tags", 1)).count() def test_collation(self): - base = {'locale': "en", 'strength': 2} + base = {"locale": "en", "strength": 2} class BlogPost(Document): name = StringField() - meta = {"indexes": [ - {"fields": ["name"], "name": 'name_index', - 'collation': base} - ]} + meta = { + "indexes": [ + {"fields": ["name"], "name": "name_index", "collation": base} + ] + } BlogPost.drop_collection() - names = tuple("%sag %i" % ('t' if n % 2 == 0 else 'T', n) for n in range(10)) + names = tuple("%sag %i" % ("t" if n % 2 == 0 else "T", n) for n in range(10)) for name in names: BlogPost(name=name).save() - query_result = BlogPost.objects.collation(base).order_by('name') - self.assertEqual([x.name for x in query_result], - sorted(names, key=lambda x: x.lower())) + query_result = BlogPost.objects.collation(base).order_by("name") + self.assertEqual( + [x.name for x in query_result], sorted(names, key=lambda x: x.lower()) + ) self.assertEqual(10, query_result.count()) - incorrect_collation = {'arndom': 'wrdo'} + incorrect_collation = {"arndom": "wrdo"} with self.assertRaises(OperationFailure): BlogPost.objects.collation(incorrect_collation).count() - query_result = BlogPost.objects.collation({}).order_by('name') - self.assertEqual([x.name for x in query_result], - sorted(names)) + query_result = BlogPost.objects.collation({}).order_by("name") + self.assertEqual([x.name for x in query_result], sorted(names)) def test_unique(self): """Ensure that uniqueness constraints are applied to fields. From eecbb5ca90192a28354efd817f1383ad674100ba Mon Sep 17 00:00:00 2001 From: Ali Mirlou Date: Tue, 20 Aug 2019 19:53:49 +0430 Subject: [PATCH 050/216] Fix small typo --- mongoengine/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9b9fef6e..f8f527a3 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -2291,7 +2291,7 @@ class LineStringField(GeoJsonBaseField): .. code-block:: js {'type' : 'LineString' , - 'coordinates' : [[x1, y1], [x1, y1] ... [xn, yn]]} + 'coordinates' : [[x1, y1], [x2, y2] ... [xn, yn]]} You can either pass a dict with the full information or a list of points. 
From e86cf962e99e15eaa59e14380ff50fd9e25ac6fc Mon Sep 17 00:00:00 2001 From: Erdenezul Batmunkh Date: Wed, 21 Aug 2019 13:08:30 +0200 Subject: [PATCH 051/216] Change misleading error message --- mongoengine/queryset/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index ba3ac95a..46b20d78 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -355,8 +355,8 @@ class BaseQuerySet(object): except pymongo.errors.BulkWriteError as err: # inserting documents that already have an _id field will # give huge performance debt or raise - message = u"Document must not have _id value before bulk write (%s)" - raise NotUniqueError(message % six.text_type(err)) + message = u"Bulk write error: (%s)" + raise NotUniqueError(message % six.text_type(err.details)) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" if re.match("^E1100[01] duplicate key", six.text_type(err)): From 71a6f3d1a46702bf5d66e704519671e828626cd2 Mon Sep 17 00:00:00 2001 From: otrofimov Date: Wed, 21 Aug 2019 18:26:10 +0300 Subject: [PATCH 052/216] test_collation: Added test with `pymongo.collation.Collation` object Readable list of BlogPost names for test --- tests/document/indexes.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index fa3d1706..dcd3fc6a 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -3,6 +3,7 @@ import unittest from datetime import datetime from nose.plugins.skip import SkipTest +from pymongo.collation import Collation from pymongo.errors import OperationFailure import pymongo from six import iteritems @@ -552,7 +553,7 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() - names = tuple("%sag %i" % ("t" if n % 2 == 0 else "T", n) for n in range(10)) + names = ["tag1", "Tag2", "tag3", "Tag4", "tag5"] for name in names: BlogPost(name=name).save() @@ -560,7 +561,13 @@ class IndexesTest(unittest.TestCase): self.assertEqual( [x.name for x in query_result], sorted(names, key=lambda x: x.lower()) ) - self.assertEqual(10, query_result.count()) + self.assertEqual(5, query_result.count()) + + query_result = BlogPost.objects.collation(Collation(**base)).order_by("name") + self.assertEqual( + [x.name for x in query_result], sorted(names, key=lambda x: x.lower()) + ) + self.assertEqual(5, query_result.count()) incorrect_collation = {"arndom": "wrdo"} with self.assertRaises(OperationFailure): From ddececbfead80b2565c4c086a9cbd52ed07e17c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 28 Aug 2019 16:01:44 +0300 Subject: [PATCH 053/216] rename all test files so that they are prefixed by test_{orginal_filename}.py --- tests/__init__.py | 4 -- tests/all_warnings/__init__.py | 40 ------------- tests/all_warnings/test_warnings.py | 37 ++++++++++++ tests/document/__init__.py | 13 ----- ...class_methods.py => test_class_methods.py} | 9 +-- tests/document/{delta.py => test_delta.py} | 4 +- .../document/{dynamic.py => test_dynamic.py} | 2 +- .../document/{indexes.py => test_indexes.py} | 5 +- .../{inheritance.py => test_inheritance.py} | 8 +-- .../{instance.py => test_instance.py} | 4 +- ...lisation.py => test_json_serialisation.py} | 11 +--- .../{validation.py => test_validation.py} | 8 +-- tests/fields/__init__.py | 3 - tests/fields/test_binary_field.py | 5 +- tests/fields/{fields.py => test_fields.py} | 57 +++++++++---------- 
.../{file_tests.py => test_file_field.py} | 6 +- tests/fields/{geo.py => test_geo_fields.py} | 10 +--- tests/queryset/__init__.py | 6 -- .../{field_list.py => test_field_list.py} | 6 +- tests/queryset/{geo.py => test_geo.py} | 5 +- .../{queryset.py => test_queryset.py} | 2 +- 21 files changed, 90 insertions(+), 155 deletions(-) create mode 100644 tests/all_warnings/test_warnings.py rename tests/document/{class_methods.py => test_class_methods.py} (99%) rename tests/document/{delta.py => test_delta.py} (99%) rename tests/document/{dynamic.py => test_dynamic.py} (99%) rename tests/document/{indexes.py => test_indexes.py} (99%) rename tests/document/{inheritance.py => test_inheritance.py} (99%) rename tests/document/{instance.py => test_instance.py} (99%) rename tests/document/{json_serialisation.py => test_json_serialisation.py} (95%) rename tests/document/{validation.py => test_validation.py} (97%) rename tests/fields/{fields.py => test_fields.py} (99%) rename tests/fields/{file_tests.py => test_file_field.py} (99%) rename tests/fields/{geo.py => test_geo_fields.py} (98%) rename tests/queryset/{field_list.py => test_field_list.py} (99%) rename tests/queryset/{geo.py => test_geo.py} (99%) rename tests/queryset/{queryset.py => test_queryset.py} (99%) diff --git a/tests/__init__.py b/tests/__init__.py index 08db7186..e69de29b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +0,0 @@ -from .all_warnings import AllWarnings -from .document import * -from .queryset import * -from .fields import * diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index a755e7a3..e69de29b 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -1,40 +0,0 @@ -""" -This test has been put into a module. This is because it tests warnings that -only get triggered on first hit. This way we can ensure its imported into the -top level and called first by the test suite. -""" -import unittest -import warnings - -from mongoengine import * - - -__all__ = ("AllWarnings",) - - -class AllWarnings(unittest.TestCase): - def setUp(self): - connect(db="mongoenginetest") - self.warning_list = [] - self.showwarning_default = warnings.showwarning - warnings.showwarning = self.append_to_warning_list - - def append_to_warning_list(self, message, category, *args): - self.warning_list.append({"message": message, "category": category}) - - def tearDown(self): - # restore default handling of warnings - warnings.showwarning = self.showwarning_default - - def test_document_collection_syntax_warning(self): - class NonAbstractBase(Document): - meta = {"allow_inheritance": True} - - class InheritedDocumentFailTest(NonAbstractBase): - meta = {"collection": "fail"} - - warning = self.warning_list[0] - self.assertEqual(SyntaxWarning, warning["category"]) - self.assertEqual( - "non_abstract_base", InheritedDocumentFailTest._get_collection_name() - ) diff --git a/tests/all_warnings/test_warnings.py b/tests/all_warnings/test_warnings.py new file mode 100644 index 00000000..67204617 --- /dev/null +++ b/tests/all_warnings/test_warnings.py @@ -0,0 +1,37 @@ +""" +This test has been put into a module. This is because it tests warnings that +only get triggered on first hit. This way we can ensure its imported into the +top level and called first by the test suite. 
+""" +import unittest +import warnings + +from mongoengine import * + + +class TestAllWarnings(unittest.TestCase): + def setUp(self): + connect(db="mongoenginetest") + self.warning_list = [] + self.showwarning_default = warnings.showwarning + warnings.showwarning = self.append_to_warning_list + + def append_to_warning_list(self, message, category, *args): + self.warning_list.append({"message": message, "category": category}) + + def tearDown(self): + # restore default handling of warnings + warnings.showwarning = self.showwarning_default + + def test_document_collection_syntax_warning(self): + class NonAbstractBase(Document): + meta = {"allow_inheritance": True} + + class InheritedDocumentFailTest(NonAbstractBase): + meta = {"collection": "fail"} + + warning = self.warning_list[0] + self.assertEqual(SyntaxWarning, warning["category"]) + self.assertEqual( + "non_abstract_base", InheritedDocumentFailTest._get_collection_name() + ) diff --git a/tests/document/__init__.py b/tests/document/__init__.py index f2230c48..e69de29b 100644 --- a/tests/document/__init__.py +++ b/tests/document/__init__.py @@ -1,13 +0,0 @@ -import unittest - -from .class_methods import * -from .delta import * -from .dynamic import * -from .indexes import * -from .inheritance import * -from .instance import * -from .json_serialisation import * -from .validation import * - -if __name__ == "__main__": - unittest.main() diff --git a/tests/document/class_methods.py b/tests/document/test_class_methods.py similarity index 99% rename from tests/document/class_methods.py rename to tests/document/test_class_methods.py index 87f1215b..c5df0843 100644 --- a/tests/document/class_methods.py +++ b/tests/document/test_class_methods.py @@ -2,15 +2,12 @@ import unittest from mongoengine import * -from mongoengine.pymongo_support import list_collection_names - -from mongoengine.queryset import NULLIFY, PULL from mongoengine.connection import get_db - -__all__ = ("ClassMethodsTest",) +from mongoengine.pymongo_support import list_collection_names +from mongoengine.queryset import NULLIFY, PULL -class ClassMethodsTest(unittest.TestCase): +class TestClassMethods(unittest.TestCase): def setUp(self): connect(db="mongoenginetest") self.db = get_db() diff --git a/tests/document/delta.py b/tests/document/test_delta.py similarity index 99% rename from tests/document/delta.py rename to tests/document/test_delta.py index 8f1575e6..632d9b3f 100644 --- a/tests/document/delta.py +++ b/tests/document/test_delta.py @@ -7,9 +7,9 @@ from mongoengine.pymongo_support import list_collection_names from tests.utils import MongoDBTestCase -class DeltaTest(MongoDBTestCase): +class TestDelta(MongoDBTestCase): def setUp(self): - super(DeltaTest, self).setUp() + super(TestDelta, self).setUp() class Person(Document): name = StringField() diff --git a/tests/document/dynamic.py b/tests/document/test_dynamic.py similarity index 99% rename from tests/document/dynamic.py rename to tests/document/test_dynamic.py index 414d3352..6b517d24 100644 --- a/tests/document/dynamic.py +++ b/tests/document/test_dynamic.py @@ -179,7 +179,7 @@ class TestDynamicDocument(MongoDBTestCase): def test_three_level_complex_data_lookups(self): """Ensure you can query three level document dynamic fields""" - p = self.Person.objects.create(misc={"hello": {"hello2": "world"}}) + self.Person.objects.create(misc={"hello": {"hello2": "world"}}) self.assertEqual(1, self.Person.objects(misc__hello__hello2="world").count()) def test_complex_embedded_document_validation(self): diff --git 
a/tests/document/indexes.py b/tests/document/test_indexes.py similarity index 99% rename from tests/document/indexes.py rename to tests/document/test_indexes.py index 570e619e..f94eb359 100644 --- a/tests/document/indexes.py +++ b/tests/document/test_indexes.py @@ -4,16 +4,13 @@ from datetime import datetime from nose.plugins.skip import SkipTest from pymongo.errors import OperationFailure -import pymongo from six import iteritems from mongoengine import * from mongoengine.connection import get_db -__all__ = ("IndexesTest",) - -class IndexesTest(unittest.TestCase): +class TestIndexes(unittest.TestCase): def setUp(self): self.connection = connect(db="mongoenginetest") self.db = get_db() diff --git a/tests/document/inheritance.py b/tests/document/test_inheritance.py similarity index 99% rename from tests/document/inheritance.py rename to tests/document/test_inheritance.py index 4f21d5f4..4bb46e58 100644 --- a/tests/document/inheritance.py +++ b/tests/document/test_inheritance.py @@ -15,13 +15,11 @@ from mongoengine import ( StringField, ) from mongoengine.pymongo_support import list_collection_names -from tests.utils import MongoDBTestCase from tests.fixtures import Base - -__all__ = ("InheritanceTest",) +from tests.utils import MongoDBTestCase -class InheritanceTest(MongoDBTestCase): +class TestInheritance(MongoDBTestCase): def tearDown(self): for collection in list_collection_names(self.db): self.db.drop_collection(collection) @@ -401,7 +399,7 @@ class InheritanceTest(MongoDBTestCase): class Animal(FinalDocument): name = StringField() - with self.assertRaises(ValueError) as cm: + with self.assertRaises(ValueError): class Mammal(Animal): pass diff --git a/tests/document/instance.py b/tests/document/test_instance.py similarity index 99% rename from tests/document/instance.py rename to tests/document/test_instance.py index d8841a40..9b4a16e5 100644 --- a/tests/document/instance.py +++ b/tests/document/test_instance.py @@ -39,10 +39,8 @@ from tests.utils import MongoDBTestCase, get_as_pymongo TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "../fields/mongoengine.png") -__all__ = ("InstanceTest",) - -class InstanceTest(MongoDBTestCase): +class TestInstance(MongoDBTestCase): def setUp(self): class Job(EmbeddedDocument): name = StringField() diff --git a/tests/document/json_serialisation.py b/tests/document/test_json_serialisation.py similarity index 95% rename from tests/document/json_serialisation.py rename to tests/document/test_json_serialisation.py index 33d5a6d9..26a4a6c1 100644 --- a/tests/document/json_serialisation.py +++ b/tests/document/test_json_serialisation.py @@ -1,21 +1,14 @@ import unittest import uuid -from nose.plugins.skip import SkipTest from datetime import datetime from bson import ObjectId -import pymongo - from mongoengine import * - -__all__ = ("TestJson",) +from tests.utils import MongoDBTestCase -class TestJson(unittest.TestCase): - def setUp(self): - connect(db="mongoenginetest") - +class TestJson(MongoDBTestCase): def test_json_names(self): """ Going to test reported issue: diff --git a/tests/document/validation.py b/tests/document/test_validation.py similarity index 97% rename from tests/document/validation.py rename to tests/document/test_validation.py index 78199231..7449dd33 100644 --- a/tests/document/validation.py +++ b/tests/document/test_validation.py @@ -3,14 +3,10 @@ import unittest from datetime import datetime from mongoengine import * - -__all__ = ("ValidatorErrorTest",) +from tests.utils import MongoDBTestCase -class 
ValidatorErrorTest(unittest.TestCase): - def setUp(self): - connect(db="mongoenginetest") - +class TestValidatorError(MongoDBTestCase): def test_to_dict(self): """Ensure a ValidationError handles error to_dict correctly. """ diff --git a/tests/fields/__init__.py b/tests/fields/__init__.py index 4994d0c6..e69de29b 100644 --- a/tests/fields/__init__.py +++ b/tests/fields/__init__.py @@ -1,3 +0,0 @@ -from .fields import * -from .file_tests import * -from .geo import * diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index df4bf2de..719df922 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -1,11 +1,10 @@ # -*- coding: utf-8 -*- import uuid +from bson import Binary from nose.plugins.skip import SkipTest import six -from bson import Binary - from mongoengine import * from tests.utils import MongoDBTestCase @@ -77,8 +76,6 @@ class TestBinaryField(MongoDBTestCase): self.assertEqual(0, Attachment.objects.count()) def test_primary_filter_by_binary_pk_as_str(self): - raise SkipTest("Querying by id as string is not currently supported") - class Attachment(Document): id = BinaryField(primary_key=True) diff --git a/tests/fields/fields.py b/tests/fields/test_fields.py similarity index 99% rename from tests/fields/fields.py rename to tests/fields/test_fields.py index 49e9508c..d9279c22 100644 --- a/tests/fields/fields.py +++ b/tests/fields/test_fields.py @@ -2,39 +2,38 @@ import datetime import unittest +from bson import DBRef, ObjectId, SON from nose.plugins.skip import SkipTest -from bson import DBRef, ObjectId, SON - from mongoengine import ( - Document, - StringField, - IntField, - DateTimeField, - DateField, - ValidationError, + BooleanField, ComplexDateTimeField, - FloatField, - ListField, - ReferenceField, + DateField, + DateTimeField, DictField, + Document, + DoesNotExist, + DynamicDocument, + DynamicField, EmbeddedDocument, EmbeddedDocumentField, - GenericReferenceField, - DoesNotExist, - NotRegistered, - OperationError, - DynamicField, - FieldDoesNotExist, EmbeddedDocumentListField, - MultipleObjectsReturned, - NotUniqueError, - BooleanField, - ObjectIdField, - SortedListField, + FieldDoesNotExist, + FloatField, GenericLazyReferenceField, + GenericReferenceField, + IntField, LazyReferenceField, - DynamicDocument, + ListField, + MultipleObjectsReturned, + NotRegistered, + NotUniqueError, + ObjectIdField, + OperationError, + ReferenceField, + SortedListField, + StringField, + ValidationError, ) from mongoengine.base import BaseField, EmbeddedDocumentList, _document_registry from mongoengine.errors import DeprecatedError @@ -42,7 +41,7 @@ from mongoengine.errors import DeprecatedError from tests.utils import MongoDBTestCase -class FieldTest(MongoDBTestCase): +class TestField(MongoDBTestCase): def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. 
@@ -343,7 +342,7 @@ class FieldTest(MongoDBTestCase): doc.save() # Unset all the fields - obj = HandleNoneFields._get_collection().update( + HandleNoneFields._get_collection().update( {"_id": doc.id}, {"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}}, ) @@ -416,13 +415,13 @@ class FieldTest(MongoDBTestCase): # name starting with $ with self.assertRaises(ValueError): - class User(Document): + class UserX1(Document): name = StringField(db_field="$name") # name containing a null character with self.assertRaises(ValueError): - class User(Document): + class UserX2(Document): name = StringField(db_field="name\0") def test_list_validation(self): @@ -2267,7 +2266,7 @@ class FieldTest(MongoDBTestCase): Doc(bar="test") -class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): +class TestEmbeddedDocumentListField(MongoDBTestCase): def setUp(self): """ Create two BlogPost entries in the database, each with @@ -2320,7 +2319,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): # Test with a Document post = self.BlogPost(comments=Title(content="garbage")) - with self.assertRaises(ValidationError) as e: + with self.assertRaises(ValidationError): post.validate() self.assertIn("'comments'", str(ctx_err.exception)) self.assertIn( diff --git a/tests/fields/file_tests.py b/tests/fields/test_file_field.py similarity index 99% rename from tests/fields/file_tests.py rename to tests/fields/test_file_field.py index dd2fe609..49eb5bc2 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/test_file_field.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- import copy import os -import unittest import tempfile +import unittest import gridfs +from nose.plugins.skip import SkipTest import six -from nose.plugins.skip import SkipTest from mongoengine import * from mongoengine.connection import get_db from mongoengine.python_support import StringIO @@ -35,7 +35,7 @@ def get_file(path): return bytes_io -class FileTest(MongoDBTestCase): +class TestFileField(MongoDBTestCase): def tearDown(self): self.db.drop_collection("fs.files") self.db.drop_collection("fs.chunks") diff --git a/tests/fields/geo.py b/tests/fields/test_geo_fields.py similarity index 98% rename from tests/fields/geo.py rename to tests/fields/test_geo_fields.py index 446d7171..ff4cbc83 100644 --- a/tests/fields/geo.py +++ b/tests/fields/test_geo_fields.py @@ -2,16 +2,10 @@ import unittest from mongoengine import * -from mongoengine.connection import get_db - -__all__ = ("GeoFieldTest",) +from tests.utils import MongoDBTestCase -class GeoFieldTest(unittest.TestCase): - def setUp(self): - connect(db="mongoenginetest") - self.db = get_db() - +class TestGeoField(MongoDBTestCase): def _test_for_expected_error(self, Cls, loc, expected): try: Cls(loc=loc).validate() diff --git a/tests/queryset/__init__.py b/tests/queryset/__init__.py index 31016966..e69de29b 100644 --- a/tests/queryset/__init__.py +++ b/tests/queryset/__init__.py @@ -1,6 +0,0 @@ -from .transform import * -from .field_list import * -from .queryset import * -from .visitor import * -from .geo import * -from .modify import * diff --git a/tests/queryset/field_list.py b/tests/queryset/test_field_list.py similarity index 99% rename from tests/queryset/field_list.py rename to tests/queryset/test_field_list.py index 9f0fe827..703c2031 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/test_field_list.py @@ -3,10 +3,8 @@ import unittest from mongoengine import * from mongoengine.queryset import QueryFieldList -__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest") - 
-class QueryFieldListTest(unittest.TestCase): +class TestQueryFieldList(unittest.TestCase): def test_empty(self): q = QueryFieldList() self.assertFalse(q) @@ -66,7 +64,7 @@ class QueryFieldListTest(unittest.TestCase): self.assertEqual(q.as_dict(), {"a": {"$slice": 5}}) -class OnlyExcludeAllTest(unittest.TestCase): +class TestOnlyExcludeAll(unittest.TestCase): def setUp(self): connect(db="mongoenginetest") diff --git a/tests/queryset/geo.py b/tests/queryset/test_geo.py similarity index 99% rename from tests/queryset/geo.py rename to tests/queryset/test_geo.py index 95dc913d..343f864b 100644 --- a/tests/queryset/geo.py +++ b/tests/queryset/test_geo.py @@ -6,10 +6,7 @@ from mongoengine import * from tests.utils import MongoDBTestCase -__all__ = ("GeoQueriesTest",) - - -class GeoQueriesTest(MongoDBTestCase): +class TestGeoQueries(MongoDBTestCase): def _create_event_data(self, point_field_class=GeoPointField): """Create some sample data re-used in many of the tests below.""" diff --git a/tests/queryset/queryset.py b/tests/queryset/test_queryset.py similarity index 99% rename from tests/queryset/queryset.py rename to tests/queryset/test_queryset.py index 9dc68f2e..a9ecaef5 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/test_queryset.py @@ -41,7 +41,7 @@ def get_key_compat(mongo_ver): return ORDER_BY_KEY, CMD_QUERY_KEY -class QuerySetTest(unittest.TestCase): +class TestQueryset(unittest.TestCase): def setUp(self): connect(db="mongoenginetest") connect(db="mongoenginetest2", alias="test2") From a06e605e671bfcbe336addd780ae7c9e79069b99 Mon Sep 17 00:00:00 2001 From: Erdenezul Batmunkh Date: Thu, 29 Aug 2019 11:11:27 +0200 Subject: [PATCH 054/216] Add BulkWriteError exception --- mongoengine/errors.py | 5 +++++ mongoengine/queryset/base.py | 3 ++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mongoengine/errors.py b/mongoengine/errors.py index 9852f2a1..b76243d3 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -12,6 +12,7 @@ __all__ = ( "InvalidQueryError", "OperationError", "NotUniqueError", + "BulkWriteError", "FieldDoesNotExist", "ValidationError", "SaveConditionError", @@ -51,6 +52,10 @@ class NotUniqueError(OperationError): pass +class BulkWriteError(OperationError): + pass + + class SaveConditionError(OperationError): pass diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 46b20d78..6d3fb41a 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -20,6 +20,7 @@ from mongoengine.common import _import_class from mongoengine.connection import get_db from mongoengine.context_managers import set_write_concern, switch_db from mongoengine.errors import ( + BulkWriteError, InvalidQueryError, LookUpError, NotUniqueError, @@ -356,7 +357,7 @@ class BaseQuerySet(object): # inserting documents that already have an _id field will # give huge performance debt or raise message = u"Bulk write error: (%s)" - raise NotUniqueError(message % six.text_type(err.details)) + raise BulkWriteError(message % six.text_type(err.details)) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" if re.match("^E1100[01] duplicate key", six.text_type(err)): From 2267b7e7d740409dd1b9f648d1fe5b9e1cfdb7a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 30 Aug 2019 16:27:56 +0300 Subject: [PATCH 055/216] rename remaining files for pytest migration --- tests/queryset/{modify.py => test_modify.py} | 6 ++---- tests/queryset/{pickable.py => test_pickable.py} | 11 +++-------- 
tests/queryset/{transform.py => test_transform.py} | 4 +--- tests/queryset/{visitor.py => test_visitor.py} | 4 +--- 4 files changed, 7 insertions(+), 18 deletions(-) rename tests/queryset/{modify.py => test_modify.py} (96%) rename tests/queryset/{pickable.py => test_pickable.py} (87%) rename tests/queryset/{transform.py => test_transform.py} (99%) rename tests/queryset/{visitor.py => test_visitor.py} (99%) diff --git a/tests/queryset/modify.py b/tests/queryset/test_modify.py similarity index 96% rename from tests/queryset/modify.py rename to tests/queryset/test_modify.py index e092d11c..60f4884c 100644 --- a/tests/queryset/modify.py +++ b/tests/queryset/test_modify.py @@ -1,8 +1,6 @@ import unittest -from mongoengine import connect, Document, IntField, StringField, ListField - -__all__ = ("FindAndModifyTest",) +from mongoengine import Document, IntField, ListField, StringField, connect class Doc(Document): @@ -10,7 +8,7 @@ class Doc(Document): value = IntField() -class FindAndModifyTest(unittest.TestCase): +class TestFindAndModify(unittest.TestCase): def setUp(self): connect(db="mongoenginetest") Doc.drop_collection() diff --git a/tests/queryset/pickable.py b/tests/queryset/test_pickable.py similarity index 87% rename from tests/queryset/pickable.py rename to tests/queryset/test_pickable.py index 0945fcbc..fbdd1ff0 100644 --- a/tests/queryset/pickable.py +++ b/tests/queryset/test_pickable.py @@ -1,10 +1,8 @@ import pickle import unittest -from pymongo.mongo_client import MongoClient -from mongoengine import Document, StringField, IntField -from mongoengine.connection import connect -__author__ = "stas" +from mongoengine import Document, IntField, StringField +from mongoengine.connection import connect class Person(Document): @@ -20,11 +18,8 @@ class TestQuerysetPickable(unittest.TestCase): def setUp(self): super(TestQuerysetPickable, self).setUp() - - connection = connect(db="test") # type: pymongo.mongo_client.MongoClient - + connection = connect(db="test") connection.drop_database("test") - self.john = Person.objects.create(name="John", age=21) def test_picke_simple_qs(self): diff --git a/tests/queryset/transform.py b/tests/queryset/test_transform.py similarity index 99% rename from tests/queryset/transform.py rename to tests/queryset/test_transform.py index cfcd8c22..8207351d 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/test_transform.py @@ -5,10 +5,8 @@ from bson.son import SON from mongoengine import * from mongoengine.queryset import Q, transform -__all__ = ("TransformTest",) - -class TransformTest(unittest.TestCase): +class TestTransform(unittest.TestCase): def setUp(self): connect(db="mongoenginetest") diff --git a/tests/queryset/visitor.py b/tests/queryset/test_visitor.py similarity index 99% rename from tests/queryset/visitor.py rename to tests/queryset/test_visitor.py index 0a22416f..acadabd4 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/test_visitor.py @@ -8,10 +8,8 @@ from mongoengine import * from mongoengine.errors import InvalidQueryError from mongoengine.queryset import Q -__all__ = ("QTest",) - -class QTest(unittest.TestCase): +class TestQ(unittest.TestCase): def setUp(self): connect(db="mongoenginetest") From 693195f70be3b675757f700e40c6a66a74aa4b50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 31 Aug 2019 22:28:31 +0300 Subject: [PATCH 056/216] fix test_pickable that was brought back to life recently --- tests/queryset/test_pickable.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/tests/queryset/test_pickable.py b/tests/queryset/test_pickable.py index fbdd1ff0..8c4e3426 100644 --- a/tests/queryset/test_pickable.py +++ b/tests/queryset/test_pickable.py @@ -3,6 +3,7 @@ import unittest from mongoengine import Document, IntField, StringField from mongoengine.connection import connect +from tests.utils import MongoDBTestCase class Person(Document): @@ -10,7 +11,7 @@ class Person(Document): age = IntField() -class TestQuerysetPickable(unittest.TestCase): +class TestQuerysetPickable(MongoDBTestCase): """ Test for adding pickling support for QuerySet instances See issue https://github.com/MongoEngine/mongoengine/issues/442 @@ -18,8 +19,6 @@ class TestQuerysetPickable(unittest.TestCase): def setUp(self): super(TestQuerysetPickable, self).setUp() - connection = connect(db="test") - connection.drop_database("test") self.john = Person.objects.create(name="John", age=21) def test_picke_simple_qs(self): From 47f8a126ca167cb8fe020e3cc5604b155dfcdebc Mon Sep 17 00:00:00 2001 From: Arto Jantunen Date: Tue, 3 Sep 2019 14:36:06 +0300 Subject: [PATCH 057/216] Only set no_cursor_timeout when requested Previously this was always set for all requests. The parameter is only documented as supported for certain queries, so this was probably wrong. Mongo version 4.2 fails update queries that have this parameter set making mongoengine unusable there. Fixes #2148. --- mongoengine/queryset/base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index ba3ac95a..ffa099ac 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1576,7 +1576,9 @@ class BaseQuerySet(object): if self._snapshot: msg = "The snapshot option is not anymore available with PyMongo 3+" warnings.warn(msg, DeprecationWarning) - cursor_args = {"no_cursor_timeout": not self._timeout} + cursor_args = {} + if not self._timeout: + cursor_args["no_cursor_timeout"] = True if self._loaded_fields: cursor_args[fields_name] = self._loaded_fields.as_dict() From 1dbe7a3163033703720f4ccd3c6391b5f3f8d490 Mon Sep 17 00:00:00 2001 From: Erdenezul Batmunkh Date: Tue, 3 Sep 2019 16:17:09 +0200 Subject: [PATCH 058/216] Add log in changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 55fa4b25..5422f113 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -18,6 +18,7 @@ Development - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 - The codebase is now formatted using ``black``. #2109 +- In bulk write insert, the detailed error message would raise in exception. 
Changes in 0.18.2 ================= From 7d94af0e3181751894884cbfcd55fe9383db028b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 11 Sep 2019 21:53:30 +0200 Subject: [PATCH 059/216] add test coverage for no_cursor_timeout to support recent fix --- mongoengine/queryset/base.py | 1 + tests/queryset/test_queryset.py | 12 +++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index ffa099ac..570ad37f 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1576,6 +1576,7 @@ class BaseQuerySet(object): if self._snapshot: msg = "The snapshot option is not anymore available with PyMongo 3+" warnings.warn(msg, DeprecationWarning) + cursor_args = {} if not self._timeout: cursor_args["no_cursor_timeout"] = True diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index a9ecaef5..e7e59905 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -5809,9 +5809,19 @@ class TestQueryset(unittest.TestCase): self.Person.objects.create(name="Baz") self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 3) - newPerson = self.Person.objects.create(name="Foo_1") + self.Person.objects.create(name="Foo_1") self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 4) + def test_no_cursor_timeout(self): + qs = self.Person.objects() + self.assertEqual(qs._cursor_args, {}) # ensure no regression of #2148 + + qs = self.Person.objects().timeout(True) + self.assertEqual(qs._cursor_args, {}) + + qs = self.Person.objects().timeout(False) + self.assertEqual(qs._cursor_args, {"no_cursor_timeout": True}) + if __name__ == "__main__": unittest.main() From 7ac74b1c1f60967852bea294c318adc0a45e347e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 15 Sep 2019 23:27:34 +0200 Subject: [PATCH 060/216] Document Model.objects.aggregate entrypoint with an example --- docs/guide/querying.rst | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 151855a6..50218aed 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -349,9 +349,9 @@ Just as with limiting and skipping results, there is a method on a You could technically use ``len(User.objects)`` to get the same result, but it would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. When you execute a server-side count query, you let MongoDB do the heavy -lifting and you receive a single integer over the wire. Meanwhile, len() +lifting and you receive a single integer over the wire. Meanwhile, ``len()`` retrieves all the results, places them in a local cache, and finally counts -them. If we compare the performance of the two operations, len() is much slower +them. If we compare the performance of the two operations, ``len()`` is much slower than :meth:`~mongoengine.queryset.QuerySet.count`. Further aggregation @@ -386,6 +386,18 @@ would be generating "tag-clouds":: top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] +MongoDB aggregation API +----------------------- +If you need to run aggregation pipelines, MongoEngine provides an entry point to `pymongo's aggregation framework `_ + through :meth:`~mongoengine.queryset.base.aggregate`. Checkout pymongo's documentation for the syntax and pipeline. 
+An example of its use would be :: + + class Person(Document): + name = StringField() + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects().aggregate(*pipeline) # Would return e.g: [{"_id": ObjectId('5d7eac82aae098e4ed3784c7'), "name": "JOHN DOE"}] + Query efficiency and performance ================================ From be2c4f2b3cdfe13d9abe92312409761d91f0040c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 16 Sep 2019 21:15:35 +0200 Subject: [PATCH 061/216] fix formatting and improve doc based on review --- docs/guide/querying.rst | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 50218aed..d64c169c 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -388,15 +388,22 @@ would be generating "tag-clouds":: MongoDB aggregation API ----------------------- -If you need to run aggregation pipelines, MongoEngine provides an entry point to `pymongo's aggregation framework `_ - through :meth:`~mongoengine.queryset.base.aggregate`. Checkout pymongo's documentation for the syntax and pipeline. -An example of its use would be :: +If you need to run aggregation pipelines, MongoEngine provides an entry point `Pymongo's aggregation framework `_ +through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline. +An example of its use would be:: class Person(Document): name = StringField() - pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] - data = Person.objects().aggregate(*pipeline) # Would return e.g: [{"_id": ObjectId('5d7eac82aae098e4ed3784c7'), "name": "JOHN DOE"}] + Person(name='John').save() + Person(name='Bob').save() + + pipeline = [ + {"$sort" : {"name" : -1}}, + {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}} + ] + data = Person.objects().aggregate(*pipeline) + assert data == [{'name': 'BOB'}, {'name': 'JOHN'}] Query efficiency and performance ================================ From 8f288fe45875050ac39985dcf9bcc304b6c1f15e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 29 Sep 2019 22:48:46 +0200 Subject: [PATCH 062/216] add mongodb 4.0 to travis and docs --- .travis.yml | 10 ++++++---- README.rst | 4 ++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 54a6befd..21321841 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ # with a very large number of jobs, hence we only test a subset of all the # combinations: # * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, -# tested against Python v2.7, v3.5, v3.6, and PyPy. +# tested against Python v2.7, v3.5, v3.6, v3.7, PyPy and PyPy3. # * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo # combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. # * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. 
@@ -30,15 +30,16 @@ dist: xenial env: global: + - MONGODB_4_0=4.0.12 - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 + - PYMONGO_3_9=3.9 - PYMONGO_3_6=3.6 - PYMONGO_3_4=3.4 matrix: - - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_6} + - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_9} matrix: - # Finish the build as soon as one job fails fast_finish: true @@ -47,7 +48,8 @@ matrix: env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} - + - python: 3.7 + env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_9} install: # Install Mongo diff --git a/README.rst b/README.rst index 679980f8..82b32893 100644 --- a/README.rst +++ b/README.rst @@ -26,10 +26,10 @@ an `API reference `_. Supported MongoDB Versions ========================== -MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions +MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions should be supported as well, but aren't actively tested at the moment. Make sure to open an issue or submit a pull request if you experience any problems -with MongoDB version > 3.6. +with MongoDB version > 4.0. Installation ============ From b61c8cd104975b5fb47681387abc443c8c8430b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 1 Oct 2019 22:17:19 +0200 Subject: [PATCH 063/216] fix tox envs --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index a1ae8444..a7921c61 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,pypy,pypy3}-{mg34,mg36} +envlist = {py27,py35,pypy,pypy3}-{mg34,mg36, mg39} [testenv] commands = @@ -8,5 +8,6 @@ deps = nose mg34: pymongo>=3.4,<3.5 mg36: pymongo>=3.6,<3.7 + mg39: pymongo>=3.9,<4.0 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs From e3cd553f8211d473b54fb3c91da8bf9fa2ad053d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 4 Oct 2019 21:30:32 +0200 Subject: [PATCH 064/216] add latest pymongo 3.9 as part of the CI --- .travis.yml | 5 ++++- mongoengine/queryset/base.py | 1 + tests/queryset/test_queryset.py | 26 +++++++++----------------- tox.ini | 1 + 4 files changed, 15 insertions(+), 18 deletions(-) diff --git a/.travis.yml b/.travis.yml index 54a6befd..af1e2b14 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,10 +32,11 @@ env: global: - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 + - PYMONGO_3_9=3.9 - PYMONGO_3_6=3.6 - PYMONGO_3_4=3.4 matrix: - - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_6} + - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_9} matrix: @@ -47,6 +48,8 @@ matrix: env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} + - python: 3.7 + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} install: diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index cde06d54..a09cbf99 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1639,6 +1639,7 @@ class BaseQuerySet(object): ).find(self._query, **self._cursor_args) else: self._cursor_obj = self._collection.find(self._query, **self._cursor_args) + # Apply "where" clauses to cursor if self._where_clause: where_clause = self._sub_js_fields(self._where_clause) diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index e7e59905..16213254 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4641,43 +4641,35 @@ class TestQueryset(unittest.TestCase): bars = 
Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED) self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) self.assertEqual( - bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED + bars._cursor.collection.read_preference, ReadPreference.SECONDARY_PREFERRED ) # Make sure that `.read_preference(...)` does accept string values. self.assertRaises(TypeError, Bar.objects.read_preference, "Primary") + def assert_read_pref(qs, expected_read_pref): + self.assertEqual(qs._read_preference, expected_read_pref) + self.assertEqual(qs._cursor.collection.read_preference, expected_read_pref) + # Make sure read preference is respected after a `.skip(...)`. bars = Bar.objects.skip(1).read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) - self.assertEqual( - bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED - ) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) # Make sure read preference is respected after a `.limit(...)`. bars = Bar.objects.limit(1).read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) - self.assertEqual( - bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED - ) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) # Make sure read preference is respected after an `.order_by(...)`. bars = Bar.objects.order_by("txt").read_preference( ReadPreference.SECONDARY_PREFERRED ) - self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) - self.assertEqual( - bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED - ) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) # Make sure read preference is respected after a `.hint(...)`. bars = Bar.objects.hint([("txt", 1)]).read_preference( ReadPreference.SECONDARY_PREFERRED ) - self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) - self.assertEqual( - bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED - ) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) def test_read_preference_aggregation_framework(self): class Bar(Document): diff --git a/tox.ini b/tox.ini index a1ae8444..b4a57818 100644 --- a/tox.ini +++ b/tox.ini @@ -8,5 +8,6 @@ deps = nose mg34: pymongo>=3.4,<3.5 mg36: pymongo>=3.6,<3.7 + mg39: pymongo>=3.9,<4.0 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs From 71e8d9a49067f1c790739f43fb3ff35baf01c458 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 10 Sep 2019 23:02:32 +0200 Subject: [PATCH 065/216] Added a FAQ to doc and Document the fact that we dont support motor --- docs/faq.rst | 13 +++++++++++++ docs/index.rst | 4 ++++ 2 files changed, 17 insertions(+) create mode 100644 docs/faq.rst diff --git a/docs/faq.rst b/docs/faq.rst new file mode 100644 index 00000000..27cd6937 --- /dev/null +++ b/docs/faq.rst @@ -0,0 +1,13 @@ +========================== +Frequently Asked Questions +========================== + +Does MongoEngine support asynchronous drivers (Motor, TxMongo)? +--------------------------------------------------------------- + +No, MongoEngine is exclusively based on PyMongo and isn't designed to support other driver. +If this is a requirement for your project, check the alternative: `uMongo`_ and `MotorEngine`_. + +.. _uMongo: https://umongo.readthedocs.io/ +.. 
_MotorEngine: https://motorengine.readthedocs.io/ + diff --git a/docs/index.rst b/docs/index.rst index 2102df02..686ef547 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -23,6 +23,9 @@ MongoDB. To install it, simply run :doc:`upgrade` How to upgrade MongoEngine. +:doc:`faq` + Frequently Asked Questions + :doc:`django` Using MongoEngine and Django @@ -73,6 +76,7 @@ formats for offline reading. apireference changelog upgrade + faq django Indices and tables From 19f12f3f2f380987c23dcafb8f288eb51177363d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 4 Oct 2019 21:51:12 +0200 Subject: [PATCH 066/216] document pymongo in RTD and make it point to github --- docs/index.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/index.rst b/docs/index.rst index 686ef547..662968d4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -29,6 +29,12 @@ MongoDB. To install it, simply run :doc:`django` Using MongoEngine and Django +MongoDB and driver support +-------------------------- + +MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB. +For further details, please refer to the `readme `_. + Community --------- From 1e17b5ac66148387a18d078f4b21cc406beef4f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 5 Oct 2019 14:24:54 +0200 Subject: [PATCH 067/216] Fix docstring format to improve pycharm inspection --- mongoengine/base/document.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index a962a82b..2be8dd6f 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -62,13 +62,13 @@ class BaseDocument(object): """ Initialise a document or an embedded document. - :param dict values: A dictionary of keys and values for the document. + :param values: A dictionary of keys and values for the document. It may contain additional reserved keywords, e.g. "__auto_convert". - :param bool __auto_convert: If True, supplied values will be converted + :param __auto_convert: If True, supplied values will be converted to Python-type values via each field's `to_python` method. - :param set __only_fields: A set of fields that have been loaded for + :param __only_fields: A set of fields that have been loaded for this document. Empty if all fields have been loaded. - :param bool _created: Indicates whether this is a brand new document + :param _created: Indicates whether this is a brand new document or whether it's already been persisted before. Defaults to true. 
""" self._initialised = False From 5bcc6791947f0520a171dc5d4b843c9321efe683 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 10 Oct 2019 22:55:44 +0200 Subject: [PATCH 068/216] fix 2 pymongo deprecation warnings --- tests/test_connection.py | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/tests/test_connection.py b/tests/test_connection.py index b7dc9268..071f4207 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,35 +1,33 @@ import datetime +from bson.tz_util import utc +from nose.plugins.skip import SkipTest +import pymongo from pymongo import MongoClient -from pymongo.errors import OperationFailure, InvalidName from pymongo import ReadPreference +from pymongo.errors import InvalidName, OperationFailure -from mongoengine import Document try: import unittest2 as unittest except ImportError: import unittest -from nose.plugins.skip import SkipTest -import pymongo -from bson.tz_util import utc - -from mongoengine import ( - connect, - register_connection, - Document, - DateTimeField, - disconnect_all, - StringField, -) import mongoengine.connection +from mongoengine import ( + DateTimeField, + Document, + StringField, + connect, + disconnect_all, + register_connection, +) from mongoengine.connection import ( ConnectionFailure, - get_db, - get_connection, - disconnect, DEFAULT_DATABASE_NAME, + disconnect, + get_connection, + get_db, ) @@ -289,7 +287,7 @@ class ConnectionTest(unittest.TestCase): # database won't exist until we save a document some_document.save() self.assertEqual(conn.get_default_database().name, "mongoenginetest") - self.assertEqual(conn.database_names()[0], "mongoenginetest") + self.assertEqual(conn.list_database_names()[0], "mongoenginetest") def test_connect_with_host_list(self): """Ensure that the connect() method works when host is a list @@ -631,8 +629,10 @@ class ConnectionTest(unittest.TestCase): """Ensure write concern can be specified in connect() via a kwarg or as part of the connection URI. """ - conn1 = connect(alias="conn1", host="mongodb://localhost/testing?w=1&j=true") - conn2 = connect("testing", alias="conn2", w=1, j=True) + conn1 = connect( + alias="conn1", host="mongodb://localhost/testing?w=1&journal=true" + ) + conn2 = connect("testing", alias="conn2", w=1, journal=True) self.assertEqual(conn1.write_concern.document, {"w": 1, "j": True}) self.assertEqual(conn2.write_concern.document, {"w": 1, "j": True}) From c60ed32f3a795a36f15e419fb09fe40b826947d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 16 Oct 2019 21:25:17 +0200 Subject: [PATCH 069/216] Documented how pymongo.monitoring can be used with MongoEngine --- docs/changelog.rst | 2 + docs/guide/index.rst | 1 + docs/guide/logging-monitoring.rst | 80 +++++++++++++++++++++++++++++++ 3 files changed, 83 insertions(+) create mode 100644 docs/guide/logging-monitoring.rst diff --git a/docs/changelog.rst b/docs/changelog.rst index 5422f113..58d7f272 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,8 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- Documentation improvements: + - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. 
#2112 - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. diff --git a/docs/guide/index.rst b/docs/guide/index.rst index 46eb7af2..a0364ec1 100644 --- a/docs/guide/index.rst +++ b/docs/guide/index.rst @@ -13,4 +13,5 @@ User Guide gridfs signals text-indexes + logging-monitoring mongomock diff --git a/docs/guide/logging-monitoring.rst b/docs/guide/logging-monitoring.rst new file mode 100644 index 00000000..9f523b79 --- /dev/null +++ b/docs/guide/logging-monitoring.rst @@ -0,0 +1,80 @@ +================== +Logging/Monitoring +================== + +It is possible to use `pymongo.monitoring `_ to monitor +the driver events (e.g: queries, connections, etc). This can be handy if you want to monitor the queries issued by +MongoEngine to the driver. + +To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners +**before** establishing the database connection (i.e calling `connect`): + +The following snippet provides a basic logging of all command events: + +.. code-block:: python + + import logging + from pymongo import monitoring + from mongoengine import * + + log = logging.getLogger() + log.setLevel(logging.DEBUG) + logging.basicConfig(level=logging.DEBUG) + + + class CommandLogger(monitoring.CommandListener): + + def started(self, event): + log.debug("Command {0.command_name} with request id " + "{0.request_id} started on server " + "{0.connection_id}".format(event)) + + def succeeded(self, event): + log.debug("Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "succeeded in {0.duration_micros} " + "microseconds".format(event)) + + def failed(self, event): + log.debug("Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "failed in {0.duration_micros} " + "microseconds".format(event)) + + monitoring.register(CommandLogger()) + + + class Jedi(Document): + name = StringField() + + + connect() + + + log.info('GO!') + + log.info('Saving an item through MongoEngine...') + Jedi(name='Obi-Wan Kenobii').save() + + log.info('Querying through MongoEngine...') + obiwan = Jedi.objects.first() + + log.info('Updating through MongoEngine...') + obiwan.name = 'Obi-Wan Kenobi' + obiwan.save() + + +Executing this prints the following output:: + + INFO:root:GO! + INFO:root:Saving an item through MongoEngine... + DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017) + DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds + INFO:root:Querying through MongoEngine... + DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017) + DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds + INFO:root:Updating through MongoEngine... + DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017) + DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds + +More details can of course be obtained by checking the `event` argument from the `CommandListener`. 
From 8bf5370b6cdac97e00b314d6cd57016494a25873 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 28 Oct 2019 22:05:13 +0100 Subject: [PATCH 070/216] Improve error message from InvalidDocumentError whenever an embedded document has a bad shape (e.g due to migration) --- docs/changelog.rst | 1 + mongoengine/base/document.py | 9 +++++++-- tests/document/test_instance.py | 23 +++++++++++++++++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 5422f113..a717b837 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -15,6 +15,7 @@ Development - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. +- Improve error message related to InvalidDocumentError #2180 - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 - The codebase is now formatted using ``black``. #2109 diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index a962a82b..a967436a 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -732,7 +732,10 @@ class BaseDocument(object): only_fields = [] if son and not isinstance(son, dict): - raise ValueError("The source SON object needs to be of type 'dict'") + raise ValueError( + "The source SON object needs to be of type 'dict' but a '%s' was found" + % type(son) + ) # Get the class name from the document, falling back to the given # class if unavailable @@ -770,7 +773,9 @@ class BaseDocument(object): errors_dict[field_name] = e if errors_dict: - errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()]) + errors = "\n".join( + ["Field '%s' - %s" % (k, v) for k, v in errors_dict.items()] + ) msg = "Invalid data to create a `%s` instance.\n%s" % ( cls._class_name, errors, diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 9b4a16e5..60e5313d 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -3656,6 +3656,29 @@ class TestInstance(MongoDBTestCase): with self.assertRaises(DuplicateKeyError): User.objects().select_related() + def test_embedded_document_failed_while_loading_instance_when_it_is_not_a_dict( + self + ): + class LightSaber(EmbeddedDocument): + color = StringField() + + class Jedi(Document): + light_saber = EmbeddedDocumentField(LightSaber) + + coll = Jedi._get_collection() + Jedi(light_saber=LightSaber(color="red")).save() + _ = list(Jedi.objects) # Ensure a proper document loads without errors + + # Forces a document with a wrong shape (may occur in case of migration) + coll.insert_one({"light_saber": "I_should_be_a_dict"}) + + with self.assertRaises(InvalidDocumentError) as cm: + list(Jedi.objects) + self.assertEqual( + str(cm.exception), + "Invalid data to create a `Jedi` instance.\nField 'light_saber' - The source SON object needs to be of type 'dict' but a '' was found", + ) + class ObjectKeyTestCase(MongoDBTestCase): def test_object_key_simple_document(self): From 54ca7bf09fa70a4fba3d83d8fc77090cddaaae67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 28 Oct 2019 22:38:21 +0100 Subject: [PATCH 071/216] fix associated test to avoid discrepencies btw py2 and py3 --- 
tests/document/test_instance.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 60e5313d..7a868d29 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -3670,13 +3670,16 @@ class TestInstance(MongoDBTestCase): _ = list(Jedi.objects) # Ensure a proper document loads without errors # Forces a document with a wrong shape (may occur in case of migration) - coll.insert_one({"light_saber": "I_should_be_a_dict"}) + value = u"I_should_be_a_dict" + coll.insert_one({"light_saber": value}) with self.assertRaises(InvalidDocumentError) as cm: list(Jedi.objects) + self.assertEqual( str(cm.exception), - "Invalid data to create a `Jedi` instance.\nField 'light_saber' - The source SON object needs to be of type 'dict' but a '' was found", + "Invalid data to create a `Jedi` instance.\nField 'light_saber' - The source SON object needs to be of type 'dict' but a '%s' was found" + % type(value), ) From bbfa97886188584ffcc7cfb73d084e2206832c42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 25 Aug 2019 15:21:30 +0300 Subject: [PATCH 072/216] switch test runner from nose to pytest --- .travis.yml | 6 ++--- README.rst | 11 ++++---- setup.cfg | 11 ++++---- setup.py | 66 ++++++++++++++++++++++++++++++++++++++++++++++-- tests/test_ci.py | 9 +++++++ tox.ini | 2 +- 6 files changed, 87 insertions(+), 18 deletions(-) create mode 100644 tests/test_ci.py diff --git a/.travis.yml b/.travis.yml index af1e2b14..9d2ba8c1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -63,8 +63,8 @@ install: - pip install flake8 flake8-import-order - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0 - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) - # Install the tox venv. - - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test + # Install the tox venv (we make pytest avoid running the test by giving a bad pattern) + - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" # Install black for Python v3.7 only. - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi @@ -76,7 +76,7 @@ before_script: - mongo --eval 'db.version();' # Make sure mongo is awake script: - - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage + - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') #-- --with-coverage # For now only submit coveralls for Python v2.7. Python v3.x currently shows # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible diff --git a/README.rst b/README.rst index 679980f8..853d8fbe 100644 --- a/README.rst +++ b/README.rst @@ -116,7 +116,8 @@ Some simple examples of what MongoEngine code looks like: Tests ===== To run the test suite, ensure you are running a local instance of MongoDB on -the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``. +the standard port and have ``pytest`` installed. Then, run ``python setup.py test`` +or simply ``pytest``. To run the test suite on every supported Python and PyMongo version, you can use ``tox``. 
You'll need to make sure you have each supported Python version @@ -129,16 +130,14 @@ installed in your environment and then: # Run the test suites $ tox -If you wish to run a subset of tests, use the nosetests convention: +If you wish to run a subset of tests, use the pytest convention: .. code-block:: shell # Run all the tests in a particular test file - $ python setup.py nosetests --tests tests/fields/fields.py + $ pytest tests/fields/test_fields.py # Run only particular test class in that file - $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest - # Use the -s option if you want to print some debug statements or use pdb - $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s + $ pytest tests/fields/test_fields.py::TestField Community ========= diff --git a/setup.cfg b/setup.cfg index 4bded428..ae1b4f7e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,11 +1,10 @@ -[nosetests] -verbosity=2 -detailed-errors=1 -#tests=tests -cover-package=mongoengine - [flake8] ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503 exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests max-complexity=47 application-import-names=mongoengine,tests + +[tool:pytest] +# Limits the discovery to tests directory +# avoids that it runs for instance the benchmark +testpaths = tests diff --git a/setup.py b/setup.py index c73a93ff..81cc9744 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,9 @@ import os import sys + +from pkg_resources import normalize_path from setuptools import find_packages, setup +from setuptools.command.test import test as TestCommand # Hack to silence atexit traceback in newer python versions try: @@ -24,6 +27,65 @@ def get_version(version_tuple): return ".".join(map(str, version_tuple)) +class PyTest(TestCommand): + """Will force pytest to search for tests inside the build directory + for 2to3 converted code (used by tox), instead of the current directory. + Required as long as we need 2to3 + + Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations + Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html + """ + + # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands + # Allows to provide pytest command arguments through the test runner command `python setup.py test` + # e.g: `python setup.py test -a "-k=test"` + user_options = [("pytest-args=", "a", "Arguments to pass to py.test")] + + def initialize_options(self): + TestCommand.initialize_options(self) + self.pytest_args = "" + + def finalize_options(self): + TestCommand.finalize_options(self) + self.test_args = ["tests"] + self.test_suite = True + + def run_tests(self): + # import here, cause outside the eggs aren't loaded + from pkg_resources import _namespace_packages + import pytest + + # Purge modules under test from sys.modules. The test loader will + # re-import them from the build location. Required when 2to3 is used + # with namespace packages. + if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False): + print("Hack for 2to3", self.test_args) + module = self.test_args[-1].split(".")[0] + if module in _namespace_packages: + del_modules = [] + if module in sys.modules: + del_modules.append(module) + module += "." 
+ for name in sys.modules: + if name.startswith(module): + del_modules.append(name) + map(sys.modules.__delitem__, del_modules) + + # Run on the build directory for 2to3-built code + # This will prevent the old 2.x code from being found + # by py.test discovery mechanism, that apparently + # ignores sys.path.. + ei_cmd = self.get_finalized_command("egg_info") + self.test_args = [normalize_path(ei_cmd.egg_base)] + + print(self.test_args, self.pytest_args) + cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else []) + print(cmd_args) + errno = pytest.main(cmd_args) + + sys.exit(errno) + + # Dirty hack to get version number from monogengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read @@ -51,7 +113,7 @@ CLASSIFIERS = [ extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), - "tests_require": ["nose", "coverage==4.2", "blinker", "Pillow>=2.0.0"], + "tests_require": ["pytest<5.0", "coverage==4.2", "blinker", "Pillow>=2.0.0"], } if sys.version_info[0] == 3: extra_opts["use_2to3"] = True @@ -79,6 +141,6 @@ setup( platforms=["any"], classifiers=CLASSIFIERS, install_requires=["pymongo>=3.4", "six"], - test_suite="nose.collector", + cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/test_ci.py b/tests/test_ci.py new file mode 100644 index 00000000..04a800eb --- /dev/null +++ b/tests/test_ci.py @@ -0,0 +1,9 @@ +def test_ci_placeholder(): + # This empty test is used within the CI to + # setup the tox venv without running the test suite + # if we simply skip all test with pytest -k=wrong_pattern + # pytest command would return with exit_code=5 (i.e "no tests run") + # making travis fail + # this empty test is the recommended way to handle this + # as described in https://github.com/pytest-dev/pytest/issues/2393 + pass diff --git a/tox.ini b/tox.ini index b4a57818..94ccc9cf 100644 --- a/tox.ini +++ b/tox.ini @@ -3,7 +3,7 @@ envlist = {py27,py35,pypy,pypy3}-{mg34,mg36} [testenv] commands = - python setup.py nosetests {posargs} + python setup.py test {posargs} deps = nose mg34: pymongo>=3.4,<3.5 From 5a16dda50d228d7670bfa7467be7e6124369406f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 26 Aug 2019 17:12:56 +0300 Subject: [PATCH 073/216] fix coverage for pytest runner --- .travis.yml | 4 ++-- setup.py | 8 +++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9d2ba8c1..2992d416 100644 --- a/.travis.yml +++ b/.travis.yml @@ -76,13 +76,13 @@ before_script: - mongo --eval 'db.version();' # Make sure mongo is awake script: - - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') #-- --with-coverage + - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" # For now only submit coveralls for Python v2.7. Python v3.x currently shows # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible # code in a separate dir and runs tests on that. 
after_success: -- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi +- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then cat .coverage & coveralls --verbose; fi notifications: irc: irc.freenode.org#mongoengine diff --git a/setup.py b/setup.py index 81cc9744..94f71162 100644 --- a/setup.py +++ b/setup.py @@ -113,7 +113,13 @@ CLASSIFIERS = [ extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), - "tests_require": ["pytest<5.0", "coverage==4.2", "blinker", "Pillow>=2.0.0"], + "tests_require": [ + "pytest<5.0", + "pytest-cov", + "coverage", + "blinker", + "Pillow>=2.0.0", + ], } if sys.version_info[0] == 3: extra_opts["use_2to3"] = True From 51ea3e3c6ff562260616127cde2f806008abc85d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 28 Aug 2019 15:07:27 +0300 Subject: [PATCH 074/216] fix for recent coverage/coveralls compatibility issue --- .travis.yml | 4 ++-- setup.py | 9 +++------ 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2992d416..cbf34cde 100644 --- a/.travis.yml +++ b/.travis.yml @@ -63,7 +63,7 @@ install: - pip install flake8 flake8-import-order - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0 - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) - # Install the tox venv (we make pytest avoid running the test by giving a bad pattern) + # tox dryrun to setup the tox venv (we run a mock test). - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" # Install black for Python v3.7 only. - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi @@ -82,7 +82,7 @@ script: # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible # code in a separate dir and runs tests on that. after_success: -- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then cat .coverage & coveralls --verbose; fi +- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; else echo "coveralls only sent for py27"; fi notifications: irc: irc.freenode.org#mongoengine diff --git a/setup.py b/setup.py index 94f71162..2bc1ae1c 100644 --- a/setup.py +++ b/setup.py @@ -37,8 +37,9 @@ class PyTest(TestCommand): """ # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands - # Allows to provide pytest command arguments through the test runner command `python setup.py test` + # Allows to provide pytest command argument through the test runner command `python setup.py test` # e.g: `python setup.py test -a "-k=test"` + # This only works for 1 argument though user_options = [("pytest-args=", "a", "Arguments to pass to py.test")] def initialize_options(self): @@ -59,7 +60,6 @@ class PyTest(TestCommand): # re-import them from the build location. Required when 2to3 is used # with namespace packages. 
if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False): - print("Hack for 2to3", self.test_args) module = self.test_args[-1].split(".")[0] if module in _namespace_packages: del_modules = [] @@ -78,11 +78,8 @@ class PyTest(TestCommand): ei_cmd = self.get_finalized_command("egg_info") self.test_args = [normalize_path(ei_cmd.egg_base)] - print(self.test_args, self.pytest_args) cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else []) - print(cmd_args) errno = pytest.main(cmd_args) - sys.exit(errno) @@ -116,7 +113,7 @@ extra_opts = { "tests_require": [ "pytest<5.0", "pytest-cov", - "coverage", + "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", "Pillow>=2.0.0", ], From 6040b4b494f93efea415ad6e05a1d33e5834e6c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 31 Oct 2019 21:33:19 +0100 Subject: [PATCH 075/216] update changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 58d7f272..249d99b1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -19,6 +19,7 @@ Development - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 +- Switch from nosetest to pytest as test runner #2114 - The codebase is now formatted using ``black``. #2109 - In bulk write insert, the detailed error message would raise in exception. From 37ca79e9c58e04ce0fd3a6775d804eb4dad6d8c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 31 Oct 2019 22:39:53 +0100 Subject: [PATCH 076/216] fix black formatting --- docs/conf.py | 3 ++- mongoengine/context_managers.py | 4 ++-- mongoengine/queryset/base.py | 4 +--- tests/document/test_instance.py | 4 ++-- tests/fields/test_complex_datetime_field.py | 2 +- tests/fields/test_embedded_document_field.py | 12 +++++------- tests/fields/test_fields.py | 8 ++++---- tests/fields/test_reference_field.py | 2 +- tests/test_common.py | 2 +- tests/test_connection.py | 2 +- tests/test_context_managers.py | 6 +++--- tests/test_datastructures.py | 10 +++++----- tests/test_replicaset_connection.py | 2 +- tests/test_utils.py | 2 +- tests/utils.py | 2 +- 15 files changed, 31 insertions(+), 34 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0d642e0c..48c8e859 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -11,7 +11,8 @@ # All configuration values have a default; values that are commented out # serve to show the default. 
-import sys, os +import os +import sys import sphinx_rtd_theme diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 3424a5d5..d8dfeaac 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -247,8 +247,8 @@ class query_counter(object): - self._ctx_query_counter ) self._ctx_query_counter += ( - 1 - ) # Account for the query we just issued to gather the information + 1 # Account for the query we just issued to gather the information + ) return count diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index a09cbf99..a648391e 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1193,9 +1193,7 @@ class BaseQuerySet(object): validate_read_preference("read_preference", read_preference) queryset = self.clone() queryset._read_preference = read_preference - queryset._cursor_obj = ( - None - ) # we need to re-create the cursor object whenever we apply read_preference + queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference return queryset def scalar(self, *fields): diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 9b4a16e5..203e2cce 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -1615,7 +1615,7 @@ class TestInstance(MongoDBTestCase): self.assertEqual(person.active, False) def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc( - self + self, ): # Refers to Issue #1685 class EmbeddedChildModel(EmbeddedDocument): @@ -1629,7 +1629,7 @@ class TestInstance(MongoDBTestCase): self.assertEqual(changed_fields, []) def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc( - self + self, ): # Refers to Issue #1685 class User(Document): diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index 4eea5bdc..611c0ff8 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import datetime -import math import itertools +import math import re from mongoengine import * diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index 6b420781..8db8c180 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -1,17 +1,15 @@ # -*- coding: utf-8 -*- from mongoengine import ( Document, - StringField, - ValidationError, EmbeddedDocument, EmbeddedDocumentField, - InvalidQueryError, - LookUpError, - IntField, GenericEmbeddedDocumentField, + IntField, + InvalidQueryError, ListField, - EmbeddedDocumentListField, - ReferenceField, + LookUpError, + StringField, + ValidationError, ) from tests.utils import MongoDBTestCase diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index d9279c22..bd2149e6 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -79,7 +79,7 @@ class TestField(MongoDBTestCase): self.assertEqual(data_to_be_saved, ["age", "created", "day", "name", "userid"]) def test_custom_field_validation_raise_deprecated_error_when_validation_return_something( - self + self, ): # Covers introduction of a breaking change in the validation parameter (0.18) def _not_empty(z): @@ -202,7 +202,7 @@ class TestField(MongoDBTestCase): self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) def 
test_default_value_is_not_used_when_changing_value_to_empty_list_for_strict_doc( - self + self, ): """List field with default can be set to the empty list (strict)""" # Issue #1733 @@ -216,7 +216,7 @@ class TestField(MongoDBTestCase): self.assertEqual(reloaded.x, []) def test_default_value_is_not_used_when_changing_value_to_empty_list_for_dyn_doc( - self + self, ): """List field with default can be set to the empty list (dynamic)""" # Issue #1733 @@ -1245,7 +1245,7 @@ class TestField(MongoDBTestCase): self.assertEqual(a.b.c.txt, "hi") def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet( - self + self, ): raise SkipTest( "Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet" diff --git a/tests/fields/test_reference_field.py b/tests/fields/test_reference_field.py index 5fd053fe..783a46da 100644 --- a/tests/fields/test_reference_field.py +++ b/tests/fields/test_reference_field.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from bson import SON, DBRef +from bson import DBRef, SON from mongoengine import * diff --git a/tests/test_common.py b/tests/test_common.py index 5d702668..28f0b992 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,7 +1,7 @@ import unittest -from mongoengine.common import _import_class from mongoengine import Document +from mongoengine.common import _import_class class TestCommon(unittest.TestCase): diff --git a/tests/test_connection.py b/tests/test_connection.py index 071f4207..1519a835 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -168,7 +168,7 @@ class ConnectionTest(unittest.TestCase): ) def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way( - self + self, ): """Intended to keep the detecton function simple but robust""" db_name = "mongoenginetest" diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index dc9b9bf3..32e48a70 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -3,11 +3,11 @@ import unittest from mongoengine import * from mongoengine.connection import get_db from mongoengine.context_managers import ( - switch_db, - switch_collection, - no_sub_classes, no_dereference, + no_sub_classes, query_counter, + switch_collection, + switch_db, ) from mongoengine.pymongo_support import count_documents diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 7def2ac7..ff7598be 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -2,7 +2,7 @@ import unittest from six import iterkeys from mongoengine import Document -from mongoengine.base.datastructures import StrictDict, BaseList, BaseDict +from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict class DocumentStub(object): @@ -20,8 +20,8 @@ class TestBaseDict(unittest.TestCase): fake_doc = DocumentStub() base_list = BaseDict(dict_items, instance=None, name="my_name") base_list._instance = ( - fake_doc - ) # hack to inject the mock, it does not work in the constructor + fake_doc # hack to inject the mock, it does not work in the constructor + ) return base_list def test___init___(self): @@ -156,8 +156,8 @@ class TestBaseList(unittest.TestCase): fake_doc = DocumentStub() base_list = BaseList(list_items, instance=None, name="my_name") base_list._instance = ( - fake_doc - ) # hack to inject the mock, it does not work in the constructor + fake_doc # hack to inject the mock, it does not work in the constructor + ) return base_list def test___init___(self): 
diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py index 6dfab407..e92f3d09 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -1,7 +1,7 @@ import unittest -from pymongo import ReadPreference from pymongo import MongoClient +from pymongo import ReadPreference import mongoengine from mongoengine.connection import ConnectionFailure diff --git a/tests/test_utils.py b/tests/test_utils.py index 2d1e8b00..897c19b2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,5 +1,5 @@ -import unittest import re +import unittest from mongoengine.base.utils import LazyRegexCompiler diff --git a/tests/utils.py b/tests/utils.py index eb3f016f..0719d6ef 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -4,7 +4,7 @@ import unittest from nose.plugins.skip import SkipTest from mongoengine import connect -from mongoengine.connection import get_db, disconnect_all +from mongoengine.connection import disconnect_all, get_db from mongoengine.mongodb_support import get_mongodb_version From ac25f4b98bd8c4b6daad46faf1e8a163928d7bc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 30 Aug 2019 16:13:30 +0300 Subject: [PATCH 077/216] ran unittest2pytest --- tests/all_warnings/test_warnings.py | 6 +- tests/document/test_class_methods.py | 133 +- tests/document/test_delta.py | 638 +++---- tests/document/test_dynamic.py | 193 +- tests/document/test_indexes.py | 285 ++- tests/document/test_inheritance.py | 256 ++- tests/document/test_instance.py | 1047 +++++----- tests/document/test_json_serialisation.py | 8 +- tests/document/test_validation.py | 76 +- tests/fields/test_binary_field.py | 40 +- tests/fields/test_boolean_field.py | 16 +- tests/fields/test_cached_reference_field.py | 179 +- tests/fields/test_complex_datetime_field.py | 46 +- tests/fields/test_date_field.py | 45 +- tests/fields/test_datetime_field.py | 71 +- tests/fields/test_decimal_field.py | 30 +- tests/fields/test_dict_field.py | 139 +- tests/fields/test_email_field.py | 37 +- tests/fields/test_embedded_document_field.py | 103 +- tests/fields/test_fields.py | 851 ++++----- tests/fields/test_file_field.py | 166 +- tests/fields/test_float_field.py | 20 +- tests/fields/test_geo_fields.py | 46 +- tests/fields/test_int_field.py | 14 +- tests/fields/test_lazy_reference_field.py | 118 +- tests/fields/test_long_field.py | 16 +- tests/fields/test_map_field.py | 31 +- tests/fields/test_reference_field.py | 46 +- tests/fields/test_sequence_field.py | 99 +- tests/fields/test_url_field.py | 15 +- tests/fields/test_uuid_field.py | 19 +- tests/queryset/test_field_list.py | 197 +- tests/queryset/test_geo.py | 205 +- tests/queryset/test_modify.py | 32 +- tests/queryset/test_pickable.py | 10 +- tests/queryset/test_queryset.py | 1784 +++++++++--------- tests/queryset/test_transform.py | 178 +- tests/queryset/test_visitor.py | 172 +- tests/test_common.py | 6 +- tests/test_connection.py | 254 ++- tests/test_context_managers.py | 139 +- tests/test_datastructures.py | 241 +-- tests/test_dereference.py | 386 ++-- tests/test_replicaset_connection.py | 2 +- tests/test_signals.py | 265 ++- tests/test_utils.py | 15 +- 46 files changed, 4247 insertions(+), 4428 deletions(-) diff --git a/tests/all_warnings/test_warnings.py b/tests/all_warnings/test_warnings.py index 67204617..a9910121 100644 --- a/tests/all_warnings/test_warnings.py +++ b/tests/all_warnings/test_warnings.py @@ -31,7 +31,5 @@ class TestAllWarnings(unittest.TestCase): meta = {"collection": "fail"} warning = 
self.warning_list[0] - self.assertEqual(SyntaxWarning, warning["category"]) - self.assertEqual( - "non_abstract_base", InheritedDocumentFailTest._get_collection_name() - ) + assert SyntaxWarning == warning["category"] + assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name() diff --git a/tests/document/test_class_methods.py b/tests/document/test_class_methods.py index c5df0843..98909d2f 100644 --- a/tests/document/test_class_methods.py +++ b/tests/document/test_class_methods.py @@ -29,43 +29,40 @@ class TestClassMethods(unittest.TestCase): def test_definition(self): """Ensure that document may be defined using fields. """ - self.assertEqual( - ["_cls", "age", "id", "name"], sorted(self.Person._fields.keys()) - ) - self.assertEqual( - ["IntField", "ObjectIdField", "StringField", "StringField"], - sorted([x.__class__.__name__ for x in self.Person._fields.values()]), + assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys()) + assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted( + [x.__class__.__name__ for x in self.Person._fields.values()] ) def test_get_db(self): """Ensure that get_db returns the expected db. """ db = self.Person._get_db() - self.assertEqual(self.db, db) + assert self.db == db def test_get_collection_name(self): """Ensure that get_collection_name returns the expected collection name. """ collection_name = "person" - self.assertEqual(collection_name, self.Person._get_collection_name()) + assert collection_name == self.Person._get_collection_name() def test_get_collection(self): """Ensure that get_collection returns the expected collection. """ collection_name = "person" collection = self.Person._get_collection() - self.assertEqual(self.db[collection_name], collection) + assert self.db[collection_name] == collection def test_drop_collection(self): """Ensure that the collection may be dropped from the database. 
""" collection_name = "person" self.Person(name="Test").save() - self.assertIn(collection_name, list_collection_names(self.db)) + assert collection_name in list_collection_names(self.db) self.Person.drop_collection() - self.assertNotIn(collection_name, list_collection_names(self.db)) + assert collection_name not in list_collection_names(self.db) def test_register_delete_rule(self): """Ensure that register delete rule adds a delete rule to the document @@ -75,12 +72,10 @@ class TestClassMethods(unittest.TestCase): class Job(Document): employee = ReferenceField(self.Person) - self.assertEqual(self.Person._meta.get("delete_rules"), None) + assert self.Person._meta.get("delete_rules") == None self.Person.register_delete_rule(Job, "employee", NULLIFY) - self.assertEqual( - self.Person._meta["delete_rules"], {(Job, "employee"): NULLIFY} - ) + assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY} def test_compare_indexes(self): """ Ensure that the indexes are properly created and that @@ -98,22 +93,22 @@ class TestClassMethods(unittest.TestCase): BlogPost.drop_collection() BlogPost.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} BlogPost.ensure_index(["author", "description"]) - self.assertEqual( - BlogPost.compare_indexes(), - {"missing": [], "extra": [[("author", 1), ("description", 1)]]}, - ) + assert BlogPost.compare_indexes() == { + "missing": [], + "extra": [[("author", 1), ("description", 1)]], + } BlogPost._get_collection().drop_index("author_1_description_1") - self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} BlogPost._get_collection().drop_index("author_1_title_1") - self.assertEqual( - BlogPost.compare_indexes(), - {"missing": [[("author", 1), ("title", 1)]], "extra": []}, - ) + assert BlogPost.compare_indexes() == { + "missing": [[("author", 1), ("title", 1)]], + "extra": [], + } def test_compare_indexes_inheritance(self): """ Ensure that the indexes are properly created and that @@ -138,22 +133,22 @@ class TestClassMethods(unittest.TestCase): BlogPost.ensure_indexes() BlogPostWithTags.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} BlogPostWithTags.ensure_index(["author", "tag_list"]) - self.assertEqual( - BlogPost.compare_indexes(), - {"missing": [], "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]]}, - ) + assert BlogPost.compare_indexes() == { + "missing": [], + "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]], + } BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1") - self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1") - self.assertEqual( - BlogPost.compare_indexes(), - {"missing": [[("_cls", 1), ("author", 1), ("tags", 1)]], "extra": []}, - ) + assert BlogPost.compare_indexes() == { + "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]], + "extra": [], + } def test_compare_indexes_multiple_subclasses(self): """ Ensure that compare_indexes behaves correctly if called from a @@ -182,13 +177,9 @@ class TestClassMethods(unittest.TestCase): BlogPostWithTags.ensure_indexes() BlogPostWithCustomField.ensure_indexes() - 
self.assertEqual(BlogPost.compare_indexes(), {"missing": [], "extra": []}) - self.assertEqual( - BlogPostWithTags.compare_indexes(), {"missing": [], "extra": []} - ) - self.assertEqual( - BlogPostWithCustomField.compare_indexes(), {"missing": [], "extra": []} - ) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} + assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []} + assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []} def test_compare_indexes_for_text_indexes(self): """ Ensure that compare_indexes behaves correctly for text indexes """ @@ -210,7 +201,7 @@ class TestClassMethods(unittest.TestCase): Doc.ensure_indexes() actual = Doc.compare_indexes() expected = {"missing": [], "extra": []} - self.assertEqual(actual, expected) + assert actual == expected def test_list_indexes_inheritance(self): """ ensure that all of the indexes are listed regardless of the super- @@ -240,19 +231,14 @@ class TestClassMethods(unittest.TestCase): BlogPostWithTags.ensure_indexes() BlogPostWithTagsAndExtraText.ensure_indexes() - self.assertEqual(BlogPost.list_indexes(), BlogPostWithTags.list_indexes()) - self.assertEqual( - BlogPost.list_indexes(), BlogPostWithTagsAndExtraText.list_indexes() - ) - self.assertEqual( - BlogPost.list_indexes(), - [ - [("_cls", 1), ("author", 1), ("tags", 1)], - [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)], - [(u"_id", 1)], - [("_cls", 1)], - ], - ) + assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes() + assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes() + assert BlogPost.list_indexes() == [ + [("_cls", 1), ("author", 1), ("tags", 1)], + [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)], + [(u"_id", 1)], + [("_cls", 1)], + ] def test_register_delete_rule_inherited(self): class Vaccine(Document): @@ -271,8 +257,8 @@ class TestClassMethods(unittest.TestCase): class Cat(Animal): name = StringField(required=True) - self.assertEqual(Vaccine._meta["delete_rules"][(Animal, "vaccine_made")], PULL) - self.assertEqual(Vaccine._meta["delete_rules"][(Cat, "vaccine_made")], PULL) + assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL + assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL def test_collection_naming(self): """Ensure that a collection with a specified name may be used. 
@@ -281,19 +267,17 @@ class TestClassMethods(unittest.TestCase): class DefaultNamingTest(Document): pass - self.assertEqual( - "default_naming_test", DefaultNamingTest._get_collection_name() - ) + assert "default_naming_test" == DefaultNamingTest._get_collection_name() class CustomNamingTest(Document): meta = {"collection": "pimp_my_collection"} - self.assertEqual("pimp_my_collection", CustomNamingTest._get_collection_name()) + assert "pimp_my_collection" == CustomNamingTest._get_collection_name() class DynamicNamingTest(Document): meta = {"collection": lambda c: "DYNAMO"} - self.assertEqual("DYNAMO", DynamicNamingTest._get_collection_name()) + assert "DYNAMO" == DynamicNamingTest._get_collection_name() # Use Abstract class to handle backwards compatibility class BaseDocument(Document): @@ -302,14 +286,12 @@ class TestClassMethods(unittest.TestCase): class OldNamingConvention(BaseDocument): pass - self.assertEqual( - "oldnamingconvention", OldNamingConvention._get_collection_name() - ) + assert "oldnamingconvention" == OldNamingConvention._get_collection_name() class InheritedAbstractNamingTest(BaseDocument): meta = {"collection": "wibble"} - self.assertEqual("wibble", InheritedAbstractNamingTest._get_collection_name()) + assert "wibble" == InheritedAbstractNamingTest._get_collection_name() # Mixin tests class BaseMixin(object): @@ -318,8 +300,9 @@ class TestClassMethods(unittest.TestCase): class OldMixinNamingConvention(Document, BaseMixin): pass - self.assertEqual( - "oldmixinnamingconvention", OldMixinNamingConvention._get_collection_name() + assert ( + "oldmixinnamingconvention" + == OldMixinNamingConvention._get_collection_name() ) class BaseMixin(object): @@ -331,7 +314,7 @@ class TestClassMethods(unittest.TestCase): class MyDocument(BaseDocument): pass - self.assertEqual("basedocument", MyDocument._get_collection_name()) + assert "basedocument" == MyDocument._get_collection_name() def test_custom_collection_name_operations(self): """Ensure that a collection with a specified name is used as expected. @@ -343,16 +326,16 @@ class TestClassMethods(unittest.TestCase): meta = {"collection": collection_name} Person(name="Test User").save() - self.assertIn(collection_name, list_collection_names(self.db)) + assert collection_name in list_collection_names(self.db) user_obj = self.db[collection_name].find_one() - self.assertEqual(user_obj["name"], "Test User") + assert user_obj["name"] == "Test User" user_obj = Person.objects[0] - self.assertEqual(user_obj.name, "Test User") + assert user_obj.name == "Test User" Person.drop_collection() - self.assertNotIn(collection_name, list_collection_names(self.db)) + assert collection_name not in list_collection_names(self.db) def test_collection_name_and_primary(self): """Ensure that a collection with a specified name may be used. 
@@ -365,7 +348,7 @@ class TestClassMethods(unittest.TestCase): Person(name="Test User").save() user_obj = Person.objects.first() - self.assertEqual(user_obj.name, "Test User") + assert user_obj.name == "Test User" Person.drop_collection() diff --git a/tests/document/test_delta.py b/tests/document/test_delta.py index 632d9b3f..2324211b 100644 --- a/tests/document/test_delta.py +++ b/tests/document/test_delta.py @@ -41,40 +41,40 @@ class TestDelta(MongoDBTestCase): doc.save() doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) doc.string_field = "hello" - self.assertEqual(doc._get_changed_fields(), ["string_field"]) - self.assertEqual(doc._delta(), ({"string_field": "hello"}, {})) + assert doc._get_changed_fields() == ["string_field"] + assert doc._delta() == ({"string_field": "hello"}, {}) doc._changed_fields = [] doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ["int_field"]) - self.assertEqual(doc._delta(), ({"int_field": 1}, {})) + assert doc._get_changed_fields() == ["int_field"] + assert doc._delta() == ({"int_field": 1}, {}) doc._changed_fields = [] dict_value = {"hello": "world", "ping": "pong"} doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ["dict_field"]) - self.assertEqual(doc._delta(), ({"dict_field": dict_value}, {})) + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({"dict_field": dict_value}, {}) doc._changed_fields = [] list_value = ["1", 2, {"hello": "world"}] doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ["list_field"]) - self.assertEqual(doc._delta(), ({"list_field": list_value}, {})) + assert doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({"list_field": list_value}, {}) # Test unsetting doc._changed_fields = [] doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ["dict_field"]) - self.assertEqual(doc._delta(), ({}, {"dict_field": 1})) + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({}, {"dict_field": 1}) doc._changed_fields = [] doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ["list_field"]) - self.assertEqual(doc._delta(), ({}, {"list_field": 1})) + assert doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({}, {"list_field": 1}) def test_delta_recursive(self): self.delta_recursive(Document, EmbeddedDocument) @@ -102,8 +102,8 @@ class TestDelta(MongoDBTestCase): doc.save() doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) embedded_1 = Embedded() embedded_1.id = "010101" @@ -113,7 +113,7 @@ class TestDelta(MongoDBTestCase): embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 - self.assertEqual(doc._get_changed_fields(), ["embedded_field"]) + assert doc._get_changed_fields() == ["embedded_field"] embedded_delta = { "id": "010101", @@ -122,27 +122,27 @@ class TestDelta(MongoDBTestCase): "dict_field": {"hello": "world"}, "list_field": ["1", 2, {"hello": "world"}], } - self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - self.assertEqual(doc._delta(), ({"embedded_field": embedded_delta}, {})) + assert doc.embedded_field._delta() == (embedded_delta, {}) + assert doc._delta() == ({"embedded_field": embedded_delta}, {}) doc.save() doc = 
doc.reload(10) doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ["embedded_field.dict_field"]) - self.assertEqual(doc.embedded_field._delta(), ({}, {"dict_field": 1})) - self.assertEqual(doc._delta(), ({}, {"embedded_field.dict_field": 1})) + assert doc._get_changed_fields() == ["embedded_field.dict_field"] + assert doc.embedded_field._delta() == ({}, {"dict_field": 1}) + assert doc._delta() == ({}, {"embedded_field.dict_field": 1}) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.dict_field, {}) + assert doc.embedded_field.dict_field == {} doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field"]) - self.assertEqual(doc.embedded_field._delta(), ({}, {"list_field": 1})) - self.assertEqual(doc._delta(), ({}, {"embedded_field.list_field": 1})) + assert doc._get_changed_fields() == ["embedded_field.list_field"] + assert doc.embedded_field._delta() == ({}, {"list_field": 1}) + assert doc._delta() == ({}, {"embedded_field.list_field": 1}) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field, []) + assert doc.embedded_field.list_field == [] embedded_2 = Embedded() embedded_2.string_field = "hello" @@ -151,148 +151,128 @@ class TestDelta(MongoDBTestCase): embedded_2.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field.list_field = ["1", 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field"]) + assert doc._get_changed_fields() == ["embedded_field.list_field"] - self.assertEqual( - doc.embedded_field._delta(), - ( - { - "list_field": [ - "1", - 2, - { - "_cls": "Embedded", - "string_field": "hello", - "dict_field": {"hello": "world"}, - "int_field": 1, - "list_field": ["1", 2, {"hello": "world"}], - }, - ] - }, - {}, - ), + assert doc.embedded_field._delta() == ( + { + "list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "dict_field": {"hello": "world"}, + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, ) - self.assertEqual( - doc._delta(), - ( - { - "embedded_field.list_field": [ - "1", - 2, - { - "_cls": "Embedded", - "string_field": "hello", - "dict_field": {"hello": "world"}, - "int_field": 1, - "list_field": ["1", 2, {"hello": "world"}], - }, - ] - }, - {}, - ), + assert doc._delta() == ( + { + "embedded_field.list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "dict_field": {"hello": "world"}, + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[0], "1") - self.assertEqual(doc.embedded_field.list_field[1], 2) + assert doc.embedded_field.list_field[0] == "1" + assert doc.embedded_field.list_field[1] == 2 for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) + assert doc.embedded_field.list_field[2][k] == embedded_2[k] doc.embedded_field.list_field[2].string_field = "world" - self.assertEqual( - doc._get_changed_fields(), ["embedded_field.list_field.2.string_field"] + assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"] + assert doc.embedded_field._delta() == ( + {"list_field.2.string_field": "world"}, + {}, ) - self.assertEqual( - doc.embedded_field._delta(), ({"list_field.2.string_field": "world"}, {}) - ) - self.assertEqual( - doc._delta(), ({"embedded_field.list_field.2.string_field": "world"}, {}) + assert 
doc._delta() == ( + {"embedded_field.list_field.2.string_field": "world"}, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, "world") + assert doc.embedded_field.list_field[2].string_field == "world" # Test multiple assignments doc.embedded_field.list_field[2].string_field = "hello world" doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), ["embedded_field.list_field.2"]) - self.assertEqual( - doc.embedded_field._delta(), - ( - { - "list_field.2": { - "_cls": "Embedded", - "string_field": "hello world", - "int_field": 1, - "list_field": ["1", 2, {"hello": "world"}], - "dict_field": {"hello": "world"}, - } - }, - {}, - ), + assert doc._get_changed_fields() == ["embedded_field.list_field.2"] + assert doc.embedded_field._delta() == ( + { + "list_field.2": { + "_cls": "Embedded", + "string_field": "hello world", + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + "dict_field": {"hello": "world"}, + } + }, + {}, ) - self.assertEqual( - doc._delta(), - ( - { - "embedded_field.list_field.2": { - "_cls": "Embedded", - "string_field": "hello world", - "int_field": 1, - "list_field": ["1", 2, {"hello": "world"}], - "dict_field": {"hello": "world"}, - } - }, - {}, - ), + assert doc._delta() == ( + { + "embedded_field.list_field.2": { + "_cls": "Embedded", + "string_field": "hello world", + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + "dict_field": {"hello": "world"}, + } + }, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, "hello world") + assert doc.embedded_field.list_field[2].string_field == "hello world" # Test list native methods doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual( - doc._delta(), - ({"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]}, {}), + assert doc._delta() == ( + {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]}, + {}, ) doc.save() doc = doc.reload(10) doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual( - doc._delta(), - ( - {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]}, - {}, - ), + assert doc._delta() == ( + {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]}, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual( - doc.embedded_field.list_field[2].list_field, [2, {"hello": "world"}, 1] - ) + assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1] doc.embedded_field.list_field[2].list_field.sort(key=str) doc.save() doc = doc.reload(10) - self.assertEqual( - doc.embedded_field.list_field[2].list_field, [1, 2, {"hello": "world"}] - ) + assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}] del doc.embedded_field.list_field[2].list_field[2]["hello"] - self.assertEqual( - doc._delta(), ({}, {"embedded_field.list_field.2.list_field.2.hello": 1}) + assert doc._delta() == ( + {}, + {"embedded_field.list_field.2.list_field.2.hello": 1}, ) doc.save() doc = doc.reload(10) del doc.embedded_field.list_field[2].list_field - self.assertEqual( - doc._delta(), ({}, {"embedded_field.list_field.2.list_field": 1}) - ) + assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1}) doc.save() doc = doc.reload(10) @@ -302,12 +282,8 @@ class TestDelta(MongoDBTestCase): doc = doc.reload(10) doc.dict_field["Embedded"].string_field = "Hello World" - self.assertEqual( - doc._get_changed_fields(), 
["dict_field.Embedded.string_field"] - ) - self.assertEqual( - doc._delta(), ({"dict_field.Embedded.string_field": "Hello World"}, {}) - ) + assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"] + assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {}) def test_circular_reference_deltas(self): self.circular_reference_deltas(Document, Document) @@ -338,8 +314,8 @@ class TestDelta(MongoDBTestCase): p = Person.objects[0].select_related() o = Organization.objects.first() - self.assertEqual(p.owns[0], o) - self.assertEqual(o.owner, p) + assert p.owns[0] == o + assert o.owner == p def test_circular_reference_deltas_2(self): self.circular_reference_deltas_2(Document, Document) @@ -379,9 +355,9 @@ class TestDelta(MongoDBTestCase): e = Person.objects.get(name="employee") o = Organization.objects.first() - self.assertEqual(p.owns[0], o) - self.assertEqual(o.owner, p) - self.assertEqual(e.employer, o) + assert p.owns[0] == o + assert o.owner == p + assert e.employer == o return person, organization, employee @@ -401,40 +377,40 @@ class TestDelta(MongoDBTestCase): doc.save() doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) doc.string_field = "hello" - self.assertEqual(doc._get_changed_fields(), ["db_string_field"]) - self.assertEqual(doc._delta(), ({"db_string_field": "hello"}, {})) + assert doc._get_changed_fields() == ["db_string_field"] + assert doc._delta() == ({"db_string_field": "hello"}, {}) doc._changed_fields = [] doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ["db_int_field"]) - self.assertEqual(doc._delta(), ({"db_int_field": 1}, {})) + assert doc._get_changed_fields() == ["db_int_field"] + assert doc._delta() == ({"db_int_field": 1}, {}) doc._changed_fields = [] dict_value = {"hello": "world", "ping": "pong"} doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ["db_dict_field"]) - self.assertEqual(doc._delta(), ({"db_dict_field": dict_value}, {})) + assert doc._get_changed_fields() == ["db_dict_field"] + assert doc._delta() == ({"db_dict_field": dict_value}, {}) doc._changed_fields = [] list_value = ["1", 2, {"hello": "world"}] doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ["db_list_field"]) - self.assertEqual(doc._delta(), ({"db_list_field": list_value}, {})) + assert doc._get_changed_fields() == ["db_list_field"] + assert doc._delta() == ({"db_list_field": list_value}, {}) # Test unsetting doc._changed_fields = [] doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ["db_dict_field"]) - self.assertEqual(doc._delta(), ({}, {"db_dict_field": 1})) + assert doc._get_changed_fields() == ["db_dict_field"] + assert doc._delta() == ({}, {"db_dict_field": 1}) doc._changed_fields = [] doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ["db_list_field"]) - self.assertEqual(doc._delta(), ({}, {"db_list_field": 1})) + assert doc._get_changed_fields() == ["db_list_field"] + assert doc._delta() == ({}, {"db_list_field": 1}) # Test it saves that data doc = Doc() @@ -447,10 +423,10 @@ class TestDelta(MongoDBTestCase): doc.save() doc = doc.reload(10) - self.assertEqual(doc.string_field, "hello") - self.assertEqual(doc.int_field, 1) - self.assertEqual(doc.dict_field, {"hello": "world"}) - self.assertEqual(doc.list_field, ["1", 2, {"hello": "world"}]) + assert doc.string_field == "hello" + assert doc.int_field == 1 + assert 
doc.dict_field == {"hello": "world"} + assert doc.list_field == ["1", 2, {"hello": "world"}] def test_delta_recursive_db_field(self): self.delta_recursive_db_field(Document, EmbeddedDocument) @@ -479,8 +455,8 @@ class TestDelta(MongoDBTestCase): doc.save() doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) embedded_1 = Embedded() embedded_1.string_field = "hello" @@ -489,7 +465,7 @@ class TestDelta(MongoDBTestCase): embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 - self.assertEqual(doc._get_changed_fields(), ["db_embedded_field"]) + assert doc._get_changed_fields() == ["db_embedded_field"] embedded_delta = { "db_string_field": "hello", @@ -497,27 +473,27 @@ class TestDelta(MongoDBTestCase): "db_dict_field": {"hello": "world"}, "db_list_field": ["1", 2, {"hello": "world"}], } - self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - self.assertEqual(doc._delta(), ({"db_embedded_field": embedded_delta}, {})) + assert doc.embedded_field._delta() == (embedded_delta, {}) + assert doc._delta() == ({"db_embedded_field": embedded_delta}, {}) doc.save() doc = doc.reload(10) doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_dict_field"]) - self.assertEqual(doc.embedded_field._delta(), ({}, {"db_dict_field": 1})) - self.assertEqual(doc._delta(), ({}, {"db_embedded_field.db_dict_field": 1})) + assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"] + assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1}) + assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1}) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.dict_field, {}) + assert doc.embedded_field.dict_field == {} doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_list_field"]) - self.assertEqual(doc.embedded_field._delta(), ({}, {"db_list_field": 1})) - self.assertEqual(doc._delta(), ({}, {"db_embedded_field.db_list_field": 1})) + assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] + assert doc.embedded_field._delta() == ({}, {"db_list_field": 1}) + assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1}) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field, []) + assert doc.embedded_field.list_field == [] embedded_2 = Embedded() embedded_2.string_field = "hello" @@ -526,166 +502,142 @@ class TestDelta(MongoDBTestCase): embedded_2.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field.list_field = ["1", 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), ["db_embedded_field.db_list_field"]) - self.assertEqual( - doc.embedded_field._delta(), - ( - { - "db_list_field": [ - "1", - 2, - { - "_cls": "Embedded", - "db_string_field": "hello", - "db_dict_field": {"hello": "world"}, - "db_int_field": 1, - "db_list_field": ["1", 2, {"hello": "world"}], - }, - ] - }, - {}, - ), + assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] + assert doc.embedded_field._delta() == ( + { + "db_list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "db_string_field": "hello", + "db_dict_field": {"hello": "world"}, + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, ) - self.assertEqual( - doc._delta(), - ( - { - "db_embedded_field.db_list_field": [ - "1", - 2, - { - "_cls": "Embedded", 
- "db_string_field": "hello", - "db_dict_field": {"hello": "world"}, - "db_int_field": 1, - "db_list_field": ["1", 2, {"hello": "world"}], - }, - ] - }, - {}, - ), + assert doc._delta() == ( + { + "db_embedded_field.db_list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "db_string_field": "hello", + "db_dict_field": {"hello": "world"}, + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[0], "1") - self.assertEqual(doc.embedded_field.list_field[1], 2) + assert doc.embedded_field.list_field[0] == "1" + assert doc.embedded_field.list_field[1] == 2 for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) + assert doc.embedded_field.list_field[2][k] == embedded_2[k] doc.embedded_field.list_field[2].string_field = "world" - self.assertEqual( - doc._get_changed_fields(), - ["db_embedded_field.db_list_field.2.db_string_field"], + assert doc._get_changed_fields() == [ + "db_embedded_field.db_list_field.2.db_string_field" + ] + assert doc.embedded_field._delta() == ( + {"db_list_field.2.db_string_field": "world"}, + {}, ) - self.assertEqual( - doc.embedded_field._delta(), - ({"db_list_field.2.db_string_field": "world"}, {}), - ) - self.assertEqual( - doc._delta(), - ({"db_embedded_field.db_list_field.2.db_string_field": "world"}, {}), + assert doc._delta() == ( + {"db_embedded_field.db_list_field.2.db_string_field": "world"}, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, "world") + assert doc.embedded_field.list_field[2].string_field == "world" # Test multiple assignments doc.embedded_field.list_field[2].string_field = "hello world" doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual( - doc._get_changed_fields(), ["db_embedded_field.db_list_field.2"] + assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"] + assert doc.embedded_field._delta() == ( + { + "db_list_field.2": { + "_cls": "Embedded", + "db_string_field": "hello world", + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + "db_dict_field": {"hello": "world"}, + } + }, + {}, ) - self.assertEqual( - doc.embedded_field._delta(), - ( - { - "db_list_field.2": { - "_cls": "Embedded", - "db_string_field": "hello world", - "db_int_field": 1, - "db_list_field": ["1", 2, {"hello": "world"}], - "db_dict_field": {"hello": "world"}, - } - }, - {}, - ), - ) - self.assertEqual( - doc._delta(), - ( - { - "db_embedded_field.db_list_field.2": { - "_cls": "Embedded", - "db_string_field": "hello world", - "db_int_field": 1, - "db_list_field": ["1", 2, {"hello": "world"}], - "db_dict_field": {"hello": "world"}, - } - }, - {}, - ), + assert doc._delta() == ( + { + "db_embedded_field.db_list_field.2": { + "_cls": "Embedded", + "db_string_field": "hello world", + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + "db_dict_field": {"hello": "world"}, + } + }, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, "hello world") + assert doc.embedded_field.list_field[2].string_field == "hello world" # Test list native methods doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual( - doc._delta(), - ( - { - "db_embedded_field.db_list_field.2.db_list_field": [ - 2, - {"hello": "world"}, - ] - }, - {}, - ), + assert doc._delta() == ( + { + 
"db_embedded_field.db_list_field.2.db_list_field": [ + 2, + {"hello": "world"}, + ] + }, + {}, ) doc.save() doc = doc.reload(10) doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual( - doc._delta(), - ( - { - "db_embedded_field.db_list_field.2.db_list_field": [ - 2, - {"hello": "world"}, - 1, - ] - }, - {}, - ), + assert doc._delta() == ( + { + "db_embedded_field.db_list_field.2.db_list_field": [ + 2, + {"hello": "world"}, + 1, + ] + }, + {}, ) doc.save() doc = doc.reload(10) - self.assertEqual( - doc.embedded_field.list_field[2].list_field, [2, {"hello": "world"}, 1] - ) + assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1] doc.embedded_field.list_field[2].list_field.sort(key=str) doc.save() doc = doc.reload(10) - self.assertEqual( - doc.embedded_field.list_field[2].list_field, [1, 2, {"hello": "world"}] - ) + assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}] del doc.embedded_field.list_field[2].list_field[2]["hello"] - self.assertEqual( - doc._delta(), - ({}, {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1}), + assert doc._delta() == ( + {}, + {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1}, ) doc.save() doc = doc.reload(10) del doc.embedded_field.list_field[2].list_field - self.assertEqual( - doc._delta(), ({}, {"db_embedded_field.db_list_field.2.db_list_field": 1}) + assert doc._delta() == ( + {}, + {"db_embedded_field.db_list_field.2.db_list_field": 1}, ) def test_delta_for_dynamic_documents(self): @@ -696,14 +648,16 @@ class TestDelta(MongoDBTestCase): Person.drop_collection() p = Person(name="James", age=34) - self.assertEqual( - p._delta(), (SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), {}) + assert p._delta() == ( + SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), + {}, ) p.doc = 123 del p.doc - self.assertEqual( - p._delta(), (SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), {}) + assert p._delta() == ( + SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), + {}, ) p = Person() @@ -712,18 +666,18 @@ class TestDelta(MongoDBTestCase): p.save() p.age = 24 - self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ["age"]) - self.assertEqual(p._delta(), ({"age": 24}, {})) + assert p.age == 24 + assert p._get_changed_fields() == ["age"] + assert p._delta() == ({"age": 24}, {}) p = Person.objects(age=22).get() p.age = 24 - self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ["age"]) - self.assertEqual(p._delta(), ({"age": 24}, {})) + assert p.age == 24 + assert p._get_changed_fields() == ["age"] + assert p._delta() == ({"age": 24}, {}) p.save() - self.assertEqual(1, Person.objects(age=24).count()) + assert 1 == Person.objects(age=24).count() def test_dynamic_delta(self): class Doc(DynamicDocument): @@ -734,40 +688,40 @@ class TestDelta(MongoDBTestCase): doc.save() doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) doc.string_field = "hello" - self.assertEqual(doc._get_changed_fields(), ["string_field"]) - self.assertEqual(doc._delta(), ({"string_field": "hello"}, {})) + assert doc._get_changed_fields() == ["string_field"] + assert doc._delta() == ({"string_field": "hello"}, {}) doc._changed_fields = [] doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ["int_field"]) - self.assertEqual(doc._delta(), ({"int_field": 1}, {})) + assert 
doc._get_changed_fields() == ["int_field"] + assert doc._delta() == ({"int_field": 1}, {}) doc._changed_fields = [] dict_value = {"hello": "world", "ping": "pong"} doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ["dict_field"]) - self.assertEqual(doc._delta(), ({"dict_field": dict_value}, {})) + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({"dict_field": dict_value}, {}) doc._changed_fields = [] list_value = ["1", 2, {"hello": "world"}] doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ["list_field"]) - self.assertEqual(doc._delta(), ({"list_field": list_value}, {})) + assert doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({"list_field": list_value}, {}) # Test unsetting doc._changed_fields = [] doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ["dict_field"]) - self.assertEqual(doc._delta(), ({}, {"dict_field": 1})) + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({}, {"dict_field": 1}) doc._changed_fields = [] doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ["list_field"]) - self.assertEqual(doc._delta(), ({}, {"list_field": 1})) + assert doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({}, {"list_field": 1}) def test_delta_with_dbref_true(self): person, organization, employee = self.circular_reference_deltas_2( @@ -775,16 +729,16 @@ class TestDelta(MongoDBTestCase): ) employee.name = "test" - self.assertEqual(organization._get_changed_fields(), []) + assert organization._get_changed_fields() == [] updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertEqual({}, updates) + assert {} == removals + assert {} == updates organization.employees.append(person) updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertIn("employees", updates) + assert {} == removals + assert "employees" in updates def test_delta_with_dbref_false(self): person, organization, employee = self.circular_reference_deltas_2( @@ -792,16 +746,16 @@ class TestDelta(MongoDBTestCase): ) employee.name = "test" - self.assertEqual(organization._get_changed_fields(), []) + assert organization._get_changed_fields() == [] updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertEqual({}, updates) + assert {} == removals + assert {} == updates organization.employees.append(person) updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertIn("employees", updates) + assert {} == removals + assert "employees" in updates def test_nested_nested_fields_mark_as_changed(self): class EmbeddedDoc(EmbeddedDocument): @@ -821,11 +775,11 @@ class TestDelta(MongoDBTestCase): subdoc = mydoc.subs["a"]["b"] subdoc.name = "bar" - self.assertEqual(["name"], subdoc._get_changed_fields()) - self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) + assert ["name"] == subdoc._get_changed_fields() + assert ["subs.a.b.name"] == mydoc._get_changed_fields() mydoc._clear_changed_fields() - self.assertEqual([], mydoc._get_changed_fields()) + assert [] == mydoc._get_changed_fields() def test_lower_level_mark_as_changed(self): class EmbeddedDoc(EmbeddedDocument): @@ -840,17 +794,17 @@ class TestDelta(MongoDBTestCase): mydoc = MyDoc.objects.first() mydoc.subs["a"] = EmbeddedDoc() - self.assertEqual(["subs.a"], mydoc._get_changed_fields()) + assert ["subs.a"] == mydoc._get_changed_fields() subdoc = mydoc.subs["a"] subdoc.name = "bar" - 
self.assertEqual(["name"], subdoc._get_changed_fields()) - self.assertEqual(["subs.a"], mydoc._get_changed_fields()) + assert ["name"] == subdoc._get_changed_fields() + assert ["subs.a"] == mydoc._get_changed_fields() mydoc.save() mydoc._clear_changed_fields() - self.assertEqual([], mydoc._get_changed_fields()) + assert [] == mydoc._get_changed_fields() def test_upper_level_mark_as_changed(self): class EmbeddedDoc(EmbeddedDocument): @@ -867,15 +821,15 @@ class TestDelta(MongoDBTestCase): subdoc = mydoc.subs["a"] subdoc.name = "bar" - self.assertEqual(["name"], subdoc._get_changed_fields()) - self.assertEqual(["subs.a.name"], mydoc._get_changed_fields()) + assert ["name"] == subdoc._get_changed_fields() + assert ["subs.a.name"] == mydoc._get_changed_fields() mydoc.subs["a"] = EmbeddedDoc() - self.assertEqual(["subs.a"], mydoc._get_changed_fields()) + assert ["subs.a"] == mydoc._get_changed_fields() mydoc.save() mydoc._clear_changed_fields() - self.assertEqual([], mydoc._get_changed_fields()) + assert [] == mydoc._get_changed_fields() def test_referenced_object_changed_attributes(self): """Ensures that when you save a new reference to a field, the referenced object isn't altered""" @@ -902,22 +856,22 @@ class TestDelta(MongoDBTestCase): org1.reload() org2.reload() user.reload() - self.assertEqual(org1.name, "Org 1") - self.assertEqual(org2.name, "Org 2") - self.assertEqual(user.name, "Fred") + assert org1.name == "Org 1" + assert org2.name == "Org 2" + assert user.name == "Fred" user.name = "Harold" user.org = org2 org2.name = "New Org 2" - self.assertEqual(org2.name, "New Org 2") + assert org2.name == "New Org 2" user.save() org2.save() - self.assertEqual(org2.name, "New Org 2") + assert org2.name == "New Org 2" org2.reload() - self.assertEqual(org2.name, "New Org 2") + assert org2.name == "New Org 2" def test_delta_for_nested_map_fields(self): class UInfoDocument(Document): @@ -950,12 +904,12 @@ class TestDelta(MongoDBTestCase): d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) d.users["007"]["info"] = uinfo delta = d._delta() - self.assertEqual(True, "users.007.roles.666" in delta[0]) - self.assertEqual(True, "users.007.rolist" in delta[0]) - self.assertEqual(True, "users.007.info" in delta[0]) - self.assertEqual("superadmin", delta[0]["users.007.roles.666"]["type"]) - self.assertEqual("oops", delta[0]["users.007.rolist"][0]["type"]) - self.assertEqual(uinfo.id, delta[0]["users.007.info"]) + assert True == ("users.007.roles.666" in delta[0]) + assert True == ("users.007.rolist" in delta[0]) + assert True == ("users.007.info" in delta[0]) + assert "superadmin" == delta[0]["users.007.roles.666"]["type"] + assert "oops" == delta[0]["users.007.rolist"][0]["type"] + assert uinfo.id == delta[0]["users.007.info"] if __name__ == "__main__": diff --git a/tests/document/test_dynamic.py b/tests/document/test_dynamic.py index 6b517d24..a6f46862 100644 --- a/tests/document/test_dynamic.py +++ b/tests/document/test_dynamic.py @@ -2,6 +2,7 @@ import unittest from mongoengine import * from tests.utils import MongoDBTestCase +import pytest __all__ = ("TestDynamicDocument",) @@ -25,15 +26,15 @@ class TestDynamicDocument(MongoDBTestCase): p.name = "James" p.age = 34 - self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", "age": 34}) - self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"]) + assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34} + assert p.to_mongo().keys() == ["_cls", "name", "age"] p.save() - self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", 
"name", "age"]) + assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"] - self.assertEqual(self.Person.objects.first().age, 34) + assert self.Person.objects.first().age == 34 # Confirm no changes to self.Person - self.assertFalse(hasattr(self.Person, "age")) + assert not hasattr(self.Person, "age") def test_change_scope_of_variable(self): """Test changing the scope of a dynamic field has no adverse effects""" @@ -47,7 +48,7 @@ class TestDynamicDocument(MongoDBTestCase): p.save() p = self.Person.objects.get() - self.assertEqual(p.misc, {"hello": "world"}) + assert p.misc == {"hello": "world"} def test_delete_dynamic_field(self): """Test deleting a dynamic field works""" @@ -62,19 +63,19 @@ class TestDynamicDocument(MongoDBTestCase): p.save() p = self.Person.objects.get() - self.assertEqual(p.misc, {"hello": "world"}) + assert p.misc == {"hello": "world"} collection = self.db[self.Person._get_collection_name()] obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "misc", "name"]) + assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"] del p.misc p.save() p = self.Person.objects.get() - self.assertFalse(hasattr(p, "misc")) + assert not hasattr(p, "misc") obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ["_cls", "_id", "name"]) + assert sorted(obj.keys()) == ["_cls", "_id", "name"] def test_reload_after_unsetting(self): p = self.Person() @@ -88,12 +89,12 @@ class TestDynamicDocument(MongoDBTestCase): p = self.Person.objects.create() p.update(age=1) - self.assertEqual(len(p._data), 3) - self.assertEqual(sorted(p._data.keys()), ["_cls", "id", "name"]) + assert len(p._data) == 3 + assert sorted(p._data.keys()) == ["_cls", "id", "name"] p.reload() - self.assertEqual(len(p._data), 4) - self.assertEqual(sorted(p._data.keys()), ["_cls", "age", "id", "name"]) + assert len(p._data) == 4 + assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"] def test_fields_without_underscore(self): """Ensure we can query dynamic fields""" @@ -103,16 +104,18 @@ class TestDynamicDocument(MongoDBTestCase): p.save() raw_p = Person.objects.as_pymongo().get(id=p.id) - self.assertEqual(raw_p, {"_cls": u"Person", "_id": p.id, "name": u"Dean"}) + assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"} p.name = "OldDean" p.newattr = "garbage" p.save() raw_p = Person.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - {"_cls": u"Person", "_id": p.id, "name": "OldDean", "newattr": u"garbage"}, - ) + assert raw_p == { + "_cls": u"Person", + "_id": p.id, + "name": "OldDean", + "newattr": u"garbage", + } def test_fields_containing_underscore(self): """Ensure we can query dynamic fields""" @@ -127,14 +130,14 @@ class TestDynamicDocument(MongoDBTestCase): p.save() raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) - self.assertEqual(raw_p, {"_id": p.id, "_name": u"Dean", "name": u"Dean"}) + assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"} p.name = "OldDean" p._name = "NewDean" p._newattr1 = "garbage" # Unknown fields won't be added p.save() raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) - self.assertEqual(raw_p, {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"}) + assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"} def test_dynamic_document_queries(self): """Ensure we can query dynamic fields""" @@ -143,10 +146,10 @@ class TestDynamicDocument(MongoDBTestCase): p.age = 22 p.save() - self.assertEqual(1, self.Person.objects(age=22).count()) + assert 1 == self.Person.objects(age=22).count() p = 
self.Person.objects(age=22) p = p.get() - self.assertEqual(22, p.age) + assert 22 == p.age def test_complex_dynamic_document_queries(self): class Person(DynamicDocument): @@ -166,8 +169,8 @@ class TestDynamicDocument(MongoDBTestCase): p2.age = 10 p2.save() - self.assertEqual(Person.objects(age__icontains="ten").count(), 2) - self.assertEqual(Person.objects(age__gte=10).count(), 1) + assert Person.objects(age__icontains="ten").count() == 2 + assert Person.objects(age__gte=10).count() == 1 def test_complex_data_lookups(self): """Ensure you can query dynamic document dynamic fields""" @@ -175,12 +178,12 @@ class TestDynamicDocument(MongoDBTestCase): p.misc = {"hello": "world"} p.save() - self.assertEqual(1, self.Person.objects(misc__hello="world").count()) + assert 1 == self.Person.objects(misc__hello="world").count() def test_three_level_complex_data_lookups(self): """Ensure you can query three level document dynamic fields""" self.Person.objects.create(misc={"hello": {"hello2": "world"}}) - self.assertEqual(1, self.Person.objects(misc__hello__hello2="world").count()) + assert 1 == self.Person.objects(misc__hello__hello2="world").count() def test_complex_embedded_document_validation(self): """Ensure embedded dynamic documents may be validated""" @@ -198,11 +201,13 @@ class TestDynamicDocument(MongoDBTestCase): embedded_doc_1.validate() embedded_doc_2 = Embedded(content="this is not a url") - self.assertRaises(ValidationError, embedded_doc_2.validate) + with pytest.raises(ValidationError): + embedded_doc_2.validate() doc.embedded_field_1 = embedded_doc_1 doc.embedded_field_2 = embedded_doc_2 - self.assertRaises(ValidationError, doc.validate) + with pytest.raises(ValidationError): + doc.validate() def test_inheritance(self): """Ensure that dynamic document plays nice with inheritance""" @@ -212,11 +217,9 @@ class TestDynamicDocument(MongoDBTestCase): Employee.drop_collection() - self.assertIn("name", Employee._fields) - self.assertIn("salary", Employee._fields) - self.assertEqual( - Employee._get_collection_name(), self.Person._get_collection_name() - ) + assert "name" in Employee._fields + assert "salary" in Employee._fields + assert Employee._get_collection_name() == self.Person._get_collection_name() joe_bloggs = Employee() joe_bloggs.name = "Joe Bloggs" @@ -224,11 +227,11 @@ class TestDynamicDocument(MongoDBTestCase): joe_bloggs.age = 20 joe_bloggs.save() - self.assertEqual(1, self.Person.objects(age=20).count()) - self.assertEqual(1, Employee.objects(age=20).count()) + assert 1 == self.Person.objects(age=20).count() + assert 1 == Employee.objects(age=20).count() joe_bloggs = self.Person.objects.first() - self.assertIsInstance(joe_bloggs, Employee) + assert isinstance(joe_bloggs, Employee) def test_embedded_dynamic_document(self): """Test dynamic embedded documents""" @@ -249,26 +252,23 @@ class TestDynamicDocument(MongoDBTestCase): embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 - self.assertEqual( - doc.to_mongo(), - { - "embedded_field": { - "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ["1", 2, {"hello": "world"}], - } - }, - ) + assert doc.to_mongo() == { + "embedded_field": { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ["1", 2, {"hello": "world"}], + } + } doc.save() doc = Doc.objects.first() - self.assertEqual(doc.embedded_field.__class__, Embedded) - self.assertEqual(doc.embedded_field.string_field, 
"hello") - self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"}) - self.assertEqual(doc.embedded_field.list_field, ["1", 2, {"hello": "world"}]) + assert doc.embedded_field.__class__ == Embedded + assert doc.embedded_field.string_field == "hello" + assert doc.embedded_field.int_field == 1 + assert doc.embedded_field.dict_field == {"hello": "world"} + assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}] def test_complex_embedded_documents(self): """Test complex dynamic embedded documents setups""" @@ -296,44 +296,41 @@ class TestDynamicDocument(MongoDBTestCase): embedded_1.list_field = ["1", 2, embedded_2] doc.embedded_field = embedded_1 - self.assertEqual( - doc.to_mongo(), - { - "embedded_field": { - "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": [ - "1", - 2, - { - "_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ["1", 2, {"hello": "world"}], - }, - ], - } - }, - ) + assert doc.to_mongo() == { + "embedded_field": { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ["1", 2, {"hello": "world"}], + }, + ], + } + } doc.save() doc = Doc.objects.first() - self.assertEqual(doc.embedded_field.__class__, Embedded) - self.assertEqual(doc.embedded_field.string_field, "hello") - self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {"hello": "world"}) - self.assertEqual(doc.embedded_field.list_field[0], "1") - self.assertEqual(doc.embedded_field.list_field[1], 2) + assert doc.embedded_field.__class__ == Embedded + assert doc.embedded_field.string_field == "hello" + assert doc.embedded_field.int_field == 1 + assert doc.embedded_field.dict_field == {"hello": "world"} + assert doc.embedded_field.list_field[0] == "1" + assert doc.embedded_field.list_field[1] == 2 embedded_field = doc.embedded_field.list_field[2] - self.assertEqual(embedded_field.__class__, Embedded) - self.assertEqual(embedded_field.string_field, "hello") - self.assertEqual(embedded_field.int_field, 1) - self.assertEqual(embedded_field.dict_field, {"hello": "world"}) - self.assertEqual(embedded_field.list_field, ["1", 2, {"hello": "world"}]) + assert embedded_field.__class__ == Embedded + assert embedded_field.string_field == "hello" + assert embedded_field.int_field == 1 + assert embedded_field.dict_field == {"hello": "world"} + assert embedded_field.list_field == ["1", 2, {"hello": "world"}] def test_dynamic_and_embedded(self): """Ensure embedded documents play nicely""" @@ -352,18 +349,18 @@ class TestDynamicDocument(MongoDBTestCase): person.address.city = "Lundenne" person.save() - self.assertEqual(Person.objects.first().address.city, "Lundenne") + assert Person.objects.first().address.city == "Lundenne" person = Person.objects.first() person.address = Address(city="Londinium") person.save() - self.assertEqual(Person.objects.first().address.city, "Londinium") + assert Person.objects.first().address.city == "Londinium" person = Person.objects.first() person.age = 35 person.save() - self.assertEqual(Person.objects.first().age, 35) + assert Person.objects.first().age == 35 def test_dynamic_embedded_works_with_only(self): """Ensure custom fieldnames on a dynamic embedded 
document are found by qs.only()""" @@ -380,10 +377,10 @@ class TestDynamicDocument(MongoDBTestCase): name="Eric", address=Address(city="San Francisco", street_number="1337") ).save() - self.assertEqual(Person.objects.first().address.street_number, "1337") - self.assertEqual( - Person.objects.only("address__street_number").first().address.street_number, - "1337", + assert Person.objects.first().address.street_number == "1337" + assert ( + Person.objects.only("address__street_number").first().address.street_number + == "1337" ) def test_dynamic_and_embedded_dict_access(self): @@ -408,20 +405,20 @@ class TestDynamicDocument(MongoDBTestCase): person["address"]["city"] = "Lundenne" person.save() - self.assertEqual(Person.objects.first().address.city, "Lundenne") + assert Person.objects.first().address.city == "Lundenne" - self.assertEqual(Person.objects.first().phone, "555-1212") + assert Person.objects.first().phone == "555-1212" person = Person.objects.first() person.address = Address(city="Londinium") person.save() - self.assertEqual(Person.objects.first().address.city, "Londinium") + assert Person.objects.first().address.city == "Londinium" person = Person.objects.first() person["age"] = 35 person.save() - self.assertEqual(Person.objects.first().age, 35) + assert Person.objects.first().age == 35 if __name__ == "__main__": diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index 1b0304c4..cc1aae52 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -9,6 +9,7 @@ from six import iteritems from mongoengine import * from mongoengine.connection import get_db +import pytest class TestIndexes(unittest.TestCase): @@ -53,15 +54,15 @@ class TestIndexes(unittest.TestCase): {"fields": [("tags", 1)]}, {"fields": [("category", 1), ("addDate", -1)]}, ] - self.assertEqual(expected_specs, BlogPost._meta["index_specs"]) + assert expected_specs == BlogPost._meta["index_specs"] BlogPost.ensure_indexes() info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') - self.assertEqual(len(info), 4) + assert len(info) == 4 info = [value["key"] for key, value in iteritems(info)] for expected in expected_specs: - self.assertIn(expected["fields"], info) + assert expected["fields"] in info def _index_test_inheritance(self, InheritFrom): class BlogPost(InheritFrom): @@ -78,7 +79,7 @@ class TestIndexes(unittest.TestCase): {"fields": [("_cls", 1), ("tags", 1)]}, {"fields": [("_cls", 1), ("category", 1), ("addDate", -1)]}, ] - self.assertEqual(expected_specs, BlogPost._meta["index_specs"]) + assert expected_specs == BlogPost._meta["index_specs"] BlogPost.ensure_indexes() info = BlogPost.objects._collection.index_information() @@ -86,17 +87,17 @@ class TestIndexes(unittest.TestCase): # NB: there is no index on _cls by itself, since # the indices on -date and tags will both contain # _cls as first element in the key - self.assertEqual(len(info), 4) + assert len(info) == 4 info = [value["key"] for key, value in iteritems(info)] for expected in expected_specs: - self.assertIn(expected["fields"], info) + assert expected["fields"] in info class ExtendedBlogPost(BlogPost): title = StringField() meta = {"indexes": ["title"]} expected_specs.append({"fields": [("_cls", 1), ("title", 1)]}) - self.assertEqual(expected_specs, ExtendedBlogPost._meta["index_specs"]) + assert expected_specs == ExtendedBlogPost._meta["index_specs"] BlogPost.drop_collection() @@ -104,7 +105,7 @@ class TestIndexes(unittest.TestCase): info = 
ExtendedBlogPost.objects._collection.index_information() info = [value["key"] for key, value in iteritems(info)] for expected in expected_specs: - self.assertIn(expected["fields"], info) + assert expected["fields"] in info def test_indexes_document_inheritance(self): """Ensure that indexes are used when meta[indexes] is specified for @@ -128,10 +129,8 @@ class TestIndexes(unittest.TestCase): class B(A): description = StringField() - self.assertEqual(A._meta["index_specs"], B._meta["index_specs"]) - self.assertEqual( - [{"fields": [("_cls", 1), ("title", 1)]}], A._meta["index_specs"] - ) + assert A._meta["index_specs"] == B._meta["index_specs"] + assert [{"fields": [("_cls", 1), ("title", 1)]}] == A._meta["index_specs"] def test_index_no_cls(self): """Ensure index specs are inhertited correctly""" @@ -144,11 +143,11 @@ class TestIndexes(unittest.TestCase): "index_cls": False, } - self.assertEqual([("title", 1)], A._meta["index_specs"][0]["fields"]) + assert [("title", 1)] == A._meta["index_specs"][0]["fields"] A._get_collection().drop_indexes() A.ensure_indexes() info = A._get_collection().index_information() - self.assertEqual(len(info.keys()), 2) + assert len(info.keys()) == 2 class B(A): c = StringField() @@ -158,8 +157,8 @@ class TestIndexes(unittest.TestCase): "allow_inheritance": True, } - self.assertEqual([("c", 1)], B._meta["index_specs"][1]["fields"]) - self.assertEqual([("_cls", 1), ("d", 1)], B._meta["index_specs"][2]["fields"]) + assert [("c", 1)] == B._meta["index_specs"][1]["fields"] + assert [("_cls", 1), ("d", 1)] == B._meta["index_specs"][2]["fields"] def test_build_index_spec_is_not_destructive(self): class MyDoc(Document): @@ -167,12 +166,12 @@ class TestIndexes(unittest.TestCase): meta = {"indexes": ["keywords"], "allow_inheritance": False} - self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}]) + assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}] # Force index creation MyDoc.ensure_indexes() - self.assertEqual(MyDoc._meta["index_specs"], [{"fields": [("keywords", 1)]}]) + assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}] def test_embedded_document_index_meta(self): """Ensure that embedded document indexes are created explicitly @@ -187,7 +186,7 @@ class TestIndexes(unittest.TestCase): meta = {"indexes": ["rank.title"], "allow_inheritance": False} - self.assertEqual([{"fields": [("rank.title", 1)]}], Person._meta["index_specs"]) + assert [{"fields": [("rank.title", 1)]}] == Person._meta["index_specs"] Person.drop_collection() @@ -195,7 +194,7 @@ class TestIndexes(unittest.TestCase): list(Person.objects) info = Person.objects._collection.index_information() info = [value["key"] for key, value in iteritems(info)] - self.assertIn([("rank.title", 1)], info) + assert [("rank.title", 1)] in info def test_explicit_geo2d_index(self): """Ensure that geo2d indexes work when created via meta[indexes] @@ -205,14 +204,12 @@ class TestIndexes(unittest.TestCase): location = DictField() meta = {"allow_inheritance": True, "indexes": ["*location.point"]} - self.assertEqual( - [{"fields": [("location.point", "2d")]}], Place._meta["index_specs"] - ) + assert [{"fields": [("location.point", "2d")]}] == Place._meta["index_specs"] Place.ensure_indexes() info = Place._get_collection().index_information() info = [value["key"] for key, value in iteritems(info)] - self.assertIn([("location.point", "2d")], info) + assert [("location.point", "2d")] in info def test_explicit_geo2d_index_embedded(self): """Ensure that geo2d indexes work when 
created via meta[indexes] @@ -225,14 +222,14 @@ class TestIndexes(unittest.TestCase): current = DictField(field=EmbeddedDocumentField("EmbeddedLocation")) meta = {"allow_inheritance": True, "indexes": ["*current.location.point"]} - self.assertEqual( - [{"fields": [("current.location.point", "2d")]}], Place._meta["index_specs"] - ) + assert [{"fields": [("current.location.point", "2d")]}] == Place._meta[ + "index_specs" + ] Place.ensure_indexes() info = Place._get_collection().index_information() info = [value["key"] for key, value in iteritems(info)] - self.assertIn([("current.location.point", "2d")], info) + assert [("current.location.point", "2d")] in info def test_explicit_geosphere_index(self): """Ensure that geosphere indexes work when created via meta[indexes] @@ -242,14 +239,14 @@ class TestIndexes(unittest.TestCase): location = DictField() meta = {"allow_inheritance": True, "indexes": ["(location.point"]} - self.assertEqual( - [{"fields": [("location.point", "2dsphere")]}], Place._meta["index_specs"] - ) + assert [{"fields": [("location.point", "2dsphere")]}] == Place._meta[ + "index_specs" + ] Place.ensure_indexes() info = Place._get_collection().index_information() info = [value["key"] for key, value in iteritems(info)] - self.assertIn([("location.point", "2dsphere")], info) + assert [("location.point", "2dsphere")] in info def test_explicit_geohaystack_index(self): """Ensure that geohaystack indexes work when created via meta[indexes] @@ -264,15 +261,14 @@ class TestIndexes(unittest.TestCase): name = StringField() meta = {"indexes": [(")location.point", "name")]} - self.assertEqual( - [{"fields": [("location.point", "geoHaystack"), ("name", 1)]}], - Place._meta["index_specs"], - ) + assert [ + {"fields": [("location.point", "geoHaystack"), ("name", 1)]} + ] == Place._meta["index_specs"] Place.ensure_indexes() info = Place._get_collection().index_information() info = [value["key"] for key, value in iteritems(info)] - self.assertIn([("location.point", "geoHaystack")], info) + assert [("location.point", "geoHaystack")] in info def test_create_geohaystack_index(self): """Ensure that geohaystack indexes can be created @@ -285,7 +281,7 @@ class TestIndexes(unittest.TestCase): Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) info = Place._get_collection().index_information() info = [value["key"] for key, value in iteritems(info)] - self.assertIn([("location.point", "geoHaystack"), ("name", 1)], info) + assert [("location.point", "geoHaystack"), ("name", 1)] in info def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains @@ -298,16 +294,15 @@ class TestIndexes(unittest.TestCase): tags = ListField(StringField()) meta = {"indexes": [{"fields": ["-date"], "unique": True, "sparse": True}]} - self.assertEqual( - [{"fields": [("addDate", -1)], "unique": True, "sparse": True}], - BlogPost._meta["index_specs"], - ) + assert [ + {"fields": [("addDate", -1)], "unique": True, "sparse": True} + ] == BlogPost._meta["index_specs"] BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # _id, '-date' - self.assertEqual(len(info), 2) + assert len(info) == 2 # Indexes are lazy so use list() to perform query list(BlogPost.objects) @@ -316,7 +311,7 @@ class TestIndexes(unittest.TestCase): (value["key"], value.get("unique", False), value.get("sparse", False)) for key, value in iteritems(info) ] - self.assertIn(([("addDate", -1)], True, True), info) + assert ([("addDate", -1)], True, True) in info 
BlogPost.drop_collection() @@ -338,11 +333,9 @@ class TestIndexes(unittest.TestCase): Person(name="test", user_guid="123").save() - self.assertEqual(1, Person.objects.count()) + assert 1 == Person.objects.count() info = Person.objects._collection.index_information() - self.assertEqual( - sorted(info.keys()), ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"] - ) + assert sorted(info.keys()) == ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"] def test_disable_index_creation(self): """Tests setting auto_create_index to False on the connection will @@ -365,13 +358,13 @@ class TestIndexes(unittest.TestCase): User(user_guid="123").save() MongoUser(user_guid="123").save() - self.assertEqual(2, User.objects.count()) + assert 2 == User.objects.count() info = User.objects._collection.index_information() - self.assertEqual(list(info.keys()), ["_id_"]) + assert list(info.keys()) == ["_id_"] User.ensure_indexes() info = User.objects._collection.index_information() - self.assertEqual(sorted(info.keys()), ["_cls_1_user_guid_1", "_id_"]) + assert sorted(info.keys()) == ["_cls_1_user_guid_1", "_id_"] def test_embedded_document_index(self): """Tests settings an index on an embedded document @@ -389,7 +382,7 @@ class TestIndexes(unittest.TestCase): BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() - self.assertEqual(sorted(info.keys()), ["_id_", "date.yr_-1"]) + assert sorted(info.keys()) == ["_id_", "date.yr_-1"] def test_list_embedded_document_index(self): """Ensure list embedded documents can be indexed @@ -408,7 +401,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() # we don't use _cls in with list fields by default - self.assertEqual(sorted(info.keys()), ["_id_", "tags.tag_1"]) + assert sorted(info.keys()) == ["_id_", "tags.tag_1"] post1 = BlogPost( title="Embedded Indexes tests in place", @@ -426,7 +419,7 @@ class TestIndexes(unittest.TestCase): RecursiveDocument.ensure_indexes() info = RecursiveDocument._get_collection().index_information() - self.assertEqual(sorted(info.keys()), ["_cls_1", "_id_"]) + assert sorted(info.keys()) == ["_cls_1", "_id_"] def test_covered_index(self): """Ensure that covered indexes can be used @@ -446,46 +439,45 @@ class TestIndexes(unittest.TestCase): # Need to be explicit about covered indexes as mongoDB doesn't know if # the documents returned might have more keys in that here. 
query_plan = Test.objects(id=obj.id).exclude("a").explain() - self.assertEqual( + assert ( query_plan.get("queryPlanner") .get("winningPlan") .get("inputStage") - .get("stage"), - "IDHACK", + .get("stage") + == "IDHACK" ) query_plan = Test.objects(id=obj.id).only("id").explain() - self.assertEqual( + assert ( query_plan.get("queryPlanner") .get("winningPlan") .get("inputStage") - .get("stage"), - "IDHACK", + .get("stage") + == "IDHACK" ) query_plan = Test.objects(a=1).only("a").exclude("id").explain() - self.assertEqual( + assert ( query_plan.get("queryPlanner") .get("winningPlan") .get("inputStage") - .get("stage"), - "IXSCAN", + .get("stage") + == "IXSCAN" ) - self.assertEqual( - query_plan.get("queryPlanner").get("winningPlan").get("stage"), "PROJECTION" + assert ( + query_plan.get("queryPlanner").get("winningPlan").get("stage") + == "PROJECTION" ) query_plan = Test.objects(a=1).explain() - self.assertEqual( + assert ( query_plan.get("queryPlanner") .get("winningPlan") .get("inputStage") - .get("stage"), - "IXSCAN", - ) - self.assertEqual( - query_plan.get("queryPlanner").get("winningPlan").get("stage"), "FETCH" + .get("stage") + == "IXSCAN" ) + assert query_plan.get("queryPlanner").get("winningPlan").get("stage") == "FETCH" def test_index_on_id(self): class BlogPost(Document): @@ -498,9 +490,7 @@ class TestIndexes(unittest.TestCase): BlogPost.drop_collection() indexes = BlogPost.objects._collection.index_information() - self.assertEqual( - indexes["categories_1__id_1"]["key"], [("categories", 1), ("_id", 1)] - ) + assert indexes["categories_1__id_1"]["key"] == [("categories", 1), ("_id", 1)] def test_hint(self): TAGS_INDEX_NAME = "tags_1" @@ -516,25 +506,25 @@ class TestIndexes(unittest.TestCase): BlogPost(tags=tags).save() # Hinting by shape should work. - self.assertEqual(BlogPost.objects.hint([("tags", 1)]).count(), 10) + assert BlogPost.objects.hint([("tags", 1)]).count() == 10 # Hinting by index name should work. - self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10) + assert BlogPost.objects.hint(TAGS_INDEX_NAME).count() == 10 # Clearing the hint should work fine. - self.assertEqual(BlogPost.objects.hint().count(), 10) - self.assertEqual(BlogPost.objects.hint([("ZZ", 1)]).hint().count(), 10) + assert BlogPost.objects.hint().count() == 10 + assert BlogPost.objects.hint([("ZZ", 1)]).hint().count() == 10 # Hinting on a non-existent index shape should fail. - with self.assertRaises(OperationFailure): + with pytest.raises(OperationFailure): BlogPost.objects.hint([("ZZ", 1)]).count() # Hinting on a non-existent index name should fail. - with self.assertRaises(OperationFailure): + with pytest.raises(OperationFailure): BlogPost.objects.hint("Bad Name").count() # Invalid shape argument (missing list brackets) should fail. 
- with self.assertRaises(ValueError): + with pytest.raises(ValueError): BlogPost.objects.hint(("tags", 1)).count() def test_collation(self): @@ -588,11 +578,14 @@ class TestIndexes(unittest.TestCase): # Two posts with the same slug is not allowed post2 = BlogPost(title="test2", slug="test") - self.assertRaises(NotUniqueError, post2.save) - self.assertRaises(NotUniqueError, BlogPost.objects.insert, post2) + with pytest.raises(NotUniqueError): + post2.save() + with pytest.raises(NotUniqueError): + BlogPost.objects.insert(post2) # Ensure backwards compatibility for errors - self.assertRaises(OperationError, post2.save) + with pytest.raises(OperationError): + post2.save() def test_primary_key_unique_not_working(self): """Relates to #1445""" @@ -602,23 +595,21 @@ class TestIndexes(unittest.TestCase): Blog.drop_collection() - with self.assertRaises(OperationFailure) as ctx_err: + with pytest.raises(OperationFailure) as ctx_err: Blog(id="garbage").save() # One of the errors below should happen. Which one depends on the # PyMongo version and dict order. err_msg = str(ctx_err.exception) - self.assertTrue( - any( - [ - "The field 'unique' is not valid for an _id index specification" - in err_msg, - "The field 'background' is not valid for an _id index specification" - in err_msg, - "The field 'sparse' is not valid for an _id index specification" - in err_msg, - ] - ) + assert any( + [ + "The field 'unique' is not valid for an _id index specification" + in err_msg, + "The field 'background' is not valid for an _id index specification" + in err_msg, + "The field 'sparse' is not valid for an _id index specification" + in err_msg, + ] ) def test_unique_with(self): @@ -644,7 +635,8 @@ class TestIndexes(unittest.TestCase): # Now there will be two docs with the same slug and the same day: fail post3 = BlogPost(title="test3", date=Date(year=2010), slug="test") - self.assertRaises(OperationError, post3.save) + with pytest.raises(OperationError): + post3.save() def test_unique_embedded_document(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. 
@@ -669,7 +661,8 @@ class TestIndexes(unittest.TestCase): # Now there will be two docs with the same sub.slug post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test")) - self.assertRaises(NotUniqueError, post3.save) + with pytest.raises(NotUniqueError): + post3.save() def test_unique_embedded_document_in_list(self): """ @@ -699,7 +692,8 @@ class TestIndexes(unittest.TestCase): post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) - self.assertRaises(NotUniqueError, post2.save) + with pytest.raises(NotUniqueError): + post2.save() def test_unique_embedded_document_in_sorted_list(self): """ @@ -729,12 +723,13 @@ class TestIndexes(unittest.TestCase): # confirm that the unique index is created indexes = BlogPost._get_collection().index_information() - self.assertIn("subs.slug_1", indexes) - self.assertTrue(indexes["subs.slug_1"]["unique"]) + assert "subs.slug_1" in indexes + assert indexes["subs.slug_1"]["unique"] post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) - self.assertRaises(NotUniqueError, post2.save) + with pytest.raises(NotUniqueError): + post2.save() def test_unique_embedded_document_in_embedded_document_list(self): """ @@ -764,12 +759,13 @@ class TestIndexes(unittest.TestCase): # confirm that the unique index is created indexes = BlogPost._get_collection().index_information() - self.assertIn("subs.slug_1", indexes) - self.assertTrue(indexes["subs.slug_1"]["unique"]) + assert "subs.slug_1" in indexes + assert indexes["subs.slug_1"]["unique"] post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) - self.assertRaises(NotUniqueError, post2.save) + with pytest.raises(NotUniqueError): + post2.save() def test_unique_with_embedded_document_and_embedded_unique(self): """Ensure that uniqueness constraints are applied to fields on @@ -795,11 +791,13 @@ class TestIndexes(unittest.TestCase): # Now there will be two docs with the same sub.slug post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test")) - self.assertRaises(NotUniqueError, post3.save) + with pytest.raises(NotUniqueError): + post3.save() # Now there will be two docs with the same title and year post3 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test-1")) - self.assertRaises(NotUniqueError, post3.save) + with pytest.raises(NotUniqueError): + post3.save() def test_ttl_indexes(self): class Log(Document): @@ -811,7 +809,7 @@ class TestIndexes(unittest.TestCase): # Indexes are lazy so use list() to perform query list(Log.objects) info = Log.objects._collection.index_information() - self.assertEqual(3600, info["created_1"]["expireAfterSeconds"]) + assert 3600 == info["created_1"]["expireAfterSeconds"] def test_index_drop_dups_silently_ignored(self): class Customer(Document): @@ -839,14 +837,14 @@ class TestIndexes(unittest.TestCase): cust.save() cust_dupe = Customer(cust_id=1) - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): cust_dupe.save() cust = Customer(cust_id=2) cust.save() # duplicate key on update - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): cust.cust_id = 1 cust.save() @@ -867,8 +865,8 @@ class TestIndexes(unittest.TestCase): user = User(name="huangz", password="secret2") user.save() - self.assertEqual(User.objects.count(), 1) - self.assertEqual(User.objects.get().password, "secret2") + assert User.objects.count() == 1 + assert User.objects.get().password == "secret2" def test_unique_and_primary_create(self): """Create a new record with a 
duplicate primary key @@ -882,11 +880,11 @@ class TestIndexes(unittest.TestCase): User.drop_collection() User.objects.create(name="huangz", password="secret") - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): User.objects.create(name="huangz", password="secret2") - self.assertEqual(User.objects.count(), 1) - self.assertEqual(User.objects.get().password, "secret") + assert User.objects.count() == 1 + assert User.objects.get().password == "secret" def test_index_with_pk(self): """Ensure you can use `pk` as part of a query""" @@ -910,7 +908,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() info = [value["key"] for key, value in iteritems(info)] index_item = [("_id", 1), ("comments.comment_id", 1)] - self.assertIn(index_item, info) + assert index_item in info def test_compound_key_embedded(self): class CompoundKey(EmbeddedDocument): @@ -924,10 +922,8 @@ class TestIndexes(unittest.TestCase): my_key = CompoundKey(name="n", term="ok") report = ReportEmbedded(text="OK", key=my_key).save() - self.assertEqual( - {"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo() - ) - self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key)) + assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo() + assert report == ReportEmbedded.objects.get(pk=my_key) def test_compound_key_dictfield(self): class ReportDictField(Document): @@ -937,15 +933,13 @@ class TestIndexes(unittest.TestCase): my_key = {"name": "n", "term": "ok"} report = ReportDictField(text="OK", key=my_key).save() - self.assertEqual( - {"text": "OK", "_id": {"term": "ok", "name": "n"}}, report.to_mongo() - ) + assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo() # We can't directly call ReportDictField.objects.get(pk=my_key), # because dicts are unordered, and if the order in MongoDB is # different than the one in `my_key`, this test will fail. 
- self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key["name"])) - self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key["term"])) + assert report == ReportDictField.objects.get(pk__name=my_key["name"]) + assert report == ReportDictField.objects.get(pk__term=my_key["term"]) def test_string_indexes(self): class MyDoc(Document): @@ -954,8 +948,8 @@ class TestIndexes(unittest.TestCase): info = MyDoc.objects._collection.index_information() info = [value["key"] for key, value in iteritems(info)] - self.assertIn([("provider_ids.foo", 1)], info) - self.assertIn([("provider_ids.bar", 1)], info) + assert [("provider_ids.foo", 1)] in info + assert [("provider_ids.bar", 1)] in info def test_sparse_compound_indexes(self): class MyDoc(Document): @@ -967,11 +961,10 @@ class TestIndexes(unittest.TestCase): } info = MyDoc.objects._collection.index_information() - self.assertEqual( - [("provider_ids.foo", 1), ("provider_ids.bar", 1)], - info["provider_ids.foo_1_provider_ids.bar_1"]["key"], - ) - self.assertTrue(info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"]) + assert [("provider_ids.foo", 1), ("provider_ids.bar", 1)] == info[ + "provider_ids.foo_1_provider_ids.bar_1" + ]["key"] + assert info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"] def test_text_indexes(self): class Book(Document): @@ -979,9 +972,9 @@ class TestIndexes(unittest.TestCase): meta = {"indexes": ["$title"]} indexes = Book.objects._collection.index_information() - self.assertIn("title_text", indexes) + assert "title_text" in indexes key = indexes["title_text"]["key"] - self.assertIn(("_fts", "text"), key) + assert ("_fts", "text") in key def test_hashed_indexes(self): class Book(Document): @@ -989,8 +982,8 @@ class TestIndexes(unittest.TestCase): meta = {"indexes": ["#ref_id"]} indexes = Book.objects._collection.index_information() - self.assertIn("ref_id_hashed", indexes) - self.assertIn(("ref_id", "hashed"), indexes["ref_id_hashed"]["key"]) + assert "ref_id_hashed" in indexes + assert ("ref_id", "hashed") in indexes["ref_id_hashed"]["key"] def test_indexes_after_database_drop(self): """ @@ -1027,7 +1020,8 @@ class TestIndexes(unittest.TestCase): # Create Post #2 post2 = BlogPost(title="test2", slug="test") - self.assertRaises(NotUniqueError, post2.save) + with pytest.raises(NotUniqueError): + post2.save() finally: # Drop the temporary database at the end connection.drop_database("tempdatabase") @@ -1074,15 +1068,12 @@ class TestIndexes(unittest.TestCase): "dropDups" ] # drop the index dropDups - it is deprecated in MongoDB 3+ - self.assertEqual( - index_info, - { - "txt_1": {"key": [("txt", 1)], "background": False}, - "_id_": {"key": [("_id", 1)]}, - "txt2_1": {"key": [("txt2", 1)], "background": False}, - "_cls_1": {"key": [("_cls", 1)], "background": False}, - }, - ) + assert index_info == { + "txt_1": {"key": [("txt", 1)], "background": False}, + "_id_": {"key": [("_id", 1)]}, + "txt2_1": {"key": [("txt2", 1)], "background": False}, + "_cls_1": {"key": [("_cls", 1)], "background": False}, + } def test_compound_index_underscore_cls_not_overwritten(self): """ @@ -1105,7 +1096,7 @@ class TestIndexes(unittest.TestCase): TestDoc.ensure_indexes() index_info = TestDoc._get_collection().index_information() - self.assertIn("shard_1_1__cls_1_txt_1_1", index_info) + assert "shard_1_1__cls_1_txt_1_1" in index_info if __name__ == "__main__": diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index 4bb46e58..6a913b3e 100644 --- a/tests/document/test_inheritance.py +++ 
b/tests/document/test_inheritance.py @@ -17,6 +17,7 @@ from mongoengine import ( from mongoengine.pymongo_support import list_collection_names from tests.fixtures import Base from tests.utils import MongoDBTestCase +import pytest class TestInheritance(MongoDBTestCase): @@ -37,12 +38,12 @@ class TestInheritance(MongoDBTestCase): meta = {"allow_inheritance": True} test_doc = DataDoc(name="test", embed=EmbedData(data="data")) - self.assertEqual(test_doc._cls, "DataDoc") - self.assertEqual(test_doc.embed._cls, "EmbedData") + assert test_doc._cls == "DataDoc" + assert test_doc.embed._cls == "EmbedData" test_doc.save() saved_doc = DataDoc.objects.with_id(test_doc.id) - self.assertEqual(test_doc._cls, saved_doc._cls) - self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls) + assert test_doc._cls == saved_doc._cls + assert test_doc.embed._cls == saved_doc.embed._cls test_doc.delete() def test_superclasses(self): @@ -67,12 +68,12 @@ class TestInheritance(MongoDBTestCase): class Human(Mammal): pass - self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Fish._superclasses, ("Animal",)) - self.assertEqual(Guppy._superclasses, ("Animal", "Animal.Fish")) - self.assertEqual(Mammal._superclasses, ("Animal",)) - self.assertEqual(Dog._superclasses, ("Animal", "Animal.Mammal")) - self.assertEqual(Human._superclasses, ("Animal", "Animal.Mammal")) + assert Animal._superclasses == () + assert Fish._superclasses == ("Animal",) + assert Guppy._superclasses == ("Animal", "Animal.Fish") + assert Mammal._superclasses == ("Animal",) + assert Dog._superclasses == ("Animal", "Animal.Mammal") + assert Human._superclasses == ("Animal", "Animal.Mammal") def test_external_superclasses(self): """Ensure that the correct list of super classes is assembled when @@ -97,18 +98,12 @@ class TestInheritance(MongoDBTestCase): class Human(Mammal): pass - self.assertEqual(Animal._superclasses, ("Base",)) - self.assertEqual(Fish._superclasses, ("Base", "Base.Animal")) - self.assertEqual( - Guppy._superclasses, ("Base", "Base.Animal", "Base.Animal.Fish") - ) - self.assertEqual(Mammal._superclasses, ("Base", "Base.Animal")) - self.assertEqual( - Dog._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal") - ) - self.assertEqual( - Human._superclasses, ("Base", "Base.Animal", "Base.Animal.Mammal") - ) + assert Animal._superclasses == ("Base",) + assert Fish._superclasses == ("Base", "Base.Animal") + assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish") + assert Mammal._superclasses == ("Base", "Base.Animal") + assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") + assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") def test_subclasses(self): """Ensure that the correct list of _subclasses (subclasses) is @@ -133,24 +128,22 @@ class TestInheritance(MongoDBTestCase): class Human(Mammal): pass - self.assertEqual( - Animal._subclasses, - ( - "Animal", - "Animal.Fish", - "Animal.Fish.Guppy", - "Animal.Mammal", - "Animal.Mammal.Dog", - "Animal.Mammal.Human", - ), + assert Animal._subclasses == ( + "Animal", + "Animal.Fish", + "Animal.Fish.Guppy", + "Animal.Mammal", + "Animal.Mammal.Dog", + "Animal.Mammal.Human", ) - self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Guppy")) - self.assertEqual(Guppy._subclasses, ("Animal.Fish.Guppy",)) - self.assertEqual( - Mammal._subclasses, - ("Animal.Mammal", "Animal.Mammal.Dog", "Animal.Mammal.Human"), + assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy") + assert Guppy._subclasses == 
("Animal.Fish.Guppy",) + assert Mammal._subclasses == ( + "Animal.Mammal", + "Animal.Mammal.Dog", + "Animal.Mammal.Human", ) - self.assertEqual(Human._subclasses, ("Animal.Mammal.Human",)) + assert Human._subclasses == ("Animal.Mammal.Human",) def test_external_subclasses(self): """Ensure that the correct list of _subclasses (subclasses) is @@ -175,30 +168,22 @@ class TestInheritance(MongoDBTestCase): class Human(Mammal): pass - self.assertEqual( - Animal._subclasses, - ( - "Base.Animal", - "Base.Animal.Fish", - "Base.Animal.Fish.Guppy", - "Base.Animal.Mammal", - "Base.Animal.Mammal.Dog", - "Base.Animal.Mammal.Human", - ), + assert Animal._subclasses == ( + "Base.Animal", + "Base.Animal.Fish", + "Base.Animal.Fish.Guppy", + "Base.Animal.Mammal", + "Base.Animal.Mammal.Dog", + "Base.Animal.Mammal.Human", ) - self.assertEqual( - Fish._subclasses, ("Base.Animal.Fish", "Base.Animal.Fish.Guppy") + assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy") + assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",) + assert Mammal._subclasses == ( + "Base.Animal.Mammal", + "Base.Animal.Mammal.Dog", + "Base.Animal.Mammal.Human", ) - self.assertEqual(Guppy._subclasses, ("Base.Animal.Fish.Guppy",)) - self.assertEqual( - Mammal._subclasses, - ( - "Base.Animal.Mammal", - "Base.Animal.Mammal.Dog", - "Base.Animal.Mammal.Human", - ), - ) - self.assertEqual(Human._subclasses, ("Base.Animal.Mammal.Human",)) + assert Human._subclasses == ("Base.Animal.Mammal.Human",) def test_dynamic_declarations(self): """Test that declaring an extra class updates meta data""" @@ -206,33 +191,31 @@ class TestInheritance(MongoDBTestCase): class Animal(Document): meta = {"allow_inheritance": True} - self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ("Animal",)) + assert Animal._superclasses == () + assert Animal._subclasses == ("Animal",) # Test dynamically adding a class changes the meta data class Fish(Animal): pass - self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ("Animal", "Animal.Fish")) + assert Animal._superclasses == () + assert Animal._subclasses == ("Animal", "Animal.Fish") - self.assertEqual(Fish._superclasses, ("Animal",)) - self.assertEqual(Fish._subclasses, ("Animal.Fish",)) + assert Fish._superclasses == ("Animal",) + assert Fish._subclasses == ("Animal.Fish",) # Test dynamically adding an inherited class changes the meta data class Pike(Fish): pass - self.assertEqual(Animal._superclasses, ()) - self.assertEqual( - Animal._subclasses, ("Animal", "Animal.Fish", "Animal.Fish.Pike") - ) + assert Animal._superclasses == () + assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike") - self.assertEqual(Fish._superclasses, ("Animal",)) - self.assertEqual(Fish._subclasses, ("Animal.Fish", "Animal.Fish.Pike")) + assert Fish._superclasses == ("Animal",) + assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike") - self.assertEqual(Pike._superclasses, ("Animal", "Animal.Fish")) - self.assertEqual(Pike._subclasses, ("Animal.Fish.Pike",)) + assert Pike._superclasses == ("Animal", "Animal.Fish") + assert Pike._subclasses == ("Animal.Fish.Pike",) def test_inheritance_meta_data(self): """Ensure that document may inherit fields from a superclass document. 
@@ -247,10 +230,10 @@ class TestInheritance(MongoDBTestCase): class Employee(Person): salary = IntField() - self.assertEqual( - ["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys()) + assert ["_cls", "age", "id", "name", "salary"] == sorted( + Employee._fields.keys() ) - self.assertEqual(Employee._get_collection_name(), Person._get_collection_name()) + assert Employee._get_collection_name() == Person._get_collection_name() def test_inheritance_to_mongo_keys(self): """Ensure that document may inherit fields from a superclass document. @@ -265,17 +248,17 @@ class TestInheritance(MongoDBTestCase): class Employee(Person): salary = IntField() - self.assertEqual( - ["_cls", "age", "id", "name", "salary"], sorted(Employee._fields.keys()) + assert ["_cls", "age", "id", "name", "salary"] == sorted( + Employee._fields.keys() ) - self.assertEqual( - Person(name="Bob", age=35).to_mongo().keys(), ["_cls", "name", "age"] - ) - self.assertEqual( - Employee(name="Bob", age=35, salary=0).to_mongo().keys(), - ["_cls", "name", "age", "salary"], - ) - self.assertEqual(Employee._get_collection_name(), Person._get_collection_name()) + assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] + assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ + "_cls", + "name", + "age", + "salary", + ] + assert Employee._get_collection_name() == Person._get_collection_name() def test_indexes_and_multiple_inheritance(self): """ Ensure that all of the indexes are created for a document with @@ -301,13 +284,10 @@ class TestInheritance(MongoDBTestCase): C.ensure_indexes() - self.assertEqual( - sorted( - [idx["key"] for idx in C._get_collection().index_information().values()] - ), - sorted( - [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]] - ), + assert sorted( + [idx["key"] for idx in C._get_collection().index_information().values()] + ) == sorted( + [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]] ) def test_polymorphic_queries(self): @@ -338,13 +318,13 @@ class TestInheritance(MongoDBTestCase): Human().save() classes = [obj.__class__ for obj in Animal.objects] - self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) + assert classes == [Animal, Fish, Mammal, Dog, Human] classes = [obj.__class__ for obj in Mammal.objects] - self.assertEqual(classes, [Mammal, Dog, Human]) + assert classes == [Mammal, Dog, Human] classes = [obj.__class__ for obj in Human.objects] - self.assertEqual(classes, [Human]) + assert classes == [Human] def test_allow_inheritance(self): """Ensure that inheritance is disabled by default on simple @@ -355,20 +335,20 @@ class TestInheritance(MongoDBTestCase): name = StringField() # can't inherit because Animal didn't explicitly allow inheritance - with self.assertRaises(ValueError) as cm: + with pytest.raises(ValueError) as cm: class Dog(Animal): pass - self.assertIn("Document Animal may not be subclassed", str(cm.exception)) + assert "Document Animal may not be subclassed" in str(cm.exception) # Check that _cls etc aren't present on simple documents dog = Animal(name="dog").save() - self.assertEqual(dog.to_mongo().keys(), ["_id", "name"]) + assert dog.to_mongo().keys() == ["_id", "name"] collection = self.db[Animal._get_collection_name()] obj = collection.find_one() - self.assertNotIn("_cls", obj) + assert "_cls" not in obj def test_cant_turn_off_inheritance_on_subclass(self): """Ensure if inheritance is on in a subclass you cant turn it off. 
@@ -378,14 +358,14 @@ class TestInheritance(MongoDBTestCase): name = StringField() meta = {"allow_inheritance": True} - with self.assertRaises(ValueError) as cm: + with pytest.raises(ValueError) as cm: class Mammal(Animal): meta = {"allow_inheritance": False} - self.assertEqual( - str(cm.exception), - 'Only direct subclasses of Document may set "allow_inheritance" to False', + assert ( + str(cm.exception) + == 'Only direct subclasses of Document may set "allow_inheritance" to False' ) def test_allow_inheritance_abstract_document(self): @@ -399,14 +379,14 @@ class TestInheritance(MongoDBTestCase): class Animal(FinalDocument): name = StringField() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): class Mammal(Animal): pass # Check that _cls isn't present in simple documents doc = Animal(name="dog") - self.assertNotIn("_cls", doc.to_mongo()) + assert "_cls" not in doc.to_mongo() def test_using_abstract_class_in_reference_field(self): # Ensures no regression of #1920 @@ -452,10 +432,10 @@ class TestInheritance(MongoDBTestCase): name = StringField() berlin = EuropeanCity(name="Berlin", continent="Europe") - self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 3) - self.assertEqual(berlin._fields_ordered[0], "id") + assert len(berlin._db_field_map) == len(berlin._fields_ordered) + assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) + assert len(berlin._fields_ordered) == 3 + assert berlin._fields_ordered[0] == "id" def test_auto_id_not_set_if_specific_in_parent_class(self): class City(Document): @@ -467,10 +447,10 @@ class TestInheritance(MongoDBTestCase): name = StringField() berlin = EuropeanCity(name="Berlin", continent="Europe") - self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 3) - self.assertEqual(berlin._fields_ordered[0], "city_id") + assert len(berlin._db_field_map) == len(berlin._fields_ordered) + assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) + assert len(berlin._fields_ordered) == 3 + assert berlin._fields_ordered[0] == "city_id" def test_auto_id_vs_non_pk_id_field(self): class City(Document): @@ -482,12 +462,12 @@ class TestInheritance(MongoDBTestCase): name = StringField() berlin = EuropeanCity(name="Berlin", continent="Europe") - self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 4) - self.assertEqual(berlin._fields_ordered[0], "auto_id_0") + assert len(berlin._db_field_map) == len(berlin._fields_ordered) + assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) + assert len(berlin._fields_ordered) == 4 + assert berlin._fields_ordered[0] == "auto_id_0" berlin.save() - self.assertEqual(berlin.pk, berlin.auto_id_0) + assert berlin.pk == berlin.auto_id_0 def test_abstract_document_creation_does_not_fail(self): class City(Document): @@ -495,9 +475,9 @@ class TestInheritance(MongoDBTestCase): meta = {"abstract": True, "allow_inheritance": False} city = City(continent="asia") - self.assertEqual(None, city.pk) + assert None == city.pk # TODO: expected error? Shouldn't we create a new error type? 
- with self.assertRaises(KeyError): + with pytest.raises(KeyError): setattr(city, "pk", 1) def test_allow_inheritance_embedded_document(self): @@ -506,20 +486,20 @@ class TestInheritance(MongoDBTestCase): class Comment(EmbeddedDocument): content = StringField() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): class SpecialComment(Comment): pass doc = Comment(content="test") - self.assertNotIn("_cls", doc.to_mongo()) + assert "_cls" not in doc.to_mongo() class Comment(EmbeddedDocument): content = StringField() meta = {"allow_inheritance": True} doc = Comment(content="test") - self.assertIn("_cls", doc.to_mongo()) + assert "_cls" in doc.to_mongo() def test_document_inheritance(self): """Ensure mutliple inheritance of abstract documents @@ -537,7 +517,7 @@ class TestInheritance(MongoDBTestCase): pass except Exception: - self.assertTrue(False, "Couldn't create MyDocument class") + assert False, "Couldn't create MyDocument class" def test_abstract_documents(self): """Ensure that a document superclass can be marked as abstract @@ -574,20 +554,20 @@ class TestInheritance(MongoDBTestCase): for k, v in iteritems(defaults): for cls in [Animal, Fish, Guppy]: - self.assertEqual(cls._meta[k], v) + assert cls._meta[k] == v - self.assertNotIn("collection", Animal._meta) - self.assertNotIn("collection", Mammal._meta) + assert "collection" not in Animal._meta + assert "collection" not in Mammal._meta - self.assertEqual(Animal._get_collection_name(), None) - self.assertEqual(Mammal._get_collection_name(), None) + assert Animal._get_collection_name() == None + assert Mammal._get_collection_name() == None - self.assertEqual(Fish._get_collection_name(), "fish") - self.assertEqual(Guppy._get_collection_name(), "fish") - self.assertEqual(Human._get_collection_name(), "human") + assert Fish._get_collection_name() == "fish" + assert Guppy._get_collection_name() == "fish" + assert Human._get_collection_name() == "human" # ensure that a subclass of a non-abstract class can't be abstract - with self.assertRaises(ValueError): + with pytest.raises(ValueError): class EvilHuman(Human): evil = BooleanField(default=True) @@ -601,7 +581,7 @@ class TestInheritance(MongoDBTestCase): class B(A): pass - self.assertFalse(B._meta["abstract"]) + assert not B._meta["abstract"] def test_inherited_collections(self): """Ensure that subclassed documents don't override parents' @@ -647,8 +627,8 @@ class TestInheritance(MongoDBTestCase): real_person = Drinker(drink=beer) real_person.save() - self.assertEqual(Drinker.objects[0].drink.name, red_bull.name) - self.assertEqual(Drinker.objects[1].drink.name, beer.name) + assert Drinker.objects[0].drink.name == red_bull.name + assert Drinker.objects[1].drink.name == beer.name if __name__ == "__main__": diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 203e2cce..01dc492b 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -36,6 +36,7 @@ from tests.fixtures import ( PickleTest, ) from tests.utils import MongoDBTestCase, get_as_pymongo +import pytest TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "../fields/mongoengine.png") @@ -63,18 +64,17 @@ class TestInstance(MongoDBTestCase): self.db.drop_collection(collection) def assertDbEqual(self, docs): - self.assertEqual( - list(self.Person._get_collection().find().sort("id")), - sorted(docs, key=lambda doc: doc["_id"]), + assert list(self.Person._get_collection().find().sort("id")) == sorted( + docs, key=lambda doc: doc["_id"] ) def 
assertHasInstance(self, field, instance): - self.assertTrue(hasattr(field, "_instance")) - self.assertTrue(field._instance is not None) + assert hasattr(field, "_instance") + assert field._instance is not None if isinstance(field._instance, weakref.ProxyType): - self.assertTrue(field._instance.__eq__(instance)) + assert field._instance.__eq__(instance) else: - self.assertEqual(field._instance, instance) + assert field._instance == instance def test_capped_collection(self): """Ensure that capped collections work properly.""" @@ -89,16 +89,16 @@ class TestInstance(MongoDBTestCase): for _ in range(10): Log().save() - self.assertEqual(Log.objects.count(), 10) + assert Log.objects.count() == 10 # Check that extra documents don't increase the size Log().save() - self.assertEqual(Log.objects.count(), 10) + assert Log.objects.count() == 10 options = Log.objects._collection.options() - self.assertEqual(options["capped"], True) - self.assertEqual(options["max"], 10) - self.assertEqual(options["size"], 4096) + assert options["capped"] == True + assert options["max"] == 10 + assert options["size"] == 4096 # Check that the document cannot be redefined with different options class Log(Document): @@ -106,7 +106,7 @@ class TestInstance(MongoDBTestCase): meta = {"max_documents": 11} # Accessing Document.objects creates the collection - with self.assertRaises(InvalidCollectionError): + with pytest.raises(InvalidCollectionError): Log.objects def test_capped_collection_default(self): @@ -122,9 +122,9 @@ class TestInstance(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - self.assertEqual(options["capped"], True) - self.assertEqual(options["max"], 10) - self.assertEqual(options["size"], 10 * 2 ** 20) + assert options["capped"] == True + assert options["max"] == 10 + assert options["size"] == 10 * 2 ** 20 # Check that the document with default value can be recreated class Log(Document): @@ -150,8 +150,8 @@ class TestInstance(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - self.assertEqual(options["capped"], True) - self.assertTrue(options["size"] >= 10000) + assert options["capped"] == True + assert options["size"] >= 10000 # Check that the document with odd max_size value can be recreated class Log(Document): @@ -173,7 +173,7 @@ class TestInstance(MongoDBTestCase): doc = Article(title=u"привет мир") - self.assertEqual("", repr(doc)) + assert "" == repr(doc) def test_repr_none(self): """Ensure None values are handled correctly.""" @@ -185,11 +185,11 @@ class TestInstance(MongoDBTestCase): return None doc = Article(title=u"привет мир") - self.assertEqual("", repr(doc)) + assert "" == repr(doc) def test_queryset_resurrects_dropped_collection(self): self.Person.drop_collection() - self.assertEqual([], list(self.Person.objects())) + assert [] == list(self.Person.objects()) # Ensure works correctly with inhertited classes class Actor(self.Person): @@ -197,7 +197,7 @@ class TestInstance(MongoDBTestCase): Actor.objects() self.Person.drop_collection() - self.assertEqual([], list(Actor.objects())) + assert [] == list(Actor.objects()) def test_polymorphic_references(self): """Ensure that the correct subclasses are returned from a query @@ -237,7 +237,7 @@ class TestInstance(MongoDBTestCase): zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] - self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) + assert classes == [Animal, Fish, Mammal, Dog, Human] Zoo.drop_collection() @@ -250,7 +250,7 @@ class TestInstance(MongoDBTestCase): 
zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] - self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) + assert classes == [Animal, Fish, Mammal, Dog, Human] def test_reference_inheritance(self): class Stats(Document): @@ -275,7 +275,7 @@ class TestInstance(MongoDBTestCase): cmp_stats = CompareStats(stats=list_stats) cmp_stats.save() - self.assertEqual(list_stats, CompareStats.objects.first().stats) + assert list_stats == CompareStats.objects.first().stats def test_db_field_load(self): """Ensure we load data correctly from the right db field.""" @@ -294,8 +294,8 @@ class TestInstance(MongoDBTestCase): Person(name="Fred").save() - self.assertEqual(Person.objects.get(name="Jack").rank, "Corporal") - self.assertEqual(Person.objects.get(name="Fred").rank, "Private") + assert Person.objects.get(name="Jack").rank == "Corporal" + assert Person.objects.get(name="Fred").rank == "Private" def test_db_embedded_doc_field_load(self): """Ensure we load embedded document data correctly.""" @@ -318,8 +318,8 @@ class TestInstance(MongoDBTestCase): Person(name="Jack", rank_=Rank(title="Corporal")).save() Person(name="Fred").save() - self.assertEqual(Person.objects.get(name="Jack").rank, "Corporal") - self.assertEqual(Person.objects.get(name="Fred").rank, "Private") + assert Person.objects.get(name="Jack").rank == "Corporal" + assert Person.objects.get(name="Fred").rank == "Private" def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys.""" @@ -332,15 +332,15 @@ class TestInstance(MongoDBTestCase): User.drop_collection() - self.assertEqual(User._fields["username"].db_field, "_id") - self.assertEqual(User._meta["id_field"], "username") + assert User._fields["username"].db_field == "_id" + assert User._meta["id_field"] == "username" User.objects.create(username="test", name="test user") user = User.objects.first() - self.assertEqual(user.id, "test") - self.assertEqual(user.pk, "test") + assert user.id == "test" + assert user.pk == "test" user_dict = User.objects._collection.find_one() - self.assertEqual(user_dict["_id"], "test") + assert user_dict["_id"] == "test" def test_change_custom_id_field_in_subclass(self): """Subclasses cannot override which field is the primary key.""" @@ -350,13 +350,13 @@ class TestInstance(MongoDBTestCase): name = StringField() meta = {"allow_inheritance": True} - with self.assertRaises(ValueError) as e: + with pytest.raises(ValueError) as e: class EmailUser(User): email = StringField(primary_key=True) exc = e.exception - self.assertEqual(str(exc), "Cannot override primary key field") + assert str(exc) == "Cannot override primary key field" def test_custom_id_field_is_required(self): """Ensure the custom primary key field is required.""" @@ -365,10 +365,10 @@ class TestInstance(MongoDBTestCase): username = StringField(primary_key=True) name = StringField() - with self.assertRaises(ValidationError) as e: + with pytest.raises(ValidationError) as e: User(name="test").save() exc = e.exception - self.assertTrue("Field is required: ['username']" in str(exc)) + assert "Field is required: ['username']" in str(exc) def test_document_not_registered(self): class Place(Document): @@ -388,7 +388,7 @@ class TestInstance(MongoDBTestCase): # and the NicePlace model not being imported in at query time. 
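Several hunks in this file also rewrite the callable form self.assertRaises(Err, func, *args) into the context-manager form. A small sketch of both spellings, assuming only pytest; divide is a stand-in function, not one of the documents under test:

import pytest


def divide(a, b):
    return a / b


def test_context_manager_form():
    # Preferred: only the statement inside the block is expected to raise.
    with pytest.raises(ZeroDivisionError):
        divide(1, 0)


def test_callable_form():
    # pytest.raises also accepts a callable plus its arguments,
    # mirroring unittest's assertRaises(Err, func, *args).
    pytest.raises(ZeroDivisionError, divide, 1, 0)
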
del _document_registry["Place.NicePlace"] - with self.assertRaises(NotRegistered): + with pytest.raises(NotRegistered): list(Place.objects.all()) def test_document_registry_regressions(self): @@ -401,26 +401,27 @@ class TestInstance(MongoDBTestCase): Location.drop_collection() - self.assertEqual(Area, get_document("Area")) - self.assertEqual(Area, get_document("Location.Area")) + assert Area == get_document("Area") + assert Area == get_document("Location.Area") def test_creation(self): """Ensure that document may be created using keyword arguments.""" person = self.Person(name="Test User", age=30) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 30) + assert person.name == "Test User" + assert person.age == 30 def test_to_dbref(self): """Ensure that you can get a dbref of a document.""" person = self.Person(name="Test User", age=30) - self.assertRaises(OperationError, person.to_dbref) + with pytest.raises(OperationError): + person.to_dbref() person.save() person.to_dbref() def test_key_like_attribute_access(self): person = self.Person(age=30) - self.assertEqual(person["age"], 30) - with self.assertRaises(KeyError): + assert person["age"] == 30 + with pytest.raises(KeyError): person["unknown_attr"] def test_save_abstract_document(self): @@ -430,7 +431,7 @@ class TestInstance(MongoDBTestCase): name = StringField() meta = {"abstract": True} - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): Doc(name="aaa").save() def test_reload(self): @@ -443,20 +444,20 @@ class TestInstance(MongoDBTestCase): person_obj.age = 21 person_obj.save() - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 20) + assert person.name == "Test User" + assert person.age == 20 person.reload("age") - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 21) + assert person.name == "Test User" + assert person.age == 21 person.reload() - self.assertEqual(person.name, "Mr Test User") - self.assertEqual(person.age, 21) + assert person.name == "Mr Test User" + assert person.age == 21 person.reload() - self.assertEqual(person.name, "Mr Test User") - self.assertEqual(person.age, 21) + assert person.name == "Mr Test User" + assert person.age == 21 def test_reload_sharded(self): class Animal(Document): @@ -471,9 +472,10 @@ class TestInstance(MongoDBTestCase): with query_counter() as q: doc.reload() query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] - self.assertEqual( - set(query_op[CMD_QUERY_KEY]["filter"].keys()), {"_id", "superphylum"} - ) + assert set(query_op[CMD_QUERY_KEY]["filter"].keys()) == { + "_id", + "superphylum", + } def test_reload_sharded_with_db_field(self): class Person(Document): @@ -488,9 +490,7 @@ class TestInstance(MongoDBTestCase): with query_counter() as q: doc.reload() query_op = q.db.system.profile.find({"ns": "mongoenginetest.person"})[0] - self.assertEqual( - set(query_op[CMD_QUERY_KEY]["filter"].keys()), {"_id", "country"} - ) + assert set(query_op[CMD_QUERY_KEY]["filter"].keys()) == {"_id", "country"} def test_reload_sharded_nested(self): class SuperPhylum(EmbeddedDocument): @@ -526,15 +526,11 @@ class TestInstance(MongoDBTestCase): doc.name = "Cat" doc.save() query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] - self.assertEqual(query_op["op"], "update") + assert query_op["op"] == "update" if mongo_db <= MONGODB_34: - self.assertEqual( - set(query_op["query"].keys()), set(["_id", "is_mammal"]) - ) + assert set(query_op["query"].keys()) == set(["_id", 
"is_mammal"]) else: - self.assertEqual( - set(query_op["command"]["q"].keys()), set(["_id", "is_mammal"]) - ) + assert set(query_op["command"]["q"].keys()) == set(["_id", "is_mammal"]) Animal.drop_collection() @@ -551,12 +547,12 @@ class TestInstance(MongoDBTestCase): user.name = "John" user.number = 2 - self.assertEqual(user._get_changed_fields(), ["name", "number"]) + assert user._get_changed_fields() == ["name", "number"] user.reload("number") - self.assertEqual(user._get_changed_fields(), ["name"]) + assert user._get_changed_fields() == ["name"] user.save() user.reload() - self.assertEqual(user.name, "John") + assert user.name == "John" def test_reload_referencing(self): """Ensures reloading updates weakrefs correctly.""" @@ -587,47 +583,44 @@ class TestInstance(MongoDBTestCase): doc.embedded_field.list_field.append(1) doc.embedded_field.dict_field["woot"] = "woot" - self.assertEqual( - doc._get_changed_fields(), - [ - "list_field", - "dict_field.woot", - "embedded_field.list_field", - "embedded_field.dict_field.woot", - ], - ) + assert doc._get_changed_fields() == [ + "list_field", + "dict_field.woot", + "embedded_field.list_field", + "embedded_field.dict_field.woot", + ] doc.save() - self.assertEqual(len(doc.list_field), 4) + assert len(doc.list_field) == 4 doc = doc.reload(10) - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(len(doc.list_field), 4) - self.assertEqual(len(doc.dict_field), 2) - self.assertEqual(len(doc.embedded_field.list_field), 4) - self.assertEqual(len(doc.embedded_field.dict_field), 2) + assert doc._get_changed_fields() == [] + assert len(doc.list_field) == 4 + assert len(doc.dict_field) == 2 + assert len(doc.embedded_field.list_field) == 4 + assert len(doc.embedded_field.dict_field) == 2 doc.list_field.append(1) doc.save() doc.dict_field["extra"] = 1 doc = doc.reload(10, "list_field") - self.assertEqual(doc._get_changed_fields(), ["dict_field.extra"]) - self.assertEqual(len(doc.list_field), 5) - self.assertEqual(len(doc.dict_field), 3) - self.assertEqual(len(doc.embedded_field.list_field), 4) - self.assertEqual(len(doc.embedded_field.dict_field), 2) + assert doc._get_changed_fields() == ["dict_field.extra"] + assert len(doc.list_field) == 5 + assert len(doc.dict_field) == 3 + assert len(doc.embedded_field.list_field) == 4 + assert len(doc.embedded_field.dict_field) == 2 def test_reload_doesnt_exist(self): class Foo(Document): pass f = Foo() - with self.assertRaises(Foo.DoesNotExist): + with pytest.raises(Foo.DoesNotExist): f.reload() f.save() f.delete() - with self.assertRaises(Foo.DoesNotExist): + with pytest.raises(Foo.DoesNotExist): f.reload() def test_reload_of_non_strict_with_special_field_name(self): @@ -646,27 +639,29 @@ class TestInstance(MongoDBTestCase): post = Post.objects.first() post.reload() - self.assertEqual(post.title, "Items eclipse") - self.assertEqual(post.items, ["more lorem", "even more ipsum"]) + assert post.title == "Items eclipse" + assert post.items == ["more lorem", "even more ipsum"] def test_dictionary_access(self): """Ensure that dictionary-style field access works properly.""" person = self.Person(name="Test User", age=30, job=self.Job()) - self.assertEqual(person["name"], "Test User") + assert person["name"] == "Test User" - self.assertRaises(KeyError, person.__getitem__, "salary") - self.assertRaises(KeyError, person.__setitem__, "salary", 50) + with pytest.raises(KeyError): + person.__getitem__("salary") + with pytest.raises(KeyError): + person.__setitem__("salary", 50) person["name"] = "Another User" - 
self.assertEqual(person["name"], "Another User") + assert person["name"] == "Another User" # Length = length(assigned fields + id) - self.assertEqual(len(person), 5) + assert len(person) == 5 - self.assertIn("age", person) + assert "age" in person person.age = None - self.assertNotIn("age", person) - self.assertNotIn("nationality", person) + assert "age" not in person + assert "nationality" not in person def test_embedded_document_to_mongo(self): class Person(EmbeddedDocument): @@ -678,20 +673,20 @@ class TestInstance(MongoDBTestCase): class Employee(Person): salary = IntField() - self.assertEqual( - Person(name="Bob", age=35).to_mongo().keys(), ["_cls", "name", "age"] - ) - self.assertEqual( - Employee(name="Bob", age=35, salary=0).to_mongo().keys(), - ["_cls", "name", "age", "salary"], - ) + assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] + assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ + "_cls", + "name", + "age", + "salary", + ] def test_embedded_document_to_mongo_id(self): class SubDoc(EmbeddedDocument): id = StringField(required=True) sub_doc = SubDoc(id="abc") - self.assertEqual(sub_doc.to_mongo().keys(), ["id"]) + assert sub_doc.to_mongo().keys() == ["id"] def test_embedded_document(self): """Ensure that embedded documents are set up correctly.""" @@ -699,8 +694,8 @@ class TestInstance(MongoDBTestCase): class Comment(EmbeddedDocument): content = StringField() - self.assertIn("content", Comment._fields) - self.assertNotIn("id", Comment._fields) + assert "content" in Comment._fields + assert "id" not in Comment._fields def test_embedded_document_instance(self): """Ensure that embedded documents can reference parent instance.""" @@ -753,7 +748,7 @@ class TestInstance(MongoDBTestCase): .to_mongo(use_db_field=False) .to_dict() ) - self.assertEqual(d["embedded_field"], [{"string": "Hi"}]) + assert d["embedded_field"] == [{"string": "Hi"}] def test_instance_is_set_on_setattr(self): class Email(EmbeddedDocument): @@ -796,7 +791,7 @@ class TestInstance(MongoDBTestCase): def clean(self): raise CustomError() - with self.assertRaises(CustomError): + with pytest.raises(CustomError): TestDocument().save() TestDocument().save(clean=False) @@ -816,10 +811,10 @@ class TestInstance(MongoDBTestCase): BlogPost.drop_collection() post = BlogPost(content="unchecked").save() - self.assertEqual(post.content, "checked") + assert post.content == "checked" # Make sure pre_save_post_validation changes makes it to the db raw_doc = get_as_pymongo(post) - self.assertEqual(raw_doc, {"content": "checked", "_id": post.id}) + assert raw_doc == {"content": "checked", "_id": post.id} # Important to disconnect as it could cause some assertions in test_signals # to fail (due to the garbage collection timing of this signal) @@ -840,17 +835,17 @@ class TestInstance(MongoDBTestCase): # Ensure clean=False prevent call to clean t = TestDocument(status="published") t.save(clean=False) - self.assertEqual(t.status, "published") - self.assertEqual(t.cleaned, False) + assert t.status == "published" + assert t.cleaned == False t = TestDocument(status="published") - self.assertEqual(t.cleaned, False) + assert t.cleaned == False t.save(clean=True) - self.assertEqual(t.status, "published") - self.assertEqual(t.cleaned, True) + assert t.status == "published" + assert t.cleaned == True raw_doc = get_as_pymongo(t) # Make sure clean changes makes it to the db - self.assertEqual(raw_doc, {"status": "published", "cleaned": True, "_id": t.id}) + assert raw_doc == {"status": "published", 
"cleaned": True, "_id": t.id} def test_document_embedded_clean(self): class TestEmbeddedDocument(EmbeddedDocument): @@ -875,15 +870,15 @@ class TestInstance(MongoDBTestCase): t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) - with self.assertRaises(ValidationError) as cm: + with pytest.raises(ValidationError) as cm: t.save() expected_msg = "Value of z != x + y" - self.assertIn(expected_msg, cm.exception.message) - self.assertEqual(cm.exception.to_dict(), {"doc": {"__all__": expected_msg}}) + assert expected_msg in cm.exception.message + assert cm.exception.to_dict() == {"doc": {"__all__": expected_msg}} t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() - self.assertEqual(t.doc.z, 35) + assert t.doc.z == 35 # Asserts not raises t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5)) @@ -892,7 +887,7 @@ class TestInstance(MongoDBTestCase): def test_modify_empty(self): doc = self.Person(name="bob", age=10).save() - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): self.Person().modify(set__age=10) self.assertDbEqual([dict(doc.to_mongo())]) @@ -902,7 +897,7 @@ class TestInstance(MongoDBTestCase): doc2 = self.Person(name="jim", age=20).save() docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): doc1.modify({"id": doc2.id}, set__value=20) self.assertDbEqual(docs) @@ -913,7 +908,7 @@ class TestInstance(MongoDBTestCase): docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] n_modified = doc1.modify({"name": doc2.name}, set__age=100) - self.assertEqual(n_modified, 0) + assert n_modified == 0 self.assertDbEqual(docs) @@ -923,7 +918,7 @@ class TestInstance(MongoDBTestCase): docs = [dict(doc1.to_mongo())] n_modified = doc2.modify({"name": doc2.name}, set__age=100) - self.assertEqual(n_modified, 0) + assert n_modified == 0 self.assertDbEqual(docs) @@ -943,13 +938,13 @@ class TestInstance(MongoDBTestCase): n_modified = doc.modify( set__age=21, set__job__name="MongoDB", unset__job__years=True ) - self.assertEqual(n_modified, 1) + assert n_modified == 1 doc_copy.age = 21 doc_copy.job.name = "MongoDB" del doc_copy.job.years - self.assertEqual(doc.to_json(), doc_copy.to_json()) - self.assertEqual(doc._get_changed_fields(), []) + assert doc.to_json() == doc_copy.to_json() + assert doc._get_changed_fields() == [] self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) @@ -965,27 +960,25 @@ class TestInstance(MongoDBTestCase): tags=["python"], content=Content(keywords=["ipsum"]) ) - self.assertEqual(post.tags, ["python"]) + assert post.tags == ["python"] post.modify(push__tags__0=["code", "mongo"]) - self.assertEqual(post.tags, ["code", "mongo", "python"]) + assert post.tags == ["code", "mongo", "python"] # Assert same order of the list items is maintained in the db - self.assertEqual( - BlogPost._get_collection().find_one({"_id": post.pk})["tags"], - ["code", "mongo", "python"], - ) + assert BlogPost._get_collection().find_one({"_id": post.pk})["tags"] == [ + "code", + "mongo", + "python", + ] - self.assertEqual(post.content.keywords, ["ipsum"]) + assert post.content.keywords == ["ipsum"] post.modify(push__content__keywords__0=["lorem"]) - self.assertEqual(post.content.keywords, ["lorem", "ipsum"]) + assert post.content.keywords == ["lorem", "ipsum"] # Assert same order of the list items is maintained in the db - self.assertEqual( - BlogPost._get_collection().find_one({"_id": post.pk})["content"][ - "keywords" - ], - ["lorem", "ipsum"], - ) + 
assert BlogPost._get_collection().find_one({"_id": post.pk})["content"][ + "keywords" + ] == ["lorem", "ipsum"] def test_save(self): """Ensure that a document may be saved in the database.""" @@ -996,28 +989,30 @@ class TestInstance(MongoDBTestCase): # Ensure that the object is in the database raw_doc = get_as_pymongo(person) - self.assertEqual( - raw_doc, - {"_cls": "Person", "name": "Test User", "age": 30, "_id": person.id}, - ) + assert raw_doc == { + "_cls": "Person", + "name": "Test User", + "age": 30, + "_id": person.id, + } def test_save_skip_validation(self): class Recipient(Document): email = EmailField(required=True) recipient = Recipient(email="not-an-email") - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): recipient.save() recipient.save(validate=False) raw_doc = get_as_pymongo(recipient) - self.assertEqual(raw_doc, {"email": "not-an-email", "_id": recipient.id}) + assert raw_doc == {"email": "not-an-email", "_id": recipient.id} def test_save_with_bad_id(self): class Clown(Document): id = IntField(primary_key=True) - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): Clown(id="not_an_int").save() def test_save_to_a_value_that_equates_to_false(self): @@ -1037,7 +1032,7 @@ class TestInstance(MongoDBTestCase): user.save() user.reload() - self.assertEqual(user.thing.count, 0) + assert user.thing.count == 0 def test_save_max_recursion_not_hit(self): class Person(Document): @@ -1085,7 +1080,7 @@ class TestInstance(MongoDBTestCase): b.name = "world" b.save() - self.assertEqual(b.picture, b.bar.picture, b.bar.bar.picture) + assert b.picture == b.bar.picture, b.bar.bar.picture def test_save_cascades(self): class Person(Document): @@ -1107,7 +1102,7 @@ class TestInstance(MongoDBTestCase): p.save(cascade=True) p1.reload() - self.assertEqual(p1.name, p.parent.name) + assert p1.name == p.parent.name def test_save_cascade_kwargs(self): class Person(Document): @@ -1127,7 +1122,7 @@ class TestInstance(MongoDBTestCase): p1.reload() p2.reload() - self.assertEqual(p1.name, p2.parent.name) + assert p1.name == p2.parent.name def test_save_cascade_meta_false(self): class Person(Document): @@ -1151,11 +1146,11 @@ class TestInstance(MongoDBTestCase): p.save() p1.reload() - self.assertNotEqual(p1.name, p.parent.name) + assert p1.name != p.parent.name p.save(cascade=True) p1.reload() - self.assertEqual(p1.name, p.parent.name) + assert p1.name == p.parent.name def test_save_cascade_meta_true(self): class Person(Document): @@ -1179,7 +1174,7 @@ class TestInstance(MongoDBTestCase): p.save() p1.reload() - self.assertNotEqual(p1.name, p.parent.name) + assert p1.name != p.parent.name def test_save_cascades_generically(self): class Person(Document): @@ -1200,11 +1195,11 @@ class TestInstance(MongoDBTestCase): p.save() p1.reload() - self.assertNotEqual(p1.name, p.parent.name) + assert p1.name != p.parent.name p.save(cascade=True) p1.reload() - self.assertEqual(p1.name, p.parent.name) + assert p1.name == p.parent.name def test_save_atomicity_condition(self): class Widget(Document): @@ -1226,64 +1221,61 @@ class TestInstance(MongoDBTestCase): # ignore save_condition on new record creation w1.save(save_condition={"save_id": UUID(42)}) w1.reload() - self.assertFalse(w1.toggle) - self.assertEqual(w1.save_id, UUID(1)) - self.assertEqual(w1.count, 0) + assert not w1.toggle + assert w1.save_id == UUID(1) + assert w1.count == 0 # mismatch in save_condition prevents save and raise exception flip(w1) - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 1) 
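The truthiness and equality helpers (assertTrue, assertFalse, assertEqual) become bare assert statements throughout these hunks; pytest rewrites plain asserts at import time, so a failure still reports both compared values. A short sketch under that assumption — the options dict is a stand-in for the capped-collection options checked earlier, not data from the test database:

def test_plain_asserts_keep_rich_failure_output():
    options = {"capped": True, "max": 10, "size": 4096}
    assert options["capped"] is True       # identity check for booleans
    assert options["max"] == 10            # a failure prints both sides
    assert not options.get("missing")      # falsy check replaces assertFalse
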
- self.assertRaises( - SaveConditionError, w1.save, save_condition={"save_id": UUID(42)} - ) + assert w1.toggle + assert w1.count == 1 + with pytest.raises(SaveConditionError): + w1.save(save_condition={"save_id": UUID(42)}) w1.reload() - self.assertFalse(w1.toggle) - self.assertEqual(w1.count, 0) + assert not w1.toggle + assert w1.count == 0 # matched save_condition allows save flip(w1) - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 1) + assert w1.toggle + assert w1.count == 1 w1.save(save_condition={"save_id": UUID(1)}) w1.reload() - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 1) + assert w1.toggle + assert w1.count == 1 # save_condition can be used to ensure atomic read & updates # i.e., prevent interleaved reads and writes from separate contexts w2 = Widget.objects.get() - self.assertEqual(w1, w2) + assert w1 == w2 old_id = w1.save_id flip(w1) w1.save_id = UUID(2) w1.save(save_condition={"save_id": old_id}) w1.reload() - self.assertFalse(w1.toggle) - self.assertEqual(w1.count, 2) + assert not w1.toggle + assert w1.count == 2 flip(w2) flip(w2) - self.assertRaises( - SaveConditionError, w2.save, save_condition={"save_id": old_id} - ) + with pytest.raises(SaveConditionError): + w2.save(save_condition={"save_id": old_id}) w2.reload() - self.assertFalse(w2.toggle) - self.assertEqual(w2.count, 2) + assert not w2.toggle + assert w2.count == 2 # save_condition uses mongoengine-style operator syntax flip(w1) w1.save(save_condition={"count__lt": w1.count}) w1.reload() - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 3) + assert w1.toggle + assert w1.count == 3 flip(w1) - self.assertRaises( - SaveConditionError, w1.save, save_condition={"count__gte": w1.count} - ) + with pytest.raises(SaveConditionError): + w1.save(save_condition={"count__gte": w1.count}) w1.reload() - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 3) + assert w1.toggle + assert w1.count == 3 def test_save_update_selectively(self): class WildBoy(Document): @@ -1303,8 +1295,8 @@ class TestInstance(MongoDBTestCase): boy2.save() fresh_boy = WildBoy.objects().first() - self.assertEqual(fresh_boy.age, 99) - self.assertEqual(fresh_boy.name, "Bob") + assert fresh_boy.age == 99 + assert fresh_boy.name == "Bob" def test_save_update_selectively_with_custom_pk(self): # Prevents regression of #2082 @@ -1326,8 +1318,8 @@ class TestInstance(MongoDBTestCase): boy2.save() fresh_boy = WildBoy.objects().first() - self.assertEqual(fresh_boy.age, 99) - self.assertEqual(fresh_boy.name, "Bob") + assert fresh_boy.age == 99 + assert fresh_boy.name == "Bob" def test_update(self): """Ensure that an existing document is updated instead of be @@ -1343,20 +1335,20 @@ class TestInstance(MongoDBTestCase): same_person.save() # Confirm only one object - self.assertEqual(self.Person.objects.count(), 1) + assert self.Person.objects.count() == 1 # reload person.reload() same_person.reload() # Confirm the same - self.assertEqual(person, same_person) - self.assertEqual(person.name, same_person.name) - self.assertEqual(person.age, same_person.age) + assert person == same_person + assert person.name == same_person.name + assert person.age == same_person.age # Confirm the saved values - self.assertEqual(person.name, "Test") - self.assertEqual(person.age, 30) + assert person.name == "Test" + assert person.age == 30 # Test only / exclude only updates included fields person = self.Person.objects.only("name").get() @@ -1364,8 +1356,8 @@ class TestInstance(MongoDBTestCase): person.save() person.reload() - self.assertEqual(person.name, 
"User") - self.assertEqual(person.age, 30) + assert person.name == "User" + assert person.age == 30 # test exclude only updates set fields person = self.Person.objects.exclude("name").get() @@ -1373,8 +1365,8 @@ class TestInstance(MongoDBTestCase): person.save() person.reload() - self.assertEqual(person.name, "User") - self.assertEqual(person.age, 21) + assert person.name == "User" + assert person.age == 21 # Test only / exclude can set non excluded / included fields person = self.Person.objects.only("name").get() @@ -1383,8 +1375,8 @@ class TestInstance(MongoDBTestCase): person.save() person.reload() - self.assertEqual(person.name, "Test") - self.assertEqual(person.age, 30) + assert person.name == "Test" + assert person.age == 30 # test exclude only updates set fields person = self.Person.objects.exclude("name").get() @@ -1393,8 +1385,8 @@ class TestInstance(MongoDBTestCase): person.save() person.reload() - self.assertEqual(person.name, "User") - self.assertEqual(person.age, 21) + assert person.name == "User" + assert person.age == 21 # Confirm does remove unrequired fields person = self.Person.objects.exclude("name").get() @@ -1402,8 +1394,8 @@ class TestInstance(MongoDBTestCase): person.save() person.reload() - self.assertEqual(person.name, "User") - self.assertEqual(person.age, None) + assert person.name == "User" + assert person.age == None person = self.Person.objects.get() person.name = None @@ -1411,20 +1403,20 @@ class TestInstance(MongoDBTestCase): person.save() person.reload() - self.assertEqual(person.name, None) - self.assertEqual(person.age, None) + assert person.name == None + assert person.age == None def test_update_rename_operator(self): """Test the $rename operator.""" coll = self.Person._get_collection() doc = self.Person(name="John").save() raw_doc = coll.find_one({"_id": doc.pk}) - self.assertEqual(set(raw_doc.keys()), set(["_id", "_cls", "name"])) + assert set(raw_doc.keys()) == set(["_id", "_cls", "name"]) doc.update(rename__name="first_name") raw_doc = coll.find_one({"_id": doc.pk}) - self.assertEqual(set(raw_doc.keys()), set(["_id", "_cls", "first_name"])) - self.assertEqual(raw_doc["first_name"], "John") + assert set(raw_doc.keys()) == set(["_id", "_cls", "first_name"]) + assert raw_doc["first_name"] == "John" def test_inserts_if_you_set_the_pk(self): p1 = self.Person(name="p1", id=bson.ObjectId()).save() @@ -1432,7 +1424,7 @@ class TestInstance(MongoDBTestCase): p2.id = bson.ObjectId() p2.save() - self.assertEqual(2, self.Person.objects.count()) + assert 2 == self.Person.objects.count() def test_can_save_if_not_included(self): class EmbeddedDoc(EmbeddedDocument): @@ -1480,13 +1472,13 @@ class TestInstance(MongoDBTestCase): my_doc.save() my_doc = Doc.objects.get(string_field="string") - self.assertEqual(my_doc.string_field, "string") - self.assertEqual(my_doc.int_field, 1) + assert my_doc.string_field == "string" + assert my_doc.int_field == 1 def test_document_update(self): # try updating a non-saved document - with self.assertRaises(OperationError): + with pytest.raises(OperationError): person = self.Person(name="dcrosta") person.update(set__name="Dan Crosta") @@ -1497,10 +1489,10 @@ class TestInstance(MongoDBTestCase): author.reload() p1 = self.Person.objects.first() - self.assertEqual(p1.name, author.name) + assert p1.name == author.name # try sending an empty update - with self.assertRaises(OperationError): + with pytest.raises(OperationError): person = self.Person.objects.first() person.update() @@ -1509,7 +1501,7 @@ class TestInstance(MongoDBTestCase): 
person = self.Person.objects.first() person.update(name="Dan") person.reload() - self.assertEqual("Dan", person.name) + assert "Dan" == person.name def test_update_unique_field(self): class Doc(Document): @@ -1518,7 +1510,7 @@ class TestInstance(MongoDBTestCase): doc1 = Doc(name="first").save() doc2 = Doc(name="second").save() - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): doc2.update(set__name=doc1.name) def test_embedded_update(self): @@ -1540,7 +1532,7 @@ class TestInstance(MongoDBTestCase): site.save() site = Site.objects.first() - self.assertEqual(site.page.log_message, "Error: Dummy message") + assert site.page.log_message == "Error: Dummy message" def test_update_list_field(self): """Test update on `ListField` with $pull + $in. @@ -1558,7 +1550,7 @@ class TestInstance(MongoDBTestCase): doc.update(pull__foo__in=["a", "c"]) doc = Doc.objects.first() - self.assertEqual(doc.foo, ["b"]) + assert doc.foo == ["b"] def test_embedded_update_db_field(self): """Test update on `EmbeddedDocumentField` fields when db_field @@ -1584,7 +1576,7 @@ class TestInstance(MongoDBTestCase): site.save() site = Site.objects.first() - self.assertEqual(site.page.log_message, "Error: Dummy message") + assert site.page.log_message == "Error: Dummy message" def test_save_only_changed_fields(self): """Ensure save only sets / unsets changed fields.""" @@ -1610,9 +1602,9 @@ class TestInstance(MongoDBTestCase): same_person.save() person = self.Person.objects.get() - self.assertEqual(person.name, "User") - self.assertEqual(person.age, 21) - self.assertEqual(person.active, False) + assert person.name == "User" + assert person.age == 21 + assert person.active == False def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc( self, @@ -1626,7 +1618,7 @@ class TestInstance(MongoDBTestCase): emb = EmbeddedChildModel(id={"1": [1]}) changed_fields = ParentModel(child=emb)._get_changed_fields() - self.assertEqual(changed_fields, []) + assert changed_fields == [] def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc( self, @@ -1647,8 +1639,8 @@ class TestInstance(MongoDBTestCase): message = Message(id=1, author=user).save() message.author.name = "tutu" - self.assertEqual(message._get_changed_fields(), []) - self.assertEqual(user._get_changed_fields(), ["name"]) + assert message._get_changed_fields() == [] + assert user._get_changed_fields() == ["name"] def test__get_changed_fields_same_ids_embedded(self): # Refers to Issue #1768 @@ -1667,11 +1659,11 @@ class TestInstance(MongoDBTestCase): message = Message(id=1, author=user).save() message.author.name = "tutu" - self.assertEqual(message._get_changed_fields(), ["author.name"]) + assert message._get_changed_fields() == ["author.name"] message.save() message_fetched = Message.objects.with_id(message.id) - self.assertEqual(message_fetched.author.name, "tutu") + assert message_fetched.author.name == "tutu" def test_query_count_when_saving(self): """Ensure references don't cause extra fetches when saving""" @@ -1707,65 +1699,65 @@ class TestInstance(MongoDBTestCase): user = User.objects.first() # Even if stored as ObjectId's internally mongoengine uses DBRefs # As ObjectId's aren't automatically derefenced - self.assertIsInstance(user._data["orgs"][0], DBRef) - self.assertIsInstance(user.orgs[0], Organization) - self.assertIsInstance(user._data["orgs"][0], Organization) + assert isinstance(user._data["orgs"][0], DBRef) + assert isinstance(user.orgs[0], Organization) + 
assert isinstance(user._data["orgs"][0], Organization) # Changing a value with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription.objects.first() - self.assertEqual(q, 1) + assert q == 1 sub.name = "Test Sub" sub.save() - self.assertEqual(q, 2) + assert q == 2 # Changing a value that will cascade with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription.objects.first() - self.assertEqual(q, 1) + assert q == 1 sub.user.name = "Test" - self.assertEqual(q, 2) + assert q == 2 sub.save(cascade=True) - self.assertEqual(q, 3) + assert q == 3 # Changing a value and one that will cascade with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription.objects.first() sub.name = "Test Sub 2" - self.assertEqual(q, 1) + assert q == 1 sub.user.name = "Test 2" - self.assertEqual(q, 2) + assert q == 2 sub.save(cascade=True) - self.assertEqual(q, 4) # One for the UserSub and one for the User + assert q == 4 # One for the UserSub and one for the User # Saving with just the refs with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription(user=u1.pk, feed=f1.pk) - self.assertEqual(q, 0) + assert q == 0 sub.save() - self.assertEqual(q, 1) + assert q == 1 # Saving with just the refs on a ListField with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 User(name="Bob", orgs=[o1.pk, o2.pk]).save() - self.assertEqual(q, 1) + assert q == 1 # Saving new objects with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 user = User.objects.first() - self.assertEqual(q, 1) + assert q == 1 feed = Feed.objects.first() - self.assertEqual(q, 2) + assert q == 2 sub = UserSubscription(user=user, feed=feed) - self.assertEqual(q, 2) # Check no change + assert q == 2 # Check no change sub.save() - self.assertEqual(q, 3) + assert q == 3 def test_set_unset_one_operation(self): """Ensure that $set and $unset actions are performed in the @@ -1781,14 +1773,14 @@ class TestInstance(MongoDBTestCase): # write an entity with a single prop foo = FooBar(foo="foo").save() - self.assertEqual(foo.foo, "foo") + assert foo.foo == "foo" del foo.foo foo.bar = "bar" with query_counter() as q: - self.assertEqual(0, q) + assert 0 == q foo.save() - self.assertEqual(1, q) + assert 1 == q def test_save_only_changed_fields_recursive(self): """Ensure save only sets / unsets changed fields.""" @@ -1810,34 +1802,34 @@ class TestInstance(MongoDBTestCase): person.reload() person = self.Person.objects.get() - self.assertTrue(person.comments[0].published) + assert person.comments[0].published person.comments[0].published = False person.save() person = self.Person.objects.get() - self.assertFalse(person.comments[0].published) + assert not person.comments[0].published # Simple dict w person.comments_dict["first_post"] = Comment() person.save() person = self.Person.objects.get() - self.assertTrue(person.comments_dict["first_post"].published) + assert person.comments_dict["first_post"].published person.comments_dict["first_post"].published = False person.save() person = self.Person.objects.get() - self.assertFalse(person.comments_dict["first_post"].published) + assert not person.comments_dict["first_post"].published def test_delete(self): """Ensure that document may be deleted using the delete method.""" person = self.Person(name="Test User", age=30) person.save() - self.assertEqual(self.Person.objects.count(), 1) + assert self.Person.objects.count() == 1 person.delete() - 
self.assertEqual(self.Person.objects.count(), 0) + assert self.Person.objects.count() == 0 def test_save_custom_id(self): """Ensure that a document may be saved with a custom _id.""" @@ -1849,7 +1841,7 @@ class TestInstance(MongoDBTestCase): # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({"name": "Test User"}) - self.assertEqual(str(person_obj["_id"]), "497ce96f395f2f052a494fd4") + assert str(person_obj["_id"]) == "497ce96f395f2f052a494fd4" def test_save_custom_pk(self): """Ensure that a document may be saved with a custom _id using @@ -1862,7 +1854,7 @@ class TestInstance(MongoDBTestCase): # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({"name": "Test User"}) - self.assertEqual(str(person_obj["_id"]), "497ce96f395f2f052a494fd4") + assert str(person_obj["_id"]) == "497ce96f395f2f052a494fd4" def test_save_list(self): """Ensure that a list field may be properly saved.""" @@ -1885,9 +1877,9 @@ class TestInstance(MongoDBTestCase): collection = self.db[BlogPost._get_collection_name()] post_obj = collection.find_one() - self.assertEqual(post_obj["tags"], tags) + assert post_obj["tags"] == tags for comment_obj, comment in zip(post_obj["comments"], comments): - self.assertEqual(comment_obj["content"], comment["content"]) + assert comment_obj["content"] == comment["content"] def test_list_search_by_embedded(self): class User(Document): @@ -1944,9 +1936,9 @@ class TestInstance(MongoDBTestCase): p4 = Page(comments=[Comment(user=u2, comment="Heavy Metal song")]) p4.save() - self.assertEqual([p1, p2], list(Page.objects.filter(comments__user=u1))) - self.assertEqual([p1, p2, p4], list(Page.objects.filter(comments__user=u2))) - self.assertEqual([p1, p3], list(Page.objects.filter(comments__user=u3))) + assert [p1, p2] == list(Page.objects.filter(comments__user=u1)) + assert [p1, p2, p4] == list(Page.objects.filter(comments__user=u2)) + assert [p1, p3] == list(Page.objects.filter(comments__user=u3)) def test_save_embedded_document(self): """Ensure that a document with an embedded document field may @@ -1968,11 +1960,11 @@ class TestInstance(MongoDBTestCase): # Ensure that the object is in the database collection = self.db[self.Person._get_collection_name()] employee_obj = collection.find_one({"name": "Test Employee"}) - self.assertEqual(employee_obj["name"], "Test Employee") - self.assertEqual(employee_obj["age"], 50) + assert employee_obj["name"] == "Test Employee" + assert employee_obj["age"] == 50 # Ensure that the 'details' embedded object saved correctly - self.assertEqual(employee_obj["details"]["position"], "Developer") + assert employee_obj["details"]["position"] == "Developer" def test_embedded_update_after_save(self): """Test update of `EmbeddedDocumentField` attached to a newly @@ -1994,7 +1986,7 @@ class TestInstance(MongoDBTestCase): site.save() site = Site.objects.first() - self.assertEqual(site.page.log_message, "Error: Dummy message") + assert site.page.log_message == "Error: Dummy message" def test_updating_an_embedded_document(self): """Ensure that a document with an embedded document field may @@ -2019,18 +2011,18 @@ class TestInstance(MongoDBTestCase): promoted_employee.save() promoted_employee.reload() - self.assertEqual(promoted_employee.name, "Test Employee") - self.assertEqual(promoted_employee.age, 50) + assert promoted_employee.name == "Test Employee" + assert 
promoted_employee.age == 50 # Ensure that the 'details' embedded object saved correctly - self.assertEqual(promoted_employee.details.position, "Senior Developer") + assert promoted_employee.details.position == "Senior Developer" # Test removal promoted_employee.details = None promoted_employee.save() promoted_employee.reload() - self.assertEqual(promoted_employee.details, None) + assert promoted_employee.details == None def test_object_mixins(self): class NameMixin(object): @@ -2039,12 +2031,12 @@ class TestInstance(MongoDBTestCase): class Foo(EmbeddedDocument, NameMixin): quantity = IntField() - self.assertEqual(["name", "quantity"], sorted(Foo._fields.keys())) + assert ["name", "quantity"] == sorted(Foo._fields.keys()) class Bar(Document, NameMixin): widgets = StringField() - self.assertEqual(["id", "name", "widgets"], sorted(Bar._fields.keys())) + assert ["id", "name", "widgets"] == sorted(Bar._fields.keys()) def test_mixin_inheritance(self): class BaseMixIn(object): @@ -2064,10 +2056,10 @@ class TestInstance(MongoDBTestCase): t = TestDoc.objects.first() - self.assertEqual(t.age, 19) - self.assertEqual(t.comment, "great!") - self.assertEqual(t.data, "test") - self.assertEqual(t.count, 12) + assert t.age == 19 + assert t.comment == "great!" + assert t.data == "test" + assert t.count == 12 def test_save_reference(self): """Ensure that a document reference field may be saved in the @@ -2092,22 +2084,22 @@ class TestInstance(MongoDBTestCase): post_obj = BlogPost.objects.first() # Test laziness - self.assertIsInstance(post_obj._data["author"], bson.DBRef) - self.assertIsInstance(post_obj.author, self.Person) - self.assertEqual(post_obj.author.name, "Test User") + assert isinstance(post_obj._data["author"], bson.DBRef) + assert isinstance(post_obj.author, self.Person) + assert post_obj.author.name == "Test User" # Ensure that the dereferenced object may be changed and saved post_obj.author.age = 25 post_obj.author.save() author = list(self.Person.objects(name="Test User"))[-1] - self.assertEqual(author.age, 25) + assert author.age == 25 def test_duplicate_db_fields_raise_invalid_document_error(self): """Ensure a InvalidDocumentError is thrown if duplicate fields declare the same db_field. 
""" - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): class Foo(Document): name = StringField() @@ -2125,7 +2117,7 @@ class TestInstance(MongoDBTestCase): forms = ListField(StringField(), default=list) occurs = ListField(EmbeddedDocumentField(Occurrence), default=list) - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): Word._from_son( { "stem": [1, 2, 3], @@ -2136,7 +2128,7 @@ class TestInstance(MongoDBTestCase): ) # Tests for issue #1438: https://github.com/MongoEngine/mongoengine/issues/1438 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): Word._from_son("this is not a valid SON dict") def test_reverse_delete_rule_cascade_and_nullify(self): @@ -2165,12 +2157,12 @@ class TestInstance(MongoDBTestCase): reviewer.delete() # No effect on the BlogPost - self.assertEqual(BlogPost.objects.count(), 1) - self.assertEqual(BlogPost.objects.get().reviewer, None) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.get().reviewer == None # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() - self.assertEqual(BlogPost.objects.count(), 0) + assert BlogPost.objects.count() == 0 def test_reverse_delete_rule_pull(self): """Ensure that a referenced document is also deleted with @@ -2189,7 +2181,7 @@ class TestInstance(MongoDBTestCase): parent_record.save() child_record.delete() - self.assertEqual(Record.objects(name="parent").get().children, []) + assert Record.objects(name="parent").get().children == [] def test_reverse_delete_rule_with_custom_id_field(self): """Ensure that a referenced document with custom primary key @@ -2211,11 +2203,11 @@ class TestInstance(MongoDBTestCase): book = Book(author=user, reviewer=reviewer).save() reviewer.delete() - self.assertEqual(Book.objects.count(), 1) - self.assertEqual(Book.objects.get().reviewer, None) + assert Book.objects.count() == 1 + assert Book.objects.get().reviewer == None user.delete() - self.assertEqual(Book.objects.count(), 0) + assert Book.objects.count() == 0 def test_reverse_delete_rule_with_shared_id_among_collections(self): """Ensure that cascade delete rule doesn't mix id among @@ -2239,16 +2231,16 @@ class TestInstance(MongoDBTestCase): user_2.delete() # Deleting user_2 should also delete book_1 but not book_2 - self.assertEqual(Book.objects.count(), 1) - self.assertEqual(Book.objects.get(), book_2) + assert Book.objects.count() == 1 + assert Book.objects.get() == book_2 user_3 = User(id=3).save() book_3 = Book(id=3, author=user_3).save() user_3.delete() # Deleting user_3 should also delete book_3 - self.assertEqual(Book.objects.count(), 1) - self.assertEqual(Book.objects.get(), book_2) + assert Book.objects.count() == 1 + assert Book.objects.get() == book_2 def test_reverse_delete_rule_with_document_inheritance(self): """Ensure that a referenced document is also deleted upon @@ -2278,12 +2270,12 @@ class TestInstance(MongoDBTestCase): post.save() reviewer.delete() - self.assertEqual(BlogPost.objects.count(), 1) - self.assertEqual(BlogPost.objects.get().reviewer, None) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.get().reviewer == None # Delete the Writer should lead to deletion of the BlogPost author.delete() - self.assertEqual(BlogPost.objects.count(), 0) + assert BlogPost.objects.count() == 0 def test_reverse_delete_rule_cascade_and_nullify_complex_field(self): """Ensure that a referenced document is also deleted upon @@ -2315,12 +2307,12 @@ class 
TestInstance(MongoDBTestCase): # Deleting the reviewer should have no effect on the BlogPost reviewer.delete() - self.assertEqual(BlogPost.objects.count(), 1) - self.assertEqual(BlogPost.objects.get().reviewers, []) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.get().reviewers == [] # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() - self.assertEqual(BlogPost.objects.count(), 0) + assert BlogPost.objects.count() == 0 def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self): """Ensure the pre_delete signal is triggered upon a cascading @@ -2357,7 +2349,7 @@ class TestInstance(MongoDBTestCase): # the pre-delete signal should have decremented the editor's queue editor = Editor.objects(name="Max P.").get() - self.assertEqual(editor.review_queue, 0) + assert editor.review_queue == 0 def test_two_way_reverse_delete_rule(self): """Ensure that Bi-Directional relationships work with @@ -2389,11 +2381,11 @@ class TestInstance(MongoDBTestCase): f.delete() - self.assertEqual(Bar.objects.count(), 1) # No effect on the BlogPost - self.assertEqual(Bar.objects.get().foo, None) + assert Bar.objects.count() == 1 # No effect on the BlogPost + assert Bar.objects.get().foo == None def test_invalid_reverse_delete_rule_raise_errors(self): - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): class Blog(Document): content = StringField() @@ -2404,7 +2396,7 @@ class TestInstance(MongoDBTestCase): field=ReferenceField(self.Person, reverse_delete_rule=NULLIFY) ) - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): class Parents(EmbeddedDocument): father = ReferenceField("Person", reverse_delete_rule=DENY) @@ -2441,7 +2433,7 @@ class TestInstance(MongoDBTestCase): # Delete the Person, which should lead to deletion of the BlogPost, # and, recursively to the Comment, too author.delete() - self.assertEqual(Comment.objects.count(), 0) + assert Comment.objects.count() == 0 def test_reverse_delete_rule_deny(self): """Ensure that a document cannot be referenced if there are @@ -2463,19 +2455,18 @@ class TestInstance(MongoDBTestCase): post.save() # Delete the Person should be denied - self.assertRaises(OperationError, author.delete) # Should raise denied error - self.assertEqual( - BlogPost.objects.count(), 1 - ) # No objects may have been deleted - self.assertEqual(self.Person.objects.count(), 1) + with pytest.raises(OperationError): + author.delete() # Should raise denied error + assert BlogPost.objects.count() == 1 # No objects may have been deleted + assert self.Person.objects.count() == 1 # Other users, that don't have BlogPosts must be removable, like normal author = self.Person(name="Another User") author.save() - self.assertEqual(self.Person.objects.count(), 2) + assert self.Person.objects.count() == 2 author.delete() - self.assertEqual(self.Person.objects.count(), 1) + assert self.Person.objects.count() == 1 def subclasses_and_unique_keys_works(self): class A(Document): @@ -2491,8 +2482,8 @@ class TestInstance(MongoDBTestCase): A().save() B(foo=True).save() - self.assertEqual(A.objects.count(), 2) - self.assertEqual(B.objects.count(), 1) + assert A.objects.count() == 2 + assert B.objects.count() == 1 def test_document_hash(self): """Test document in list, dict, set.""" @@ -2518,12 +2509,12 @@ class TestInstance(MongoDBTestCase): # Make sure docs are properly identified in a list (__eq__ is used # for the comparison). 
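assertIn/assertNotIn translate directly to in/not in, and the same plain asserts cover dict and set membership, which is what the hashing checks below rely on. A self-contained sketch, with a plain Account class standing in for the User document:

class Account(object):
    def __init__(self, pk):
        self.pk = pk

    def __eq__(self, other):
        return isinstance(other, Account) and self.pk == other.pk

    def __hash__(self):
        return hash(self.pk)


def test_membership_and_hashing_asserts():
    a1, a2 = Account(1), Account(2)
    accounts = [a1, a2]
    assert a1 in accounts
    assert Account(3) not in accounts
    assert len({a1, a2, Account(1)}) == 2  # duplicates collapse via __hash__
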
all_user_list = list(User.objects.all()) - self.assertIn(u1, all_user_list) - self.assertIn(u2, all_user_list) - self.assertIn(u3, all_user_list) - self.assertNotIn(u4, all_user_list) # New object - self.assertNotIn(b1, all_user_list) # Other object - self.assertNotIn(b2, all_user_list) # Other object + assert u1 in all_user_list + assert u2 in all_user_list + assert u3 in all_user_list + assert u4 not in all_user_list # New object + assert b1 not in all_user_list # Other object + assert b2 not in all_user_list # Other object # Make sure docs can be used as keys in a dict (__hash__ is used # for hashing the docs). @@ -2531,27 +2522,27 @@ class TestInstance(MongoDBTestCase): for u in User.objects.all(): all_user_dic[u] = "OK" - self.assertEqual(all_user_dic.get(u1, False), "OK") - self.assertEqual(all_user_dic.get(u2, False), "OK") - self.assertEqual(all_user_dic.get(u3, False), "OK") - self.assertEqual(all_user_dic.get(u4, False), False) # New object - self.assertEqual(all_user_dic.get(b1, False), False) # Other object - self.assertEqual(all_user_dic.get(b2, False), False) # Other object + assert all_user_dic.get(u1, False) == "OK" + assert all_user_dic.get(u2, False) == "OK" + assert all_user_dic.get(u3, False) == "OK" + assert all_user_dic.get(u4, False) == False # New object + assert all_user_dic.get(b1, False) == False # Other object + assert all_user_dic.get(b2, False) == False # Other object # Make sure docs are properly identified in a set (__hash__ is used # for hashing the docs). all_user_set = set(User.objects.all()) - self.assertIn(u1, all_user_set) - self.assertNotIn(u4, all_user_set) - self.assertNotIn(b1, all_user_list) - self.assertNotIn(b2, all_user_list) + assert u1 in all_user_set + assert u4 not in all_user_set + assert b1 not in all_user_list + assert b2 not in all_user_list # Make sure duplicate docs aren't accepted in the set - self.assertEqual(len(all_user_set), 3) + assert len(all_user_set) == 3 all_user_set.add(u1) all_user_set.add(u2) all_user_set.add(u3) - self.assertEqual(len(all_user_set), 3) + assert len(all_user_set) == 3 def test_picklable(self): pickle_doc = PickleTest(number=1, string="One", lists=["1", "2"]) @@ -2564,21 +2555,21 @@ class TestInstance(MongoDBTestCase): pickled_doc = pickle.dumps(pickle_doc) resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected, pickle_doc) + assert resurrected == pickle_doc # Test pickling changed data pickle_doc.lists.append("3") pickled_doc = pickle.dumps(pickle_doc) resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected, pickle_doc) + assert resurrected == pickle_doc resurrected.string = "Two" resurrected.save() pickle_doc = PickleTest.objects.first() - self.assertEqual(resurrected, pickle_doc) - self.assertEqual(pickle_doc.string, "Two") - self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) + assert resurrected == pickle_doc + assert pickle_doc.string == "Two" + assert pickle_doc.lists == ["1", "2", "3"] def test_regular_document_pickle(self): pickle_doc = PickleTest(number=1, string="One", lists=["1", "2"]) @@ -2594,11 +2585,12 @@ class TestInstance(MongoDBTestCase): fixtures.PickleTest = fixtures.NewDocumentPickleTest resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected.__class__, fixtures.NewDocumentPickleTest) - self.assertEqual( - resurrected._fields_ordered, fixtures.NewDocumentPickleTest._fields_ordered + assert resurrected.__class__ == fixtures.NewDocumentPickleTest + assert ( + resurrected._fields_ordered + == fixtures.NewDocumentPickleTest._fields_ordered ) - 
self.assertNotEqual(resurrected._fields_ordered, pickle_doc._fields_ordered) + assert resurrected._fields_ordered != pickle_doc._fields_ordered # The local PickleTest is still a ref to the original fixtures.PickleTest = PickleTest @@ -2617,19 +2609,17 @@ class TestInstance(MongoDBTestCase): pickled_doc = pickle.dumps(pickle_doc) resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected, pickle_doc) - self.assertEqual(resurrected._fields_ordered, pickle_doc._fields_ordered) - self.assertEqual( - resurrected._dynamic_fields.keys(), pickle_doc._dynamic_fields.keys() - ) + assert resurrected == pickle_doc + assert resurrected._fields_ordered == pickle_doc._fields_ordered + assert resurrected._dynamic_fields.keys() == pickle_doc._dynamic_fields.keys() - self.assertEqual(resurrected.embedded, pickle_doc.embedded) - self.assertEqual( - resurrected.embedded._fields_ordered, pickle_doc.embedded._fields_ordered + assert resurrected.embedded == pickle_doc.embedded + assert ( + resurrected.embedded._fields_ordered == pickle_doc.embedded._fields_ordered ) - self.assertEqual( - resurrected.embedded._dynamic_fields.keys(), - pickle_doc.embedded._dynamic_fields.keys(), + assert ( + resurrected.embedded._dynamic_fields.keys() + == pickle_doc.embedded._dynamic_fields.keys() ) def test_picklable_on_signals(self): @@ -2642,7 +2632,7 @@ class TestInstance(MongoDBTestCase): """Test creating a field with a field name that would override the "validate" method. """ - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): class Blog(Document): validate = DictField() @@ -2659,7 +2649,7 @@ class TestInstance(MongoDBTestCase): a = A() a.save() a.reload() - self.assertEqual(a.b.field1, "field1") + assert a.b.field1 == "field1" class C(EmbeddedDocument): c_field = StringField(default="cfield") @@ -2676,7 +2666,7 @@ class TestInstance(MongoDBTestCase): a.save() a.reload() - self.assertEqual(a.b.field2.c_field, "new value") + assert a.b.field2.c_field == "new value" def test_can_save_false_values(self): """Ensures you can save False values on save.""" @@ -2692,7 +2682,7 @@ class TestInstance(MongoDBTestCase): d.archived = False d.save() - self.assertEqual(Doc.objects(archived=False).count(), 1) + assert Doc.objects(archived=False).count() == 1 def test_can_save_false_values_dynamic(self): """Ensures you can save False values on dynamic docs.""" @@ -2707,7 +2697,7 @@ class TestInstance(MongoDBTestCase): d.archived = False d.save() - self.assertEqual(Doc.objects(archived=False).count(), 1) + assert Doc.objects(archived=False).count() == 1 def test_do_not_save_unchanged_references(self): """Ensures cascading saves dont auto update""" @@ -2768,8 +2758,8 @@ class TestInstance(MongoDBTestCase): hp = Book.objects.create(name="Harry Potter") # Selects - self.assertEqual(User.objects.first(), bob) - self.assertEqual(Book.objects.first(), hp) + assert User.objects.first() == bob + assert Book.objects.first() == hp # DeReference class AuthorBooks(Document): @@ -2783,27 +2773,23 @@ class TestInstance(MongoDBTestCase): ab = AuthorBooks.objects.create(author=bob, book=hp) # select - self.assertEqual(AuthorBooks.objects.first(), ab) - self.assertEqual(AuthorBooks.objects.first().book, hp) - self.assertEqual(AuthorBooks.objects.first().author, bob) - self.assertEqual(AuthorBooks.objects.filter(author=bob).first(), ab) - self.assertEqual(AuthorBooks.objects.filter(book=hp).first(), ab) + assert AuthorBooks.objects.first() == ab + assert AuthorBooks.objects.first().book == hp + assert 
AuthorBooks.objects.first().author == bob + assert AuthorBooks.objects.filter(author=bob).first() == ab + assert AuthorBooks.objects.filter(book=hp).first() == ab # DB Alias - self.assertEqual(User._get_db(), get_db("testdb-1")) - self.assertEqual(Book._get_db(), get_db("testdb-2")) - self.assertEqual(AuthorBooks._get_db(), get_db("testdb-3")) + assert User._get_db() == get_db("testdb-1") + assert Book._get_db() == get_db("testdb-2") + assert AuthorBooks._get_db() == get_db("testdb-3") # Collections - self.assertEqual( - User._get_collection(), get_db("testdb-1")[User._get_collection_name()] - ) - self.assertEqual( - Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()] - ) - self.assertEqual( - AuthorBooks._get_collection(), - get_db("testdb-3")[AuthorBooks._get_collection_name()], + assert User._get_collection() == get_db("testdb-1")[User._get_collection_name()] + assert Book._get_collection() == get_db("testdb-2")[Book._get_collection_name()] + assert ( + AuthorBooks._get_collection() + == get_db("testdb-3")[AuthorBooks._get_collection_name()] ) def test_db_alias_overrides(self): @@ -2826,9 +2812,9 @@ class TestInstance(MongoDBTestCase): A.objects.all() - self.assertEqual("testdb-2", B._meta.get("db_alias")) - self.assertEqual("mongoenginetest", A._get_collection().database.name) - self.assertEqual("mongoenginetest2", B._get_collection().database.name) + assert "testdb-2" == B._meta.get("db_alias") + assert "mongoenginetest" == A._get_collection().database.name + assert "mongoenginetest2" == B._get_collection().database.name def test_db_alias_propagates(self): """db_alias propagates?""" @@ -2841,7 +2827,7 @@ class TestInstance(MongoDBTestCase): class B(A): pass - self.assertEqual("testdb-1", B._meta.get("db_alias")) + assert "testdb-1" == B._meta.get("db_alias") def test_db_ref_usage(self): """DB Ref usage in dict_fields.""" @@ -2898,11 +2884,9 @@ class TestInstance(MongoDBTestCase): Book.objects.create(name="9", author=jon, extra={"a": peter.to_dbref()}) # Checks - self.assertEqual( - ",".join([str(b) for b in Book.objects.all()]), "1,2,3,4,5,6,7,8,9" - ) + assert ",".join([str(b) for b in Book.objects.all()]) == "1,2,3,4,5,6,7,8,9" # bob related books - self.assertEqual( + assert ( ",".join( [ str(b) @@ -2910,12 +2894,12 @@ class TestInstance(MongoDBTestCase): Q(extra__a=bob) | Q(author=bob) | Q(extra__b=bob) ) ] - ), - "1,2,3,4", + ) + == "1,2,3,4" ) # Susan & Karl related books - self.assertEqual( + assert ( ",".join( [ str(b) @@ -2925,12 +2909,12 @@ class TestInstance(MongoDBTestCase): | Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()]) ) ] - ), - "1", + ) + == "1" ) # $Where - self.assertEqual( + assert ( u",".join( [ str(b) @@ -2943,8 +2927,8 @@ class TestInstance(MongoDBTestCase): } ) ] - ), - "1,2", + ) + == "1,2" ) def test_switch_db_instance(self): @@ -2958,7 +2942,7 @@ class TestInstance(MongoDBTestCase): Group.drop_collection() Group(name="hello - default").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() group = Group.objects.first() group.switch_db("testdb-1") @@ -2967,10 +2951,10 @@ class TestInstance(MongoDBTestCase): with switch_db(Group, "testdb-1") as Group: group = Group.objects.first() - self.assertEqual("hello - testdb!", group.name) + assert "hello - testdb!" 
== group.name group = Group.objects.first() - self.assertEqual("hello - default", group.name) + assert "hello - default" == group.name # Slightly contrived now - perform an update # Only works as they have the same object_id @@ -2979,12 +2963,12 @@ class TestInstance(MongoDBTestCase): with switch_db(Group, "testdb-1") as Group: group = Group.objects.first() - self.assertEqual("hello - update", group.name) + assert "hello - update" == group.name Group.drop_collection() - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() group = Group.objects.first() - self.assertEqual("hello - default", group.name) + assert "hello - default" == group.name # Totally contrived now - perform a delete # Only works as they have the same object_id @@ -2992,10 +2976,10 @@ class TestInstance(MongoDBTestCase): group.delete() with switch_db(Group, "testdb-1") as Group: - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() group = Group.objects.first() - self.assertEqual("hello - default", group.name) + assert "hello - default" == group.name def test_load_undefined_fields(self): class User(Document): @@ -3007,7 +2991,8 @@ class TestInstance(MongoDBTestCase): {"name": "John", "foo": "Bar", "data": [1, 2, 3]} ) - self.assertRaises(FieldDoesNotExist, User.objects.first) + with pytest.raises(FieldDoesNotExist): + User.objects.first() def test_load_undefined_fields_with_strict_false(self): class User(Document): @@ -3022,11 +3007,11 @@ class TestInstance(MongoDBTestCase): ) user = User.objects.first() - self.assertEqual(user.name, "John") - self.assertFalse(hasattr(user, "foo")) - self.assertEqual(user._data["foo"], "Bar") - self.assertFalse(hasattr(user, "data")) - self.assertEqual(user._data["data"], [1, 2, 3]) + assert user.name == "John" + assert not hasattr(user, "foo") + assert user._data["foo"] == "Bar" + assert not hasattr(user, "data") + assert user._data["data"] == [1, 2, 3] def test_load_undefined_fields_on_embedded_document(self): class Thing(EmbeddedDocument): @@ -3045,7 +3030,8 @@ class TestInstance(MongoDBTestCase): } ) - self.assertRaises(FieldDoesNotExist, User.objects.first) + with pytest.raises(FieldDoesNotExist): + User.objects.first() def test_load_undefined_fields_on_embedded_document_with_strict_false_on_doc(self): class Thing(EmbeddedDocument): @@ -3066,7 +3052,8 @@ class TestInstance(MongoDBTestCase): } ) - self.assertRaises(FieldDoesNotExist, User.objects.first) + with pytest.raises(FieldDoesNotExist): + User.objects.first() def test_load_undefined_fields_on_embedded_document_with_strict_false(self): class Thing(EmbeddedDocument): @@ -3088,12 +3075,12 @@ class TestInstance(MongoDBTestCase): ) user = User.objects.first() - self.assertEqual(user.name, "John") - self.assertEqual(user.thing.name, "My thing") - self.assertFalse(hasattr(user.thing, "foo")) - self.assertEqual(user.thing._data["foo"], "Bar") - self.assertFalse(hasattr(user.thing, "data")) - self.assertEqual(user.thing._data["data"], [1, 2, 3]) + assert user.name == "John" + assert user.thing.name == "My thing" + assert not hasattr(user.thing, "foo") + assert user.thing._data["foo"] == "Bar" + assert not hasattr(user.thing, "data") + assert user.thing._data["data"] == [1, 2, 3] def test_spaces_in_keys(self): class Embedded(DynamicEmbeddedDocument): @@ -3108,7 +3095,7 @@ class TestInstance(MongoDBTestCase): doc.save() one = Doc.objects.filter(**{"hello world": 1}).count() - self.assertEqual(1, one) + assert 1 == one def test_shard_key(self): class LogEntry(Document): @@ -3123,13 
+3110,13 @@ class TestInstance(MongoDBTestCase): log.machine = "Localhost" log.save() - self.assertTrue(log.id is not None) + assert log.id is not None log.log = "Saving" log.save() # try to change the shard key - with self.assertRaises(OperationError): + with pytest.raises(OperationError): log.machine = "127.0.0.1" def test_shard_key_in_embedded_document(self): @@ -3145,13 +3132,13 @@ class TestInstance(MongoDBTestCase): bar_doc = Bar(foo=foo_doc, bar="world") bar_doc.save() - self.assertTrue(bar_doc.id is not None) + assert bar_doc.id is not None bar_doc.bar = "baz" bar_doc.save() # try to change the shard key - with self.assertRaises(OperationError): + with pytest.raises(OperationError): bar_doc.foo.foo = "something" bar_doc.save() @@ -3168,13 +3155,13 @@ class TestInstance(MongoDBTestCase): log.machine = "Localhost" log.save() - self.assertTrue(log.id is not None) + assert log.id is not None log.log = "Saving" log.save() # try to change the shard key - with self.assertRaises(OperationError): + with pytest.raises(OperationError): log.machine = "127.0.0.1" def test_kwargs_simple(self): @@ -3191,8 +3178,8 @@ class TestInstance(MongoDBTestCase): classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc")) dict_doc = Doc(**{"doc_name": "my doc", "doc": {"name": "embedded doc"}}) - self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) + assert classic_doc == dict_doc + assert classic_doc._data == dict_doc._data def test_kwargs_complex(self): class Embedded(EmbeddedDocument): @@ -3216,48 +3203,48 @@ class TestInstance(MongoDBTestCase): } ) - self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) + assert classic_doc == dict_doc + assert classic_doc._data == dict_doc._data def test_positional_creation(self): """Document cannot be instantiated using positional arguments.""" - with self.assertRaises(TypeError) as e: + with pytest.raises(TypeError) as e: person = self.Person("Test User", 42) expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." ) - self.assertEqual(str(e.exception), expected_msg) + assert str(e.exception) == expected_msg def test_mixed_creation(self): """Document cannot be instantiated using mixed arguments.""" - with self.assertRaises(TypeError) as e: + with pytest.raises(TypeError) as e: person = self.Person("Test User", age=42) expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." ) - self.assertEqual(str(e.exception), expected_msg) + assert str(e.exception) == expected_msg def test_positional_creation_embedded(self): """Embedded document cannot be created using positional arguments.""" - with self.assertRaises(TypeError) as e: + with pytest.raises(TypeError) as e: job = self.Job("Test Job", 4) expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." ) - self.assertEqual(str(e.exception), expected_msg) + assert str(e.exception) == expected_msg def test_mixed_creation_embedded(self): """Embedded document cannot be created using mixed arguments.""" - with self.assertRaises(TypeError) as e: + with pytest.raises(TypeError) as e: job = self.Job("Test Job", years=4) expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." 
) - self.assertEqual(str(e.exception), expected_msg) + assert str(e.exception) == expected_msg def test_data_contains_id_field(self): """Ensure that asking for _data returns 'id'.""" @@ -3269,8 +3256,8 @@ class TestInstance(MongoDBTestCase): Person(name="Harry Potter").save() person = Person.objects.first() - self.assertIn("id", person._data.keys()) - self.assertEqual(person._data.get("id"), person.id) + assert "id" in person._data.keys() + assert person._data.get("id") == person.id def test_complex_nesting_document_and_embedded_document(self): class Macro(EmbeddedDocument): @@ -3310,8 +3297,8 @@ class TestInstance(MongoDBTestCase): system.save() system = NodesSystem.objects.first() - self.assertEqual( - "UNDEFINED", system.nodes["node"].parameters["param"].macros["test"].value + assert ( + "UNDEFINED" == system.nodes["node"].parameters["param"].macros["test"].value ) def test_embedded_document_equality(self): @@ -3328,9 +3315,9 @@ class TestInstance(MongoDBTestCase): f1 = Embedded._from_son(e.to_mongo()) f2 = Embedded._from_son(e.to_mongo()) - self.assertEqual(f1, f2) + assert f1 == f2 f1.ref # Dereferences lazily - self.assertEqual(f1, f2) + assert f1 == f2 def test_dbref_equality(self): class Test2(Document): @@ -3361,36 +3348,36 @@ class TestInstance(MongoDBTestCase): dbref2 = f._data["test2"] obj2 = f.test2 - self.assertIsInstance(dbref2, DBRef) - self.assertIsInstance(obj2, Test2) - self.assertEqual(obj2.id, dbref2.id) - self.assertEqual(obj2, dbref2) - self.assertEqual(dbref2, obj2) + assert isinstance(dbref2, DBRef) + assert isinstance(obj2, Test2) + assert obj2.id == dbref2.id + assert obj2 == dbref2 + assert dbref2 == obj2 dbref3 = f._data["test3"] obj3 = f.test3 - self.assertIsInstance(dbref3, DBRef) - self.assertIsInstance(obj3, Test3) - self.assertEqual(obj3.id, dbref3.id) - self.assertEqual(obj3, dbref3) - self.assertEqual(dbref3, obj3) + assert isinstance(dbref3, DBRef) + assert isinstance(obj3, Test3) + assert obj3.id == dbref3.id + assert obj3 == dbref3 + assert dbref3 == obj3 - self.assertEqual(obj2.id, obj3.id) - self.assertEqual(dbref2.id, dbref3.id) - self.assertNotEqual(dbref2, dbref3) - self.assertNotEqual(dbref3, dbref2) - self.assertNotEqual(dbref2, dbref3) - self.assertNotEqual(dbref3, dbref2) + assert obj2.id == obj3.id + assert dbref2.id == dbref3.id + assert dbref2 != dbref3 + assert dbref3 != dbref2 + assert dbref2 != dbref3 + assert dbref3 != dbref2 - self.assertNotEqual(obj2, dbref3) - self.assertNotEqual(dbref3, obj2) - self.assertNotEqual(obj2, dbref3) - self.assertNotEqual(dbref3, obj2) + assert obj2 != dbref3 + assert dbref3 != obj2 + assert obj2 != dbref3 + assert dbref3 != obj2 - self.assertNotEqual(obj3, dbref2) - self.assertNotEqual(dbref2, obj3) - self.assertNotEqual(obj3, dbref2) - self.assertNotEqual(dbref2, obj3) + assert obj3 != dbref2 + assert dbref2 != obj3 + assert obj3 != dbref2 + assert dbref2 != obj3 def test_default_values(self): class Person(Document): @@ -3405,7 +3392,7 @@ class TestInstance(MongoDBTestCase): p2.name = "alon2" p2.save() p3 = Person.objects().only("created_on")[0] - self.assertEqual(orig_created_on, p3.created_on) + assert orig_created_on == p3.created_on class Person(Document): created_on = DateTimeField(default=lambda: datetime.utcnow()) @@ -3414,10 +3401,10 @@ class TestInstance(MongoDBTestCase): p4 = Person.objects()[0] p4.save() - self.assertEqual(p4.height, 189) + assert p4.height == 189 # However the default will not be fixed in DB - self.assertEqual(Person.objects(height=189).count(), 0) + assert 
Person.objects(height=189).count() == 0 # alter DB for the new default coll = Person._get_collection() @@ -3425,7 +3412,7 @@ class TestInstance(MongoDBTestCase): if "height" not in person: coll.update_one({"_id": person["_id"]}, {"$set": {"height": 189}}) - self.assertEqual(Person.objects(height=189).count(), 1) + assert Person.objects(height=189).count() == 1 def test_shard_key_mutability_after_from_json(self): """Ensure that a document ID can be modified after from_json. @@ -3445,11 +3432,11 @@ class TestInstance(MongoDBTestCase): meta = {"shard_key": ("id", "name")} p = Person.from_json('{"name": "name", "age": 27}', created=True) - self.assertEqual(p._created, True) + assert p._created == True p.name = "new name" p.id = "12345" - self.assertEqual(p.name, "new name") - self.assertEqual(p.id, "12345") + assert p.name == "new name" + assert p.id == "12345" def test_shard_key_mutability_after_from_son(self): """Ensure that a document ID can be modified after _from_son. @@ -3463,11 +3450,11 @@ class TestInstance(MongoDBTestCase): meta = {"shard_key": ("id", "name")} p = Person._from_son({"name": "name", "age": 27}, created=True) - self.assertEqual(p._created, True) + assert p._created == True p.name = "new name" p.id = "12345" - self.assertEqual(p.name, "new name") - self.assertEqual(p.id, "12345") + assert p.name == "new name" + assert p.id == "12345" def test_from_json_created_false_without_an_id(self): class Person(Document): @@ -3476,14 +3463,14 @@ class TestInstance(MongoDBTestCase): Person.objects.delete() p = Person.from_json('{"name": "name"}', created=False) - self.assertEqual(p._created, False) - self.assertEqual(p.id, None) + assert p._created == False + assert p.id == None # Make sure the document is subsequently persisted correctly. p.save() - self.assertTrue(p.id is not None) + assert p.id is not None saved_p = Person.objects.get(id=p.id) - self.assertEqual(saved_p.name, "name") + assert saved_p.name == "name" def test_from_json_created_false_with_an_id(self): """See https://github.com/mongoengine/mongoengine/issues/1854""" @@ -3496,13 +3483,13 @@ class TestInstance(MongoDBTestCase): p = Person.from_json( '{"_id": "5b85a8b04ec5dc2da388296e", "name": "name"}', created=False ) - self.assertEqual(p._created, False) - self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, "name") - self.assertEqual(p.id, ObjectId("5b85a8b04ec5dc2da388296e")) + assert p._created == False + assert p._changed_fields == [] + assert p.name == "name" + assert p.id == ObjectId("5b85a8b04ec5dc2da388296e") p.save() - with self.assertRaises(DoesNotExist): + with pytest.raises(DoesNotExist): # Since the object is considered as already persisted (thanks to # `created=False` and an existing ID), and we haven't changed any # fields (i.e. `_changed_fields` is empty), the document is @@ -3510,12 +3497,12 @@ class TestInstance(MongoDBTestCase): # nothing. 
Person.objects.get(id=p.id) - self.assertFalse(p._created) + assert not p._created p.name = "a new name" - self.assertEqual(p._changed_fields, ["name"]) + assert p._changed_fields == ["name"] p.save() saved_p = Person.objects.get(id=p.id) - self.assertEqual(saved_p.name, p.name) + assert saved_p.name == p.name def test_from_json_created_true_with_an_id(self): class Person(Document): @@ -3526,15 +3513,15 @@ class TestInstance(MongoDBTestCase): p = Person.from_json( '{"_id": "5b85a8b04ec5dc2da388296e", "name": "name"}', created=True ) - self.assertTrue(p._created) - self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, "name") - self.assertEqual(p.id, ObjectId("5b85a8b04ec5dc2da388296e")) + assert p._created + assert p._changed_fields == [] + assert p.name == "name" + assert p.id == ObjectId("5b85a8b04ec5dc2da388296e") p.save() saved_p = Person.objects.get(id=p.id) - self.assertEqual(saved_p, p) - self.assertEqual(saved_p.name, "name") + assert saved_p == p + assert saved_p.name == "name" def test_null_field(self): # 734 @@ -3553,13 +3540,13 @@ class TestInstance(MongoDBTestCase): u_from_db = User.objects.get(name="user") u_from_db.height = None u_from_db.save() - self.assertEqual(u_from_db.height, None) + assert u_from_db.height == None # 864 - self.assertEqual(u_from_db.str_fld, None) - self.assertEqual(u_from_db.int_fld, None) - self.assertEqual(u_from_db.flt_fld, None) - self.assertEqual(u_from_db.dt_fld, None) - self.assertEqual(u_from_db.cdt_fld, None) + assert u_from_db.str_fld == None + assert u_from_db.int_fld == None + assert u_from_db.flt_fld == None + assert u_from_db.dt_fld == None + assert u_from_db.cdt_fld == None # 735 User.objects.delete() @@ -3567,7 +3554,7 @@ class TestInstance(MongoDBTestCase): u.save() User.objects(name="user").update_one(set__height=None, upsert=True) u_from_db = User.objects.get(name="user") - self.assertEqual(u_from_db.height, None) + assert u_from_db.height == None def test_not_saved_eq(self): """Ensure we can compare documents not saved. 
@@ -3578,8 +3565,8 @@ class TestInstance(MongoDBTestCase): p = Person() p1 = Person() - self.assertNotEqual(p, p1) - self.assertEqual(p, p) + assert p != p1 + assert p == p def test_list_iter(self): # 914 @@ -3592,10 +3579,10 @@ class TestInstance(MongoDBTestCase): A.objects.delete() A(l=[B(v="1"), B(v="2"), B(v="3")]).save() a = A.objects.get() - self.assertEqual(a.l._instance, a) + assert a.l._instance == a for idx, b in enumerate(a.l): - self.assertEqual(b._instance, a) - self.assertEqual(idx, 2) + assert b._instance == a + assert idx == 2 def test_falsey_pk(self): """Ensure that we can create and update a document with Falsey PK.""" @@ -3625,7 +3612,7 @@ class TestInstance(MongoDBTestCase): blog.update(push__tags__0=["mongodb", "code"]) blog.reload() - self.assertEqual(blog.tags, ["mongodb", "code", "python"]) + assert blog.tags == ["mongodb", "code", "python"] def test_push_nested_list(self): """Ensure that push update works in nested list""" @@ -3637,7 +3624,7 @@ class TestInstance(MongoDBTestCase): blog = BlogPost(slug="test").save() blog.update(push__tags=["value1", 123]) blog.reload() - self.assertEqual(blog.tags, [["value1", 123]]) + assert blog.tags == [["value1", 123]] def test_accessing_objects_with_indexes_error(self): insert_result = self.db.company.insert_many( @@ -3653,7 +3640,7 @@ class TestInstance(MongoDBTestCase): company = ReferenceField(Company) # Ensure index creation exception aren't swallowed (#1688) - with self.assertRaises(DuplicateKeyError): + with pytest.raises(DuplicateKeyError): User.objects().select_related() @@ -3663,10 +3650,10 @@ class ObjectKeyTestCase(MongoDBTestCase): title = StringField() book = Book(title="Whatever") - self.assertEqual(book._object_key, {"pk": None}) + assert book._object_key == {"pk": None} book.pk = ObjectId() - self.assertEqual(book._object_key, {"pk": book.pk}) + assert book._object_key == {"pk": book.pk} def test_object_key_with_custom_primary_key(self): class Book(Document): @@ -3674,10 +3661,10 @@ class ObjectKeyTestCase(MongoDBTestCase): title = StringField() book = Book(title="Sapiens") - self.assertEqual(book._object_key, {"pk": None}) + assert book._object_key == {"pk": None} book = Book(pk="0062316117") - self.assertEqual(book._object_key, {"pk": "0062316117"}) + assert book._object_key == {"pk": "0062316117"} def test_object_key_in_a_sharded_collection(self): class Book(Document): @@ -3685,9 +3672,9 @@ class ObjectKeyTestCase(MongoDBTestCase): meta = {"shard_key": ("pk", "title")} book = Book() - self.assertEqual(book._object_key, {"pk": None, "title": None}) + assert book._object_key == {"pk": None, "title": None} book = Book(pk=ObjectId(), title="Sapiens") - self.assertEqual(book._object_key, {"pk": book.pk, "title": "Sapiens"}) + assert book._object_key == {"pk": book.pk, "title": "Sapiens"} def test_object_key_with_custom_db_field(self): class Book(Document): @@ -3695,7 +3682,7 @@ class ObjectKeyTestCase(MongoDBTestCase): meta = {"shard_key": ("pk", "author")} book = Book(pk=ObjectId(), author="Author") - self.assertEqual(book._object_key, {"pk": book.pk, "author": "Author"}) + assert book._object_key == {"pk": book.pk, "author": "Author"} def test_object_key_with_nested_shard_key(self): class Author(EmbeddedDocument): @@ -3706,7 +3693,7 @@ class ObjectKeyTestCase(MongoDBTestCase): meta = {"shard_key": ("pk", "author.name")} book = Book(pk=ObjectId(), author=Author(name="Author")) - self.assertEqual(book._object_key, {"pk": book.pk, "author__name": "Author"}) + assert book._object_key == {"pk": book.pk, 
"author__name": "Author"} if __name__ == "__main__": diff --git a/tests/document/test_json_serialisation.py b/tests/document/test_json_serialisation.py index 26a4a6c1..593d34f8 100644 --- a/tests/document/test_json_serialisation.py +++ b/tests/document/test_json_serialisation.py @@ -32,7 +32,7 @@ class TestJson(MongoDBTestCase): expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" - self.assertEqual(doc_json, expected_json) + assert doc_json == expected_json def test_json_simple(self): class Embedded(EmbeddedDocument): @@ -52,9 +52,9 @@ class TestJson(MongoDBTestCase): doc_json = doc.to_json(sort_keys=True, separators=(",", ":")) expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" - self.assertEqual(doc_json, expected_json) + assert doc_json == expected_json - self.assertEqual(doc, Doc.from_json(doc.to_json())) + assert doc == Doc.from_json(doc.to_json()) def test_json_complex(self): class EmbeddedDoc(EmbeddedDocument): @@ -99,7 +99,7 @@ class TestJson(MongoDBTestCase): return json.loads(self.to_json()) == json.loads(other.to_json()) doc = Doc() - self.assertEqual(doc, Doc.from_json(doc.to_json())) + assert doc == Doc.from_json(doc.to_json()) if __name__ == "__main__": diff --git a/tests/document/test_validation.py b/tests/document/test_validation.py index 7449dd33..80601994 100644 --- a/tests/document/test_validation.py +++ b/tests/document/test_validation.py @@ -4,6 +4,7 @@ from datetime import datetime from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestValidatorError(MongoDBTestCase): @@ -11,12 +12,12 @@ class TestValidatorError(MongoDBTestCase): """Ensure a ValidationError handles error to_dict correctly. """ error = ValidationError("root") - self.assertEqual(error.to_dict(), {}) + assert error.to_dict() == {} # 1st level error schema error.errors = {"1st": ValidationError("bad 1st")} - self.assertIn("1st", error.to_dict()) - self.assertEqual(error.to_dict()["1st"], "bad 1st") + assert "1st" in error.to_dict() + assert error.to_dict()["1st"] == "bad 1st" # 2nd level error schema error.errors = { @@ -24,10 +25,10 @@ class TestValidatorError(MongoDBTestCase): "bad 1st", errors={"2nd": ValidationError("bad 2nd")} ) } - self.assertIn("1st", error.to_dict()) - self.assertIsInstance(error.to_dict()["1st"], dict) - self.assertIn("2nd", error.to_dict()["1st"]) - self.assertEqual(error.to_dict()["1st"]["2nd"], "bad 2nd") + assert "1st" in error.to_dict() + assert isinstance(error.to_dict()["1st"], dict) + assert "2nd" in error.to_dict()["1st"] + assert error.to_dict()["1st"]["2nd"] == "bad 2nd" # moar levels error.errors = { @@ -45,13 +46,13 @@ class TestValidatorError(MongoDBTestCase): }, ) } - self.assertIn("1st", error.to_dict()) - self.assertIn("2nd", error.to_dict()["1st"]) - self.assertIn("3rd", error.to_dict()["1st"]["2nd"]) - self.assertIn("4th", error.to_dict()["1st"]["2nd"]["3rd"]) - self.assertEqual(error.to_dict()["1st"]["2nd"]["3rd"]["4th"], "Inception") + assert "1st" in error.to_dict() + assert "2nd" in error.to_dict()["1st"] + assert "3rd" in error.to_dict()["1st"]["2nd"] + assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"] + assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception" - self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") + assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])" def test_model_validation(self): class User(Document): @@ -61,19 +62,19 @@ class TestValidatorError(MongoDBTestCase): try: User().validate() except ValidationError as e: - 
self.assertIn("User:None", e.message) - self.assertEqual( - e.to_dict(), - {"username": "Field is required", "name": "Field is required"}, - ) + assert "User:None" in e.message + assert e.to_dict() == { + "username": "Field is required", + "name": "Field is required", + } user = User(username="RossC0", name="Ross").save() user.name = None try: user.save() except ValidationError as e: - self.assertIn("User:RossC0", e.message) - self.assertEqual(e.to_dict(), {"name": "Field is required"}) + assert "User:RossC0" in e.message + assert e.to_dict() == {"name": "Field is required"} def test_fields_rewrite(self): class BasePerson(Document): @@ -85,7 +86,8 @@ class TestValidatorError(MongoDBTestCase): name = StringField(required=True) p = Person(age=15) - self.assertRaises(ValidationError, p.validate) + with pytest.raises(ValidationError): + p.validate() def test_embedded_document_validation(self): """Ensure that embedded documents may be validated. @@ -96,17 +98,19 @@ class TestValidatorError(MongoDBTestCase): content = StringField(required=True) comment = Comment() - self.assertRaises(ValidationError, comment.validate) + with pytest.raises(ValidationError): + comment.validate() comment.content = "test" comment.validate() comment.date = 4 - self.assertRaises(ValidationError, comment.validate) + with pytest.raises(ValidationError): + comment.validate() comment.date = datetime.now() comment.validate() - self.assertEqual(comment._instance, None) + assert comment._instance == None def test_embedded_db_field_validate(self): class SubDoc(EmbeddedDocument): @@ -119,10 +123,8 @@ class TestValidatorError(MongoDBTestCase): try: Doc(id="bad").validate() except ValidationError as e: - self.assertIn("SubDoc:None", e.message) - self.assertEqual( - e.to_dict(), {"e": {"val": "OK could not be converted to int"}} - ) + assert "SubDoc:None" in e.message + assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} Doc.drop_collection() @@ -130,18 +132,16 @@ class TestValidatorError(MongoDBTestCase): doc = Doc.objects.first() keys = doc._data.keys() - self.assertEqual(2, len(keys)) - self.assertIn("e", keys) - self.assertIn("id", keys) + assert 2 == len(keys) + assert "e" in keys + assert "id" in keys doc.e.val = "OK" try: doc.save() except ValidationError as e: - self.assertIn("Doc:test", e.message) - self.assertEqual( - e.to_dict(), {"e": {"val": "OK could not be converted to int"}} - ) + assert "Doc:test" in e.message + assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} def test_embedded_weakref(self): class SubDoc(EmbeddedDocument): @@ -157,14 +157,16 @@ class TestValidatorError(MongoDBTestCase): s = SubDoc() - self.assertRaises(ValidationError, s.validate) + with pytest.raises(ValidationError): + s.validate() d1.e = s d2.e = s del d1 - self.assertRaises(ValidationError, d2.validate) + with pytest.raises(ValidationError): + d2.validate() def test_parent_reference_in_child_document(self): """ diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index 719df922..86ee2654 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -7,6 +7,7 @@ import six from mongoengine import * from tests.utils import MongoDBTestCase +import pytest BIN_VALUE = six.b( "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" @@ -31,8 +32,8 @@ class TestBinaryField(MongoDBTestCase): attachment.save() attachment_1 = Attachment.objects().first() - self.assertEqual(MIME_TYPE, 
attachment_1.content_type) - self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) + assert MIME_TYPE == attachment_1.content_type + assert BLOB == six.binary_type(attachment_1.blob) def test_validation_succeeds(self): """Ensure that valid values can be assigned to binary fields. @@ -45,13 +46,15 @@ class TestBinaryField(MongoDBTestCase): blob = BinaryField(max_bytes=4) attachment_required = AttachmentRequired() - self.assertRaises(ValidationError, attachment_required.validate) + with pytest.raises(ValidationError): + attachment_required.validate() attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07")) attachment_required.validate() _5_BYTES = six.b("\xe6\x00\xc4\xff\x07") _4_BYTES = six.b("\xe6\x00\xc4\xff") - self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate) + with pytest.raises(ValidationError): + AttachmentSizeLimit(blob=_5_BYTES).validate() AttachmentSizeLimit(blob=_4_BYTES).validate() def test_validation_fails(self): @@ -61,7 +64,8 @@ class TestBinaryField(MongoDBTestCase): blob = BinaryField() for invalid_data in (2, u"Im_a_unicode", ["some_str"]): - self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate) + with pytest.raises(ValidationError): + Attachment(blob=invalid_data).validate() def test__primary(self): class Attachment(Document): @@ -70,10 +74,10 @@ class TestBinaryField(MongoDBTestCase): Attachment.drop_collection() binary_id = uuid.uuid4().bytes att = Attachment(id=binary_id).save() - self.assertEqual(1, Attachment.objects.count()) - self.assertEqual(1, Attachment.objects.filter(id=att.id).count()) + assert 1 == Attachment.objects.count() + assert 1 == Attachment.objects.filter(id=att.id).count() att.delete() - self.assertEqual(0, Attachment.objects.count()) + assert 0 == Attachment.objects.count() def test_primary_filter_by_binary_pk_as_str(self): class Attachment(Document): @@ -82,9 +86,9 @@ class TestBinaryField(MongoDBTestCase): Attachment.drop_collection() binary_id = uuid.uuid4().bytes att = Attachment(id=binary_id).save() - self.assertEqual(1, Attachment.objects.filter(id=binary_id).count()) + assert 1 == Attachment.objects.filter(id=binary_id).count() att.delete() - self.assertEqual(0, Attachment.objects.count()) + assert 0 == Attachment.objects.count() def test_match_querying_with_bytes(self): class MyDocument(Document): @@ -94,7 +98,7 @@ class TestBinaryField(MongoDBTestCase): doc = MyDocument(bin_field=BIN_VALUE).save() matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first() - self.assertEqual(matched_doc.id, doc.id) + assert matched_doc.id == doc.id def test_match_querying_with_binary(self): class MyDocument(Document): @@ -105,7 +109,7 @@ class TestBinaryField(MongoDBTestCase): doc = MyDocument(bin_field=BIN_VALUE).save() matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first() - self.assertEqual(matched_doc.id, doc.id) + assert matched_doc.id == doc.id def test_modify_operation__set(self): """Ensures no regression of bug #1127""" @@ -119,11 +123,11 @@ class TestBinaryField(MongoDBTestCase): doc = MyDocument.objects(some_field="test").modify( upsert=True, new=True, set__bin_field=BIN_VALUE ) - self.assertEqual(doc.some_field, "test") + assert doc.some_field == "test" if six.PY3: - self.assertEqual(doc.bin_field, BIN_VALUE) + assert doc.bin_field == BIN_VALUE else: - self.assertEqual(doc.bin_field, Binary(BIN_VALUE)) + assert doc.bin_field == Binary(BIN_VALUE) def test_update_one(self): """Ensures no regression of bug #1127""" @@ -139,9 +143,9 @@ class 
TestBinaryField(MongoDBTestCase): n_updated = MyDocument.objects(bin_field=bin_data).update_one( bin_field=BIN_VALUE ) - self.assertEqual(n_updated, 1) + assert n_updated == 1 fetched = MyDocument.objects.with_id(doc.id) if six.PY3: - self.assertEqual(fetched.bin_field, BIN_VALUE) + assert fetched.bin_field == BIN_VALUE else: - self.assertEqual(fetched.bin_field, Binary(BIN_VALUE)) + assert fetched.bin_field == Binary(BIN_VALUE) diff --git a/tests/fields/test_boolean_field.py b/tests/fields/test_boolean_field.py index 22ebb6f7..b38b5ea4 100644 --- a/tests/fields/test_boolean_field.py +++ b/tests/fields/test_boolean_field.py @@ -2,6 +2,7 @@ from mongoengine import * from tests.utils import MongoDBTestCase, get_as_pymongo +import pytest class TestBooleanField(MongoDBTestCase): @@ -11,7 +12,7 @@ class TestBooleanField(MongoDBTestCase): person = Person(admin=True) person.save() - self.assertEqual(get_as_pymongo(person), {"_id": person.id, "admin": True}) + assert get_as_pymongo(person) == {"_id": person.id, "admin": True} def test_validation(self): """Ensure that invalid values cannot be assigned to boolean @@ -26,11 +27,14 @@ class TestBooleanField(MongoDBTestCase): person.validate() person.admin = 2 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.admin = "Yes" - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.admin = "False" - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() def test_weirdness_constructor(self): """When attribute is set in contructor, it gets cast into a bool @@ -42,7 +46,7 @@ class TestBooleanField(MongoDBTestCase): admin = BooleanField() new_person = Person(admin="False") - self.assertTrue(new_person.admin) + assert new_person.admin new_person = Person(admin="0") - self.assertTrue(new_person.admin) + assert new_person.admin diff --git a/tests/fields/test_cached_reference_field.py b/tests/fields/test_cached_reference_field.py index 4e467587..e404aae0 100644 --- a/tests/fields/test_cached_reference_field.py +++ b/tests/fields/test_cached_reference_field.py @@ -4,6 +4,7 @@ from decimal import Decimal from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestCachedReferenceField(MongoDBTestCase): @@ -46,29 +47,29 @@ class TestCachedReferenceField(MongoDBTestCase): a = Animal(name="Leopard", tag="heavy") a.save() - self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal]) + assert Animal._cached_reference_fields == [Ocorrence.animal] o = Ocorrence(person="teste", animal=a) o.save() p = Ocorrence(person="Wilson") p.save() - self.assertEqual(Ocorrence.objects(animal=None).count(), 1) + assert Ocorrence.objects(animal=None).count() == 1 - self.assertEqual(a.to_mongo(fields=["tag"]), {"tag": "heavy", "_id": a.pk}) + assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk} - self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy") + assert o.to_mongo()["animal"]["tag"] == "heavy" # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() count = Ocorrence.objects(animal__tag="heavy").count() - self.assertEqual(count, 1) + assert count == 1 ocorrence = Ocorrence.objects(animal__tag="heavy").first() - self.assertEqual(ocorrence.person, "teste") - self.assertIsInstance(ocorrence.animal, Animal) + assert ocorrence.person == "teste" + assert isinstance(ocorrence.animal, Animal) def test_with_decimal(self): class 
PersonAuto(Document): @@ -88,10 +89,11 @@ class TestCachedReferenceField(MongoDBTestCase): s = SocialTest(group="dev", person=p) s.save() - self.assertEqual( - SocialTest.objects._collection.find_one({"person.salary": 7000.00}), - {"_id": s.pk, "group": s.group, "person": {"_id": p.pk, "salary": 7000.00}}, - ) + assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == { + "_id": s.pk, + "group": s.group, + "person": {"_id": p.pk, "salary": 7000.00}, + } def test_cached_reference_field_reference(self): class Group(Document): @@ -131,18 +133,15 @@ class TestCachedReferenceField(MongoDBTestCase): s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"]) s2.save() - self.assertEqual( - SocialData.objects._collection.find_one({"tags": "tag2"}), - { - "_id": s1.pk, - "obs": "testing 123", - "tags": ["tag1", "tag2"], - "person": {"_id": p1.pk, "group": g1.pk}, - }, - ) + assert SocialData.objects._collection.find_one({"tags": "tag2"}) == { + "_id": s1.pk, + "obs": "testing 123", + "tags": ["tag1", "tag2"], + "person": {"_id": p1.pk, "group": g1.pk}, + } - self.assertEqual(SocialData.objects(person__group=g2).count(), 1) - self.assertEqual(SocialData.objects(person__group=g2).first(), s2) + assert SocialData.objects(person__group=g2).count() == 1 + assert SocialData.objects(person__group=g2).first() == s2 def test_cached_reference_field_push_with_fields(self): class Product(Document): @@ -157,26 +156,20 @@ class TestCachedReferenceField(MongoDBTestCase): product1 = Product(name="abc").save() product2 = Product(name="def").save() basket = Basket(products=[product1]).save() - self.assertEqual( - Basket.objects._collection.find_one(), - { - "_id": basket.pk, - "products": [{"_id": product1.pk, "name": product1.name}], - }, - ) + assert Basket.objects._collection.find_one() == { + "_id": basket.pk, + "products": [{"_id": product1.pk, "name": product1.name}], + } # push to list basket.update(push__products=product2) basket.reload() - self.assertEqual( - Basket.objects._collection.find_one(), - { - "_id": basket.pk, - "products": [ - {"_id": product1.pk, "name": product1.name}, - {"_id": product2.pk, "name": product2.name}, - ], - }, - ) + assert Basket.objects._collection.find_one() == { + "_id": basket.pk, + "products": [ + {"_id": product1.pk, "name": product1.name}, + {"_id": product2.pk, "name": product2.name}, + ], + } def test_cached_reference_field_update_all(self): class Person(Document): @@ -194,37 +187,31 @@ class TestCachedReferenceField(MongoDBTestCase): a2.save() a2 = Person.objects.with_id(a2.id) - self.assertEqual(a2.father.tp, a1.tp) + assert a2.father.tp == a1.tp - self.assertEqual( - dict(a2.to_mongo()), - { - "_id": a2.pk, - "name": u"Wilson Junior", - "tp": u"pf", - "father": {"_id": a1.pk, "tp": u"pj"}, - }, - ) + assert dict(a2.to_mongo()) == { + "_id": a2.pk, + "name": u"Wilson Junior", + "tp": u"pf", + "father": {"_id": a1.pk, "tp": u"pj"}, + } - self.assertEqual(Person.objects(father=a1)._query, {"father._id": a1.pk}) - self.assertEqual(Person.objects(father=a1).count(), 1) + assert Person.objects(father=a1)._query == {"father._id": a1.pk} + assert Person.objects(father=a1).count() == 1 Person.objects.update(set__tp="pf") Person.father.sync_all() a2.reload() - self.assertEqual( - dict(a2.to_mongo()), - { - "_id": a2.pk, - "name": u"Wilson Junior", - "tp": u"pf", - "father": {"_id": a1.pk, "tp": u"pf"}, - }, - ) + assert dict(a2.to_mongo()) == { + "_id": a2.pk, + "name": u"Wilson Junior", + "tp": u"pf", + "father": {"_id": a1.pk, "tp": u"pf"}, + } 
def test_cached_reference_fields_on_embedded_documents(self): - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): class Test(Document): name = StringField() @@ -255,15 +242,12 @@ class TestCachedReferenceField(MongoDBTestCase): a1.save() a2.reload() - self.assertEqual( - dict(a2.to_mongo()), - { - "_id": a2.pk, - "name": "Wilson Junior", - "tp": "pf", - "father": {"_id": a1.pk, "tp": "pf"}, - }, - ) + assert dict(a2.to_mongo()) == { + "_id": a2.pk, + "name": "Wilson Junior", + "tp": "pf", + "father": {"_id": a1.pk, "tp": "pf"}, + } def test_cached_reference_auto_sync_disabled(self): class Persone(Document): @@ -284,15 +268,12 @@ class TestCachedReferenceField(MongoDBTestCase): a1.tp = "pf" a1.save() - self.assertEqual( - Persone.objects._collection.find_one({"_id": a2.pk}), - { - "_id": a2.pk, - "name": "Wilson Junior", - "tp": "pf", - "father": {"_id": a1.pk, "tp": "pj"}, - }, - ) + assert Persone.objects._collection.find_one({"_id": a2.pk}) == { + "_id": a2.pk, + "name": "Wilson Junior", + "tp": "pf", + "father": {"_id": a1.pk, "tp": "pj"}, + } def test_cached_reference_embedded_fields(self): class Owner(EmbeddedDocument): @@ -320,28 +301,29 @@ class TestCachedReferenceField(MongoDBTestCase): o = Ocorrence(person="teste", animal=a) o.save() - self.assertEqual( - dict(a.to_mongo(fields=["tag", "owner.tp"])), - {"_id": a.pk, "tag": "heavy", "owner": {"t": "u"}}, - ) - self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy") - self.assertEqual(o.to_mongo()["animal"]["owner"]["t"], "u") + assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == { + "_id": a.pk, + "tag": "heavy", + "owner": {"t": "u"}, + } + assert o.to_mongo()["animal"]["tag"] == "heavy" + assert o.to_mongo()["animal"]["owner"]["t"] == "u" # Check to_mongo with fields - self.assertNotIn("animal", o.to_mongo(fields=["person"])) + assert "animal" not in o.to_mongo(fields=["person"]) # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count() - self.assertEqual(count, 1) + assert count == 1 ocorrence = Ocorrence.objects( animal__tag="heavy", animal__owner__tp="u" ).first() - self.assertEqual(ocorrence.person, "teste") - self.assertIsInstance(ocorrence.animal, Animal) + assert ocorrence.person == "teste" + assert isinstance(ocorrence.animal, Animal) def test_cached_reference_embedded_list_fields(self): class Owner(EmbeddedDocument): @@ -370,13 +352,14 @@ class TestCachedReferenceField(MongoDBTestCase): o = Ocorrence(person="teste 2", animal=a) o.save() - self.assertEqual( - dict(a.to_mongo(fields=["tag", "owner.tags"])), - {"_id": a.pk, "tag": "heavy", "owner": {"tags": ["cool", "funny"]}}, - ) + assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == { + "_id": a.pk, + "tag": "heavy", + "owner": {"tags": ["cool", "funny"]}, + } - self.assertEqual(o.to_mongo()["animal"]["tag"], "heavy") - self.assertEqual(o.to_mongo()["animal"]["owner"]["tags"], ["cool", "funny"]) + assert o.to_mongo()["animal"]["tag"] == "heavy" + assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"] # counts Ocorrence(person="teste 2").save() @@ -385,10 +368,10 @@ class TestCachedReferenceField(MongoDBTestCase): query = Ocorrence.objects( animal__tag="heavy", animal__owner__tags="cool" )._query - self.assertEqual(query, {"animal.owner.tags": "cool", "animal.tag": "heavy"}) + assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"} ocorrence = Ocorrence.objects( animal__tag="heavy", 
animal__owner__tags="cool" ).first() - self.assertEqual(ocorrence.person, "teste 2") - self.assertIsInstance(ocorrence.animal, Animal) + assert ocorrence.person == "teste 2" + assert isinstance(ocorrence.animal, Animal) diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index 611c0ff8..f0a6b96e 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -28,7 +28,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 # Post UTC - microseconds are rounded (down) nearest millisecond - with # default datetimefields @@ -36,7 +36,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 # Pre UTC dates microseconds below 1000 are dropped - with default # datetimefields @@ -44,7 +44,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 # Pre UTC microseconds above 1000 is wonky - with default datetimefields # log.date has an invalid microsecond value so I can't construct @@ -54,9 +54,9 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 log1 = LogEntry.objects.get(date=d1) - self.assertEqual(log, log1) + assert log == log1 # Test string padding microsecond = map(int, [math.pow(10, x) for x in range(6)]) @@ -64,7 +64,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"] - self.assertTrue( + assert ( re.match("^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored) is not None ) @@ -73,7 +73,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[ "date_with_dots" ] - self.assertTrue( + assert ( re.match("^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None ) @@ -93,40 +93,40 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.save() log1 = LogEntry.objects.get(date=d1) - self.assertEqual(log, log1) + assert log == log1 # create extra 59 log entries for a total of 60 for i in range(1951, 2010): d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 60) + assert LogEntry.objects.count() == 60 # Test ordering logs = LogEntry.objects.order_by("date") i = 0 while i < 59: - self.assertTrue(logs[i].date <= logs[i + 1].date) + assert logs[i].date <= logs[i + 1].date i += 1 logs = LogEntry.objects.order_by("-date") i = 0 while i < 59: - self.assertTrue(logs[i].date >= logs[i + 1].date) + assert logs[i].date >= logs[i + 1].date i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) + assert logs.count() == 30 logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) + assert logs.count() == 30 logs = LogEntry.objects.filter( date__lte=datetime.datetime(2011, 1, 1), date__gte=datetime.datetime(2000, 1, 1), ) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 LogEntry.drop_collection() @@ -137,17 +137,17 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): logs = list(LogEntry.objects.order_by("date")) for next_idx, 
log in enumerate(logs[:-1], start=1): next_log = logs[next_idx] - self.assertTrue(log.date < next_log.date) + assert log.date < next_log.date logs = list(LogEntry.objects.order_by("-date")) for next_idx, log in enumerate(logs[:-1], start=1): next_log = logs[next_idx] - self.assertTrue(log.date > next_log.date) + assert log.date > next_log.date logs = LogEntry.objects.filter( date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000) ) - self.assertEqual(logs.count(), 4) + assert logs.count() == 4 def test_no_default_value(self): class Log(Document): @@ -156,11 +156,11 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): Log.drop_collection() log = Log() - self.assertIsNone(log.timestamp) + assert log.timestamp is None log.save() fetched_log = Log.objects.with_id(log.id) - self.assertIsNone(fetched_log.timestamp) + assert fetched_log.timestamp is None def test_default_static_value(self): NOW = datetime.datetime.utcnow() @@ -171,11 +171,11 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): Log.drop_collection() log = Log() - self.assertEqual(log.timestamp, NOW) + assert log.timestamp == NOW log.save() fetched_log = Log.objects.with_id(log.id) - self.assertEqual(fetched_log.timestamp, NOW) + assert fetched_log.timestamp == NOW def test_default_callable(self): NOW = datetime.datetime.utcnow() @@ -186,8 +186,8 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): Log.drop_collection() log = Log() - self.assertGreaterEqual(log.timestamp, NOW) + assert log.timestamp >= NOW log.save() fetched_log = Log.objects.with_id(log.id) - self.assertGreaterEqual(fetched_log.timestamp, NOW) + assert fetched_log.timestamp >= NOW diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index da572134..46fa4f0f 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -10,6 +10,7 @@ except ImportError: from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestDateField(MongoDBTestCase): @@ -23,7 +24,8 @@ class TestDateField(MongoDBTestCase): dt = DateField() md = MyDoc(dt="") - self.assertRaises(ValidationError, md.save) + with pytest.raises(ValidationError): + md.save() def test_date_from_whitespace_string(self): """ @@ -35,7 +37,8 @@ class TestDateField(MongoDBTestCase): dt = DateField() md = MyDoc(dt=" ") - self.assertRaises(ValidationError, md.save) + with pytest.raises(ValidationError): + md.save() def test_default_values_today(self): """Ensure that default field values are used when creating @@ -47,9 +50,9 @@ class TestDateField(MongoDBTestCase): person = Person() person.validate() - self.assertEqual(person.day, person.day) - self.assertEqual(person.day, datetime.date.today()) - self.assertEqual(person._data["day"], person.day) + assert person.day == person.day + assert person.day == datetime.date.today() + assert person._data["day"] == person.day def test_date(self): """Tests showing pymongo date fields @@ -67,7 +70,7 @@ class TestDateField(MongoDBTestCase): log.date = datetime.date.today() log.save() log.reload() - self.assertEqual(log.date, datetime.date.today()) + assert log.date == datetime.date.today() d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) @@ -75,16 +78,16 @@ class TestDateField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1.date()) - self.assertEqual(log.date, d2.date()) + assert log.date == d1.date() + assert log.date == d2.date() d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 
9000) log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1.date()) - self.assertEqual(log.date, d2.date()) + assert log.date == d1.date() + assert log.date == d2.date() if not six.PY3: # Pre UTC dates microseconds below 1000 are dropped @@ -94,8 +97,8 @@ class TestDateField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1.date()) - self.assertEqual(log.date, d2.date()) + assert log.date == d1.date() + assert log.date == d2.date() def test_regular_usage(self): """Tests for regular datetime fields""" @@ -113,35 +116,35 @@ class TestDateField(MongoDBTestCase): for query in (d1, d1.isoformat(" ")): log1 = LogEntry.objects.get(date=query) - self.assertEqual(log, log1) + assert log == log1 if dateutil: log1 = LogEntry.objects.get(date=d1.isoformat("T")) - self.assertEqual(log, log1) + assert log == log1 # create additional 19 log entries for a total of 20 for i in range(1971, 1990): d = datetime.datetime(i, 1, 1, 0, 0, 1) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 20) + assert LogEntry.objects.count() == 20 # Test ordering logs = LogEntry.objects.order_by("date") i = 0 while i < 19: - self.assertTrue(logs[i].date <= logs[i + 1].date) + assert logs[i].date <= logs[i + 1].date i += 1 logs = LogEntry.objects.order_by("-date") i = 0 while i < 19: - self.assertTrue(logs[i].date >= logs[i + 1].date) + assert logs[i].date >= logs[i + 1].date i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 def test_validation(self): """Ensure that invalid values cannot be assigned to datetime @@ -166,6 +169,8 @@ class TestDateField(MongoDBTestCase): log.validate() log.time = -1 - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() log.time = "ABC" - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index c911390a..8db491c6 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -11,6 +11,7 @@ from mongoengine import * from mongoengine import connection from tests.utils import MongoDBTestCase +import pytest class TestDateTimeField(MongoDBTestCase): @@ -24,7 +25,8 @@ class TestDateTimeField(MongoDBTestCase): dt = DateTimeField() md = MyDoc(dt="") - self.assertRaises(ValidationError, md.save) + with pytest.raises(ValidationError): + md.save() def test_datetime_from_whitespace_string(self): """ @@ -36,7 +38,8 @@ class TestDateTimeField(MongoDBTestCase): dt = DateTimeField() md = MyDoc(dt=" ") - self.assertRaises(ValidationError, md.save) + with pytest.raises(ValidationError): + md.save() def test_default_value_utcnow(self): """Ensure that default field values are used when creating @@ -50,11 +53,9 @@ class TestDateTimeField(MongoDBTestCase): person = Person() person.validate() person_created_t0 = person.created - self.assertLess(person.created - utcnow, dt.timedelta(seconds=1)) - self.assertEqual( - person_created_t0, person.created - ) # make sure it does not change - self.assertEqual(person._data["created"], person.created) + assert person.created - utcnow < dt.timedelta(seconds=1) + assert person_created_t0 == person.created # make sure it does not change + assert person._data["created"] == person.created def test_handling_microseconds(self): """Tests showing pymongo datetime fields handling of 
microseconds. @@ -74,7 +75,7 @@ class TestDateTimeField(MongoDBTestCase): log.date = dt.date.today() log.save() log.reload() - self.assertEqual(log.date.date(), dt.date.today()) + assert log.date.date() == dt.date.today() # Post UTC - microseconds are rounded (down) nearest millisecond and # dropped @@ -84,8 +85,8 @@ class TestDateTimeField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertNotEqual(log.date, d1) - self.assertEqual(log.date, d2) + assert log.date != d1 + assert log.date == d2 # Post UTC - microseconds are rounded (down) nearest millisecond d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999) @@ -93,8 +94,8 @@ class TestDateTimeField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertNotEqual(log.date, d1) - self.assertEqual(log.date, d2) + assert log.date != d1 + assert log.date == d2 if not six.PY3: # Pre UTC dates microseconds below 1000 are dropped @@ -104,8 +105,8 @@ class TestDateTimeField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertNotEqual(log.date, d1) - self.assertEqual(log.date, d2) + assert log.date != d1 + assert log.date == d2 def test_regular_usage(self): """Tests for regular datetime fields""" @@ -123,43 +124,43 @@ class TestDateTimeField(MongoDBTestCase): for query in (d1, d1.isoformat(" ")): log1 = LogEntry.objects.get(date=query) - self.assertEqual(log, log1) + assert log == log1 if dateutil: log1 = LogEntry.objects.get(date=d1.isoformat("T")) - self.assertEqual(log, log1) + assert log == log1 # create additional 19 log entries for a total of 20 for i in range(1971, 1990): d = dt.datetime(i, 1, 1, 0, 0, 1) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 20) + assert LogEntry.objects.count() == 20 # Test ordering logs = LogEntry.objects.order_by("date") i = 0 while i < 19: - self.assertTrue(logs[i].date <= logs[i + 1].date) + assert logs[i].date <= logs[i + 1].date i += 1 logs = LogEntry.objects.order_by("-date") i = 0 while i < 19: - self.assertTrue(logs[i].date >= logs[i + 1].date) + assert logs[i].date >= logs[i + 1].date i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 logs = LogEntry.objects.filter( date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1) ) - self.assertEqual(logs.count(), 5) + assert logs.count() == 5 def test_datetime_validation(self): """Ensure that invalid values cannot be assigned to datetime @@ -187,15 +188,20 @@ class TestDateTimeField(MongoDBTestCase): log.validate() log.time = -1 - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() log.time = "ABC" - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() log.time = "2019-05-16 21:GARBAGE:12" - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() log.time = "2019-05-16 21:42:57.GARBAGE" - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() log.time = "2019-05-16 21:42:57.123.456" - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() def test_parse_datetime_as_str(self): class DTDoc(Document): @@ -206,15 +212,16 @@ class TestDateTimeField(MongoDBTestCase): # make sure that passing a parsable datetime 
works dtd = DTDoc() dtd.date = date_str - self.assertIsInstance(dtd.date, six.string_types) + assert isinstance(dtd.date, six.string_types) dtd.save() dtd.reload() - self.assertIsInstance(dtd.date, dt.datetime) - self.assertEqual(str(dtd.date), date_str) + assert isinstance(dtd.date, dt.datetime) + assert str(dtd.date) == date_str dtd.date = "January 1st, 9999999999" - self.assertRaises(ValidationError, dtd.validate) + with pytest.raises(ValidationError): + dtd.validate() class TestDateTimeTzAware(MongoDBTestCase): @@ -235,4 +242,4 @@ class TestDateTimeTzAware(MongoDBTestCase): log = LogEntry.objects.first() log.time = dt.datetime(2013, 1, 1, 0, 0, 0) - self.assertEqual(["time"], log._changed_fields) + assert ["time"] == log._changed_fields diff --git a/tests/fields/test_decimal_field.py b/tests/fields/test_decimal_field.py index 30b7e5ea..b5b95363 100644 --- a/tests/fields/test_decimal_field.py +++ b/tests/fields/test_decimal_field.py @@ -4,6 +4,7 @@ from decimal import Decimal from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestDecimalField(MongoDBTestCase): @@ -18,21 +19,26 @@ class TestDecimalField(MongoDBTestCase): Person(height=Decimal("1.89")).save() person = Person.objects.first() - self.assertEqual(person.height, Decimal("1.89")) + assert person.height == Decimal("1.89") person.height = "2.0" person.save() person.height = 0.01 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.height = Decimal("0.01") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.height = Decimal("4.0") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.height = "something invalid" - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person_2 = Person(height="something invalid") - self.assertRaises(ValidationError, person_2.validate) + with pytest.raises(ValidationError): + person_2.validate() def test_comparison(self): class Person(Document): @@ -45,11 +51,11 @@ class TestDecimalField(MongoDBTestCase): Person(money=8).save() Person(money=10).save() - self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count()) - self.assertEqual(2, Person.objects(money__gt=7).count()) - self.assertEqual(2, Person.objects(money__gt="7").count()) + assert 2 == Person.objects(money__gt=Decimal("7")).count() + assert 2 == Person.objects(money__gt=7).count() + assert 2 == Person.objects(money__gt="7").count() - self.assertEqual(3, Person.objects(money__gte="7").count()) + assert 3 == Person.objects(money__gte="7").count() def test_storage(self): class Person(Document): @@ -87,7 +93,7 @@ class TestDecimalField(MongoDBTestCase): ] expected.extend(expected) actual = list(Person.objects.exclude("id").as_pymongo()) - self.assertEqual(expected, actual) + assert expected == actual # How it comes out locally expected = [ @@ -101,4 +107,4 @@ class TestDecimalField(MongoDBTestCase): expected.extend(expected) for field_name in ["float_value", "string_value"]: actual = list(Person.objects().scalar(field_name)) - self.assertEqual(expected, actual) + assert expected == actual diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index 07bab85b..56df682f 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -3,6 +3,7 @@ from mongoengine import * from mongoengine.base import 
BaseDict from tests.utils import MongoDBTestCase, get_as_pymongo +import pytest class TestDictField(MongoDBTestCase): @@ -14,7 +15,7 @@ class TestDictField(MongoDBTestCase): info = {"testkey": "testvalue"} post = BlogPost(info=info).save() - self.assertEqual(get_as_pymongo(post), {"_id": post.id, "info": info}) + assert get_as_pymongo(post) == {"_id": post.id, "info": info} def test_general_things(self): """Ensure that dict types work as expected.""" @@ -26,25 +27,32 @@ class TestDictField(MongoDBTestCase): post = BlogPost() post.info = "my post" - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = ["test", "test"] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = {"$title": "test"} - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = {"nested": {"$title": "test"}} - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = {"the.title": "test"} - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = {"nested": {"the.title": "test"}} - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = {1: "test"} - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = {"title": "test"} post.save() @@ -61,33 +69,27 @@ class TestDictField(MongoDBTestCase): post.info = {"details": {"test": 3}} post.save() - self.assertEqual(BlogPost.objects.count(), 4) - self.assertEqual(BlogPost.objects.filter(info__title__exact="test").count(), 1) - self.assertEqual( - BlogPost.objects.filter(info__details__test__exact="test").count(), 1 - ) + assert BlogPost.objects.count() == 4 + assert BlogPost.objects.filter(info__title__exact="test").count() == 1 + assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1 post = BlogPost.objects.filter(info__title__exact="dollar_sign").first() - self.assertIn("te$t", post["info"]["details"]) + assert "te$t" in post["info"]["details"] # Confirm handles non strings or non existing keys - self.assertEqual( - BlogPost.objects.filter(info__details__test__exact=5).count(), 0 - ) - self.assertEqual( - BlogPost.objects.filter(info__made_up__test__exact="test").count(), 0 - ) + assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0 + assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0 post = BlogPost.objects.create(info={"title": "original"}) post.info.update({"title": "updated"}) post.save() post.reload() - self.assertEqual("updated", post.info["title"]) + assert "updated" == post.info["title"] post.info.setdefault("authors", []) post.save() post.reload() - self.assertEqual([], post.info["authors"]) + assert [] == post.info["authors"] def test_dictfield_dump_document(self): """Ensure a DictField can handle another document's dump.""" @@ -114,10 +116,8 @@ class TestDictField(MongoDBTestCase): ).save() doc = Doc(field=to_embed.to_mongo().to_dict()) doc.save() - self.assertIsInstance(doc.field, dict) - self.assertEqual( - doc.field, {"_id": 2, "recursive": {"_id": 1, "recursive": {}}} - ) + assert isinstance(doc.field, dict) + assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": {}}} # Same thing with a Document with a _cls field to_embed_recursive = 
ToEmbedChild(id=1).save() to_embed_child = ToEmbedChild( @@ -125,7 +125,7 @@ class TestDictField(MongoDBTestCase): ).save() doc = Doc(field=to_embed_child.to_mongo().to_dict()) doc.save() - self.assertIsInstance(doc.field, dict) + assert isinstance(doc.field, dict) expected = { "_id": 2, "_cls": "ToEmbedParent.ToEmbedChild", @@ -135,7 +135,7 @@ class TestDictField(MongoDBTestCase): "recursive": {}, }, } - self.assertEqual(doc.field, expected) + assert doc.field == expected def test_dictfield_strict(self): """Ensure that dict field handles validation if provided a strict field type.""" @@ -150,7 +150,7 @@ class TestDictField(MongoDBTestCase): e.save() # try creating an invalid mapping - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): e.mapping["somestring"] = "abc" e.save() @@ -184,22 +184,21 @@ class TestDictField(MongoDBTestCase): e.save() e2 = Simple.objects.get(id=e.id) - self.assertIsInstance(e2.mapping["somestring"], StringSetting) - self.assertIsInstance(e2.mapping["someint"], IntegerSetting) + assert isinstance(e2.mapping["somestring"], StringSetting) + assert isinstance(e2.mapping["someint"], IntegerSetting) # Test querying - self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__number=1).count(), 1 + assert Simple.objects.filter(mapping__someint__value=42).count() == 1 + assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1 + assert ( + Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1 ) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1 + assert ( + Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1 ) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1 - ) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 1 + assert ( + Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count() + == 1 ) # Confirm can update @@ -207,11 +206,13 @@ class TestDictField(MongoDBTestCase): Simple.objects().update( set__mapping__nested_dict__list__1=StringSetting(value="Boo") ) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count(), 0 + assert ( + Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count() + == 0 ) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count(), 1 + assert ( + Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count() + == 1 ) def test_push_dict(self): @@ -221,12 +222,12 @@ class TestDictField(MongoDBTestCase): doc = MyModel(events=[{"a": 1}]).save() raw_doc = get_as_pymongo(doc) expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]} - self.assertEqual(raw_doc, expected_raw_doc) + assert raw_doc == expected_raw_doc MyModel.objects(id=doc.id).update(push__events={}) raw_doc = get_as_pymongo(doc) expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]} - self.assertEqual(raw_doc, expected_raw_doc) + assert raw_doc == expected_raw_doc def test_ensure_unique_default_instances(self): """Ensure that every field has it's own unique default instance.""" @@ -239,8 +240,8 @@ class TestDictField(MongoDBTestCase): d1.data["foo"] = "bar" d1.data2["foo"] = "bar" d2 = D() - self.assertEqual(d2.data, {}) - self.assertEqual(d2.data2, {}) + assert d2.data == {} + assert d2.data2 == {} def test_dict_field_invalid_dict_value(self): class 
DictFieldTest(Document): @@ -250,11 +251,13 @@ class TestDictField(MongoDBTestCase): test = DictFieldTest(dictionary=None) test.dictionary # Just access to test getter - self.assertRaises(ValidationError, test.validate) + with pytest.raises(ValidationError): + test.validate() test = DictFieldTest(dictionary=False) test.dictionary # Just access to test getter - self.assertRaises(ValidationError, test.validate) + with pytest.raises(ValidationError): + test.validate() def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self): class DictFieldTest(Document): @@ -267,12 +270,10 @@ class TestDictField(MongoDBTestCase): embed = Embedded(name="garbage") doc = DictFieldTest(dictionary=embed) - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as ctx_err: doc.validate() - self.assertIn("'dictionary'", str(ctx_err.exception)) - self.assertIn( - "Only dictionaries may be used in a DictField", str(ctx_err.exception) - ) + assert "'dictionary'" in str(ctx_err.exception) + assert "Only dictionaries may be used in a DictField" in str(ctx_err.exception) def test_atomic_update_dict_field(self): """Ensure that the entire DictField can be atomically updated.""" @@ -287,11 +288,11 @@ class TestDictField(MongoDBTestCase): e.save() e.update(set__mapping={"ints": [3, 4]}) e.reload() - self.assertEqual(BaseDict, type(e.mapping)) - self.assertEqual({"ints": [3, 4]}, e.mapping) + assert BaseDict == type(e.mapping) + assert {"ints": [3, 4]} == e.mapping # try creating an invalid mapping - with self.assertRaises(ValueError): + with pytest.raises(ValueError): e.update(set__mapping={"somestrings": ["foo", "bar"]}) def test_dictfield_with_referencefield_complex_nesting_cases(self): @@ -329,13 +330,13 @@ class TestDictField(MongoDBTestCase): e.save() s = Simple.objects.first() - self.assertIsInstance(s.mapping0["someint"], Doc) - self.assertIsInstance(s.mapping1["someint"], Doc) - self.assertIsInstance(s.mapping2["someint"][0], Doc) - self.assertIsInstance(s.mapping3["someint"][0], Doc) - self.assertIsInstance(s.mapping4["someint"]["d"], Doc) - self.assertIsInstance(s.mapping5["someint"]["d"], Doc) - self.assertIsInstance(s.mapping6["someint"][0]["d"], Doc) - self.assertIsInstance(s.mapping7["someint"][0]["d"], Doc) - self.assertIsInstance(s.mapping8["someint"][0]["d"][0], Doc) - self.assertIsInstance(s.mapping9["someint"][0]["d"][0], Doc) + assert isinstance(s.mapping0["someint"], Doc) + assert isinstance(s.mapping1["someint"], Doc) + assert isinstance(s.mapping2["someint"][0], Doc) + assert isinstance(s.mapping3["someint"][0], Doc) + assert isinstance(s.mapping4["someint"]["d"], Doc) + assert isinstance(s.mapping5["someint"]["d"], Doc) + assert isinstance(s.mapping6["someint"][0]["d"], Doc) + assert isinstance(s.mapping7["someint"][0]["d"], Doc) + assert isinstance(s.mapping8["someint"][0]["d"][0], Doc) + assert isinstance(s.mapping9["someint"][0]["d"][0], Doc) diff --git a/tests/fields/test_email_field.py b/tests/fields/test_email_field.py index 06ec5151..b8d3d169 100644 --- a/tests/fields/test_email_field.py +++ b/tests/fields/test_email_field.py @@ -5,6 +5,7 @@ from unittest import SkipTest from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestEmailField(MongoDBTestCase): @@ -27,7 +28,8 @@ class TestEmailField(MongoDBTestCase): user.validate() user = User(email="ross@example.com.") - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() # unicode domain user = 
User(email=u"user@пример.рф") @@ -35,11 +37,13 @@ class TestEmailField(MongoDBTestCase): # invalid unicode domain user = User(email=u"user@пример") - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() # invalid data type user = User(email=123) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() def test_email_field_unicode_user(self): # Don't run this test on pypy3, which doesn't support unicode regex: @@ -52,7 +56,8 @@ class TestEmailField(MongoDBTestCase): # unicode user shouldn't validate by default... user = User(email=u"Dörte@Sörensen.example.com") - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() # ...but it should be fine with allow_utf8_user set to True class User(Document): @@ -67,7 +72,8 @@ class TestEmailField(MongoDBTestCase): # localhost domain shouldn't validate by default... user = User(email="me@localhost") - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() # ...but it should be fine if it's whitelisted class User(Document): @@ -82,9 +88,9 @@ class TestEmailField(MongoDBTestCase): invalid_idn = ".google.com" user = User(email="me@%s" % invalid_idn) - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as ctx_err: user.validate() - self.assertIn("domain failed IDN encoding", str(ctx_err.exception)) + assert "domain failed IDN encoding" in str(ctx_err.exception) def test_email_field_ip_domain(self): class User(Document): @@ -96,13 +102,16 @@ class TestEmailField(MongoDBTestCase): # IP address as a domain shouldn't validate by default... user = User(email=valid_ipv4) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() user = User(email=valid_ipv6) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() user = User(email=invalid_ip) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() # ...but it should be fine with allow_ip_domain set to True class User(Document): @@ -116,7 +125,8 @@ class TestEmailField(MongoDBTestCase): # invalid IP should still fail validation user = User(email=invalid_ip) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() def test_email_field_honors_regex(self): class User(Document): @@ -124,8 +134,9 @@ class TestEmailField(MongoDBTestCase): # Fails regex validation user = User(email="me@foo.com") - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() # Passes regex validation user = User(email="me@example.com") - self.assertIsNone(user.validate()) + assert user.validate() is None diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index 8db8c180..4fcf6bf1 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -13,6 +13,7 @@ from mongoengine import ( ) from tests.utils import MongoDBTestCase +import pytest class TestEmbeddedDocumentField(MongoDBTestCase): @@ -21,13 +22,13 @@ class TestEmbeddedDocumentField(MongoDBTestCase): name = StringField() field = EmbeddedDocumentField(MyDoc) - self.assertEqual(field.document_type_obj, MyDoc) + assert field.document_type_obj == MyDoc field2 = EmbeddedDocumentField("MyDoc") - 
self.assertEqual(field2.document_type_obj, "MyDoc") + assert field2.document_type_obj == "MyDoc" def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self): - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): EmbeddedDocumentField(dict) def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self): @@ -35,11 +36,11 @@ class TestEmbeddedDocumentField(MongoDBTestCase): name = StringField() emb = EmbeddedDocumentField("MyDoc") - with self.assertRaises(ValidationError) as ctx: + with pytest.raises(ValidationError) as ctx: emb.document_type - self.assertIn( - "Invalid embedded document class provided to an EmbeddedDocumentField", - str(ctx.exception), + assert ( + "Invalid embedded document class provided to an EmbeddedDocumentField" + in str(ctx.exception) ) def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self): @@ -47,12 +48,12 @@ class TestEmbeddedDocumentField(MongoDBTestCase): class MyDoc(Document): name = StringField() - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): class MyFailingDoc(Document): emb = EmbeddedDocumentField(MyDoc) - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): class MyFailingdoc2(Document): emb = EmbeddedDocumentField("MyDoc") @@ -71,24 +72,24 @@ class TestEmbeddedDocumentField(MongoDBTestCase): p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: + with pytest.raises(InvalidQueryError) as ctx_err: Person.objects(settings__notexist="bar").first() - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' - with self.assertRaises(LookUpError): + with pytest.raises(LookUpError): Person.objects.only("settings.notexist") # Test existing attribute - self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p.id) + assert Person.objects(settings__foo1="bar1").first().id == p.id only_p = Person.objects.only("settings.foo1").first() - self.assertEqual(only_p.settings.foo1, p.settings.foo1) - self.assertIsNone(only_p.settings.foo2) - self.assertIsNone(only_p.name) + assert only_p.settings.foo1 == p.settings.foo1 + assert only_p.settings.foo2 is None + assert only_p.name is None exclude_p = Person.objects.exclude("settings.foo1").first() - self.assertIsNone(exclude_p.settings.foo1) - self.assertEqual(exclude_p.settings.foo2, p.settings.foo2) - self.assertEqual(exclude_p.name, p.name) + assert exclude_p.settings.foo1 is None + assert exclude_p.settings.foo2 == p.settings.foo2 + assert exclude_p.name == p.name def test_query_embedded_document_attribute_with_inheritance(self): class BaseSettings(EmbeddedDocument): @@ -107,17 +108,17 @@ class TestEmbeddedDocumentField(MongoDBTestCase): p.save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: - self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id) - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + with pytest.raises(InvalidQueryError) as ctx_err: + assert Person.objects(settings__notexist="bar").first().id == p.id + assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' # Test existing attribute - self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id) - self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id) + 
assert Person.objects(settings__base_foo="basefoo").first().id == p.id + assert Person.objects(settings__sub_foo="subfoo").first().id == p.id only_p = Person.objects.only("settings.base_foo", "settings._cls").first() - self.assertEqual(only_p.settings.base_foo, "basefoo") - self.assertIsNone(only_p.settings.sub_foo) + assert only_p.settings.base_foo == "basefoo" + assert only_p.settings.sub_foo is None def test_query_list_embedded_document_with_inheritance(self): class Post(EmbeddedDocument): @@ -137,14 +138,14 @@ class TestEmbeddedDocumentField(MongoDBTestCase): record_text = Record(posts=[TextPost(content="a", title="foo")]).save() records = list(Record.objects(posts__author=record_movie.posts[0].author)) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].id, record_movie.id) + assert len(records) == 1 + assert records[0].id == record_movie.id records = list(Record.objects(posts__content=record_text.posts[0].content)) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].id, record_text.id) + assert len(records) == 1 + assert records[0].id == record_text.id - self.assertEqual(Record.objects(posts__title="foo").count(), 2) + assert Record.objects(posts__title="foo").count() == 2 class TestGenericEmbeddedDocumentField(MongoDBTestCase): @@ -167,13 +168,13 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): person.save() person = Person.objects.first() - self.assertIsInstance(person.like, Car) + assert isinstance(person.like, Car) person.like = Dish(food="arroz", number=15) person.save() person = Person.objects.first() - self.assertIsInstance(person.like, Dish) + assert isinstance(person.like, Dish) def test_generic_embedded_document_choices(self): """Ensure you can limit GenericEmbeddedDocument choices.""" @@ -193,13 +194,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): person = Person(name="Test User") person.like = Car(name="Fiat") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.like = Dish(food="arroz", number=15) person.save() person = Person.objects.first() - self.assertIsInstance(person.like, Dish) + assert isinstance(person.like, Dish) def test_generic_list_embedded_document_choices(self): """Ensure you can limit GenericEmbeddedDocument choices inside @@ -221,13 +223,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): person = Person(name="Test User") person.likes = [Car(name="Fiat")] - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.likes = [Dish(food="arroz", number=15)] person.save() person = Person.objects.first() - self.assertIsInstance(person.likes[0], Dish) + assert isinstance(person.likes[0], Dish) def test_choices_validation_documents(self): """ @@ -263,7 +266,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): # Single Entry Failure post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")]) - self.assertRaises(ValidationError, post.save) + with pytest.raises(ValidationError): + post.save() # Mixed Entry Failure post = BlogPost( @@ -272,7 +276,8 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): UserComments(author="user2", message="message2"), ] ) - self.assertRaises(ValidationError, post.save) + with pytest.raises(ValidationError): + post.save() def test_choices_validation_documents_inheritance(self): """ @@ -311,16 +316,16 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): p2 = 
Person(settings=NonAdminSettings(foo2="bar2")).save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: + with pytest.raises(InvalidQueryError) as ctx_err: Person.objects(settings__notexist="bar").first() - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' - with self.assertRaises(LookUpError): + with pytest.raises(LookUpError): Person.objects.only("settings.notexist") # Test existing attribute - self.assertEqual(Person.objects(settings__foo1="bar1").first().id, p1.id) - self.assertEqual(Person.objects(settings__foo2="bar2").first().id, p2.id) + assert Person.objects(settings__foo1="bar1").first().id == p1.id + assert Person.objects(settings__foo2="bar2").first().id == p2.id def test_query_generic_embedded_document_attribute_with_inheritance(self): class BaseSettings(EmbeddedDocument): @@ -339,10 +344,10 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): p.save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: - self.assertEqual(Person.objects(settings__notexist="bar").first().id, p.id) - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + with pytest.raises(InvalidQueryError) as ctx_err: + assert Person.objects(settings__notexist="bar").first().id == p.id + assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' # Test existing attribute - self.assertEqual(Person.objects(settings__base_foo="basefoo").first().id, p.id) - self.assertEqual(Person.objects(settings__sub_foo="subfoo").first().id, p.id) + assert Person.objects(settings__base_foo="basefoo").first().id == p.id + assert Person.objects(settings__sub_foo="subfoo").first().id == p.id diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index bd2149e6..b27d95d2 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -39,6 +39,7 @@ from mongoengine.base import BaseField, EmbeddedDocumentList, _document_registry from mongoengine.errors import DeprecatedError from tests.utils import MongoDBTestCase +import pytest class TestField(MongoDBTestCase): @@ -58,25 +59,25 @@ class TestField(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "day", "name", "userid"]) + assert data_to_be_saved == ["age", "created", "day", "name", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, person.name) - self.assertEqual(person.age, person.age) - self.assertEqual(person.userid, person.userid) - self.assertEqual(person.created, person.created) - self.assertEqual(person.day, person.day) + assert person.name == person.name + assert person.age == person.age + assert person.userid == person.userid + assert person.created == person.created + assert person.day == person.day - self.assertEqual(person._data["name"], person.name) - self.assertEqual(person._data["age"], person.age) - self.assertEqual(person._data["userid"], person.userid) - self.assertEqual(person._data["created"], person.created) - self.assertEqual(person._data["day"], person.day) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created + assert person._data["day"] == person.day # Confirm introspection changes nothing 
data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "day", "name", "userid"]) + assert data_to_be_saved == ["age", "created", "day", "name", "userid"] def test_custom_field_validation_raise_deprecated_error_when_validation_return_something( self, @@ -95,13 +96,13 @@ class TestField(MongoDBTestCase): "it should raise a ValidationError if validation fails" ) - with self.assertRaises(DeprecatedError) as ctx_err: + with pytest.raises(DeprecatedError) as ctx_err: Person(name="").validate() - self.assertEqual(str(ctx_err.exception), error) + assert str(ctx_err.exception) == error - with self.assertRaises(DeprecatedError) as ctx_err: + with pytest.raises(DeprecatedError) as ctx_err: Person(name="").save() - self.assertEqual(str(ctx_err.exception), error) + assert str(ctx_err.exception) == error def test_custom_field_validation_raise_validation_error(self): def _not_empty(z): @@ -113,18 +114,16 @@ class TestField(MongoDBTestCase): Person.drop_collection() - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as ctx_err: Person(name="").validate() - self.assertEqual( - "ValidationError (Person:None) (cantbeempty: ['name'])", - str(ctx_err.exception), + assert "ValidationError (Person:None) (cantbeempty: ['name'])" == str( + ctx_err.exception ) - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): Person(name="").save() - self.assertEqual( - "ValidationError (Person:None) (cantbeempty: ['name'])", - str(ctx_err.exception), + assert "ValidationError (Person:None) (cantbeempty: ['name'])" == str( + ctx_err.exception ) Person(name="garbage").validate() @@ -146,23 +145,23 @@ class TestField(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) + assert data_to_be_saved == ["age", "created", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, person.name) - self.assertEqual(person.age, person.age) - self.assertEqual(person.userid, person.userid) - self.assertEqual(person.created, person.created) + assert person.name == person.name + assert person.age == person.age + assert person.userid == person.userid + assert person.created == person.created - self.assertEqual(person._data["name"], person.name) - self.assertEqual(person._data["age"], person.age) - self.assertEqual(person._data["userid"], person.userid) - self.assertEqual(person._data["created"], person.created) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) + assert data_to_be_saved == ["age", "created", "userid"] def test_default_values_when_setting_to_None(self): """Ensure that default field values are used when creating @@ -183,23 +182,23 @@ class TestField(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) + assert data_to_be_saved == ["age", "created", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, None) - self.assertEqual(person.age, 30) - 
self.assertEqual(person.userid, "test") - self.assertIsInstance(person.created, datetime.datetime) + assert person.name == None + assert person.age == 30 + assert person.userid == "test" + assert isinstance(person.created, datetime.datetime) - self.assertEqual(person._data["name"], person.name) - self.assertEqual(person._data["age"], person.age) - self.assertEqual(person._data["userid"], person.userid) - self.assertEqual(person._data["created"], person.created) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) + assert data_to_be_saved == ["age", "created", "userid"] def test_default_value_is_not_used_when_changing_value_to_empty_list_for_strict_doc( self, @@ -213,7 +212,7 @@ class TestField(MongoDBTestCase): doc.x = [] doc.save() reloaded = Doc.objects.get(id=doc.id) - self.assertEqual(reloaded.x, []) + assert reloaded.x == [] def test_default_value_is_not_used_when_changing_value_to_empty_list_for_dyn_doc( self, @@ -228,7 +227,7 @@ class TestField(MongoDBTestCase): doc.y = 2 # Was triggering the bug doc.save() reloaded = Doc.objects.get(id=doc.id) - self.assertEqual(reloaded.x, []) + assert reloaded.x == [] def test_default_values_when_deleting_value(self): """Ensure that default field values are used after non-default @@ -253,24 +252,24 @@ class TestField(MongoDBTestCase): del person.created data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) + assert data_to_be_saved == ["age", "created", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, None) - self.assertEqual(person.age, 30) - self.assertEqual(person.userid, "test") - self.assertIsInstance(person.created, datetime.datetime) - self.assertNotEqual(person.created, datetime.datetime(2014, 6, 12)) + assert person.name == None + assert person.age == 30 + assert person.userid == "test" + assert isinstance(person.created, datetime.datetime) + assert person.created != datetime.datetime(2014, 6, 12) - self.assertEqual(person._data["name"], person.name) - self.assertEqual(person._data["age"], person.age) - self.assertEqual(person._data["userid"], person.userid) - self.assertEqual(person._data["created"], person.created) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ["age", "created", "userid"]) + assert data_to_be_saved == ["age", "created", "userid"] def test_required_values(self): """Ensure that required field constraints are enforced.""" @@ -281,9 +280,11 @@ class TestField(MongoDBTestCase): userid = StringField() person = Person(name="Test User") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person = Person(age=30) - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() def test_not_required_handles_none_in_update(self): """Ensure that every fields should accept None if required is @@ -311,15 +312,15 @@ class 
TestField(MongoDBTestCase): set__flt_fld=None, set__comp_dt_fld=None, ) - self.assertEqual(res, 1) + assert res == 1 # Retrive data from db and verify it. ret = HandleNoneFields.objects.all()[0] - self.assertIsNone(ret.str_fld) - self.assertIsNone(ret.int_fld) - self.assertIsNone(ret.flt_fld) + assert ret.str_fld is None + assert ret.int_fld is None + assert ret.flt_fld is None - self.assertIsNone(ret.comp_dt_fld) + assert ret.comp_dt_fld is None def test_not_required_handles_none_from_database(self): """Ensure that every field can handle null values from the @@ -349,14 +350,15 @@ class TestField(MongoDBTestCase): # Retrive data from db and verify it. ret = HandleNoneFields.objects.first() - self.assertIsNone(ret.str_fld) - self.assertIsNone(ret.int_fld) - self.assertIsNone(ret.flt_fld) - self.assertIsNone(ret.comp_dt_fld) + assert ret.str_fld is None + assert ret.int_fld is None + assert ret.flt_fld is None + assert ret.comp_dt_fld is None # Retrieved object shouldn't pass validation when a re-save is # attempted. - self.assertRaises(ValidationError, ret.validate) + with pytest.raises(ValidationError): + ret.validate() def test_default_id_validation_as_objectid(self): """Ensure that invalid values cannot be assigned to an @@ -367,13 +369,15 @@ class TestField(MongoDBTestCase): name = StringField() person = Person(name="Test User") - self.assertEqual(person.id, None) + assert person.id == None person.id = 47 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.id = "abc" - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.id = str(ObjectId()) person.validate() @@ -386,19 +390,22 @@ class TestField(MongoDBTestCase): userid = StringField(r"[0-9a-z_]+$") person = Person(name=34) - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() # Test regex validation on userid person = Person(userid="test.User") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.userid = "test_user" - self.assertEqual(person.userid, "test_user") + assert person.userid == "test_user" person.validate() # Test max length validation on name person = Person(name="Name that is more than twenty characters") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.name = "Shorter name" person.validate() @@ -407,19 +414,19 @@ class TestField(MongoDBTestCase): """Ensure that db_field doesn't accept invalid values.""" # dot in the name - with self.assertRaises(ValueError): + with pytest.raises(ValueError): class User(Document): name = StringField(db_field="user.name") # name starting with $ - with self.assertRaises(ValueError): + with pytest.raises(ValueError): class UserX1(Document): name = StringField(db_field="$name") # name containing a null character - with self.assertRaises(ValueError): + with pytest.raises(ValueError): class UserX2(Document): name = StringField(db_field="name\0") @@ -455,9 +462,11 @@ class TestField(MongoDBTestCase): post.validate() post.tags = "fun" - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.tags = [1, 2] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.tags = ["fun", "leisure"] post.validate() @@ -465,30 +474,36 @@ class TestField(MongoDBTestCase): 
post.validate() post.access_list = "a,b" - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.access_list = ["c", "d"] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.access_list = ["a", "b"] post.validate() - self.assertEqual(post.get_access_list_display(), u"Administration, Manager") + assert post.get_access_list_display() == u"Administration, Manager" post.comments = ["a"] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.comments = "yay" - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() comments = [Comment(content="Good for you"), Comment(content="Yay.")] post.comments = comments post.validate() post.authors = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.authors = [User()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() user = User() user.save() @@ -496,34 +511,42 @@ class TestField(MongoDBTestCase): post.validate() post.authors_as_lazy = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.authors_as_lazy = [User()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.authors_as_lazy = [user] post.validate() post.generic = [1, 2] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic = [User(), Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic = [user] post.validate() post.generic_as_lazy = [1, 2] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic_as_lazy = [User(), Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic_as_lazy = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic_as_lazy = [user] post.validate() @@ -549,7 +572,7 @@ class TestField(MongoDBTestCase): post.tags = ["leisure", "fun"] post.save() post.reload() - self.assertEqual(post.tags, ["fun", "leisure"]) + assert post.tags == ["fun", "leisure"] comment1 = Comment(content="Good for you", order=1) comment2 = Comment(content="Yay.", order=0) @@ -557,15 +580,15 @@ class TestField(MongoDBTestCase): post.comments = comments post.save() post.reload() - self.assertEqual(post.comments[0].content, comment2.content) - self.assertEqual(post.comments[1].content, comment1.content) + assert post.comments[0].content == comment2.content + assert post.comments[1].content == comment1.content post.comments[0].order = 2 post.save() post.reload() - self.assertEqual(post.comments[0].content, comment1.content) - self.assertEqual(post.comments[1].content, comment2.content) + assert post.comments[0].content == comment1.content + assert post.comments[1].content == comment2.content def test_reverse_list_sorting(self): """Ensure that a reverse sorted list field properly sorts values""" @@ -590,9 +613,9 @@ class 
TestField(MongoDBTestCase): catlist.save() catlist.reload() - self.assertEqual(catlist.categories[0].name, cat2.name) - self.assertEqual(catlist.categories[1].name, cat3.name) - self.assertEqual(catlist.categories[2].name, cat1.name) + assert catlist.categories[0].name == cat2.name + assert catlist.categories[1].name == cat3.name + assert catlist.categories[2].name == cat1.name def test_list_field(self): """Ensure that list types work as expected.""" @@ -604,10 +627,12 @@ class TestField(MongoDBTestCase): post = BlogPost() post.info = "my post" - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = {"title": "test"} - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.info = ["test"] post.save() @@ -620,15 +645,13 @@ class TestField(MongoDBTestCase): post.info = [{"test": 3}] post.save() - self.assertEqual(BlogPost.objects.count(), 3) - self.assertEqual(BlogPost.objects.filter(info__exact="test").count(), 1) - self.assertEqual(BlogPost.objects.filter(info__0__test="test").count(), 1) + assert BlogPost.objects.count() == 3 + assert BlogPost.objects.filter(info__exact="test").count() == 1 + assert BlogPost.objects.filter(info__0__test="test").count() == 1 # Confirm handles non strings or non existing keys - self.assertEqual(BlogPost.objects.filter(info__0__test__exact="5").count(), 0) - self.assertEqual( - BlogPost.objects.filter(info__100__test__exact="test").count(), 0 - ) + assert BlogPost.objects.filter(info__0__test__exact="5").count() == 0 + assert BlogPost.objects.filter(info__100__test__exact="test").count() == 0 # test queries by list post = BlogPost() @@ -637,12 +660,12 @@ class TestField(MongoDBTestCase): post = BlogPost.objects(info=["1", "2"]).get() post.info += ["3", "4"] post.save() - self.assertEqual(BlogPost.objects(info=["1", "2", "3", "4"]).count(), 1) + assert BlogPost.objects(info=["1", "2", "3", "4"]).count() == 1 post = BlogPost.objects(info=["1", "2", "3", "4"]).get() post.info *= 2 post.save() - self.assertEqual( - BlogPost.objects(info=["1", "2", "3", "4", "1", "2", "3", "4"]).count(), 1 + assert ( + BlogPost.objects(info=["1", "2", "3", "4", "1", "2", "3", "4"]).count() == 1 ) def test_list_field_manipulative_operators(self): @@ -670,165 +693,149 @@ class TestField(MongoDBTestCase): reset_post() temp = ["a", "b"] post.info = post.info + temp - self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] # '__delitem__(index)' # aka 'del list[index]' # aka 'operator.delitem(list, index)' reset_post() del post.info[2] # del from middle ('2') - self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) + assert post.info == ["0", "1", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) + assert post.info == ["0", "1", "3", "4", "5"] # '__delitem__(slice(i, j))' # aka 'del list[i:j]' # aka 'operator.delitem(list, slice(i,j))' reset_post() del post.info[1:3] # removes '1', '2' - self.assertEqual(post.info, ["0", "3", "4", "5"]) + assert post.info == ["0", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ["0", "3", "4", "5"]) + assert post.info == ["0", "3", "4", "5"] # '__iadd__' # aka 'list += list' reset_post() temp = ["a", "b"] post.info += temp - 
self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "a", "b"]) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] # '__imul__' # aka 'list *= number' reset_post() post.info *= 2 - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] post.save() post.reload() - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] # '__mul__' # aka 'listA*listB' reset_post() post.info = post.info * 2 - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] post.save() post.reload() - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] # '__rmul__' # aka 'listB*listA' reset_post() post.info = 2 * post.info - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] post.save() post.reload() - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] # '__setitem__(index, value)' # aka 'list[index]=value' # aka 'setitem(list, value)' reset_post() post.info[4] = "a" - self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) + assert post.info == ["0", "1", "2", "3", "a", "5"] post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) + assert post.info == ["0", "1", "2", "3", "a", "5"] # __setitem__(index, value) with a negative index reset_post() post.info[-2] = "a" - self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) + assert post.info == ["0", "1", "2", "3", "a", "5"] post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "2", "3", "a", "5"]) + assert post.info == ["0", "1", "2", "3", "a", "5"] # '__setitem__(slice(i, j), listB)' # aka 'listA[i:j] = listB' # aka 'setitem(listA, slice(i, j), listB)' reset_post() post.info[1:3] = ["h", "e", "l", "l", "o"] - self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] # '__setitem__(slice(i, j), listB)' with negative i and j reset_post() post.info[-5:-3] = ["h", "e", "l", "l", "o"] - self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ["0", "h", "e", "l", "l", "o", "3", "4", "5"]) + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] # negative # 'append' reset_post() post.info.append("h") - self.assertEqual(post.info, ["0", "1", "2", "3", "4", "5", "h"]) + assert post.info == ["0", "1", "2", "3", "4", "5", "h"] post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "2", "3", "4", 
"5", "h"]) + assert post.info == ["0", "1", "2", "3", "4", "5", "h"] # 'extend' reset_post() post.info.extend(["h", "e", "l", "l", "o"]) - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] post.save() post.reload() - self.assertEqual( - post.info, ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] - ) + assert post.info == ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] # 'insert' # 'pop' reset_post() x = post.info.pop(2) y = post.info.pop() - self.assertEqual(post.info, ["0", "1", "3", "4"]) - self.assertEqual(x, "2") - self.assertEqual(y, "5") + assert post.info == ["0", "1", "3", "4"] + assert x == "2" + assert y == "5" post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "3", "4"]) + assert post.info == ["0", "1", "3", "4"] # 'remove' reset_post() post.info.remove("2") - self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) + assert post.info == ["0", "1", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ["0", "1", "3", "4", "5"]) + assert post.info == ["0", "1", "3", "4", "5"] # 'reverse' reset_post() post.info.reverse() - self.assertEqual(post.info, ["5", "4", "3", "2", "1", "0"]) + assert post.info == ["5", "4", "3", "2", "1", "0"] post.save() post.reload() - self.assertEqual(post.info, ["5", "4", "3", "2", "1", "0"]) + assert post.info == ["5", "4", "3", "2", "1", "0"] # 'sort': though this operator method does manipulate the list, it is # tested in the 'test_list_field_lexicograpic_operators' function @@ -844,7 +851,8 @@ class TestField(MongoDBTestCase): # '__hash__' # aka 'hash(list)' - self.assertRaises(TypeError, lambda: hash(post.info)) + with pytest.raises(TypeError): + hash(post.info) def test_list_field_lexicographic_operators(self): """Ensure that ListField works with standard list operators that @@ -883,32 +891,32 @@ class TestField(MongoDBTestCase): blogLargeB.reload() # '__eq__' aka '==' - self.assertEqual(blogLargeA.text_info, blogLargeB.text_info) - self.assertEqual(blogLargeA.bool_info, blogLargeB.bool_info) + assert blogLargeA.text_info == blogLargeB.text_info + assert blogLargeA.bool_info == blogLargeB.bool_info # '__ge__' aka '>=' - self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) - self.assertGreaterEqual(blogLargeA.text_info, blogLargeB.text_info) - self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) - self.assertGreaterEqual(blogLargeA.bool_info, blogLargeB.bool_info) + assert blogLargeA.text_info >= blogSmall.text_info + assert blogLargeA.text_info >= blogLargeB.text_info + assert blogLargeA.bool_info >= blogSmall.bool_info + assert blogLargeA.bool_info >= blogLargeB.bool_info # '__gt__' aka '>' - self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) - self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) + assert blogLargeA.text_info >= blogSmall.text_info + assert blogLargeA.bool_info >= blogSmall.bool_info # '__le__' aka '<=' - self.assertLessEqual(blogSmall.text_info, blogLargeB.text_info) - self.assertLessEqual(blogLargeA.text_info, blogLargeB.text_info) - self.assertLessEqual(blogSmall.bool_info, blogLargeB.bool_info) - self.assertLessEqual(blogLargeA.bool_info, blogLargeB.bool_info) + assert blogSmall.text_info <= blogLargeB.text_info + assert blogLargeA.text_info <= blogLargeB.text_info + assert blogSmall.bool_info <= blogLargeB.bool_info + assert blogLargeA.bool_info <= blogLargeB.bool_info # '__lt__' aka '<' - 
self.assertLess(blogSmall.text_info, blogLargeB.text_info) - self.assertLess(blogSmall.bool_info, blogLargeB.bool_info) + assert blogSmall.text_info < blogLargeB.text_info + assert blogSmall.bool_info < blogLargeB.bool_info # '__ne__' aka '!=' - self.assertNotEqual(blogSmall.text_info, blogLargeB.text_info) - self.assertNotEqual(blogSmall.bool_info, blogLargeB.bool_info) + assert blogSmall.text_info != blogLargeB.text_info + assert blogSmall.bool_info != blogLargeB.bool_info # 'sort' blogLargeB.bool_info = [True, False, True, False] @@ -920,14 +928,14 @@ class TestField(MongoDBTestCase): ObjectId("54495ad94c934721ede76d23"), ObjectId("54495ad94c934721ede76f90"), ] - self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) - self.assertEqual(blogLargeB.oid_info, sorted_target_list) - self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + assert blogLargeB.text_info == ["a", "j", "z"] + assert blogLargeB.oid_info == sorted_target_list + assert blogLargeB.bool_info == [False, False, True, True] blogLargeB.save() blogLargeB.reload() - self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) - self.assertEqual(blogLargeB.oid_info, sorted_target_list) - self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + assert blogLargeB.text_info == ["a", "j", "z"] + assert blogLargeB.oid_info == sorted_target_list + assert blogLargeB.bool_info == [False, False, True, True] def test_list_assignment(self): """Ensure that list field element assignment and slicing work.""" @@ -944,37 +952,37 @@ class TestField(MongoDBTestCase): post.info[0] = 1 post.save() post.reload() - self.assertEqual(post.info[0], 1) + assert post.info[0] == 1 post.info[1:3] = ["n2", "n3"] post.save() post.reload() - self.assertEqual(post.info, [1, "n2", "n3", "4", 5]) + assert post.info == [1, "n2", "n3", "4", 5] post.info[-1] = "n5" post.save() post.reload() - self.assertEqual(post.info, [1, "n2", "n3", "4", "n5"]) + assert post.info == [1, "n2", "n3", "4", "n5"] post.info[-2] = 4 post.save() post.reload() - self.assertEqual(post.info, [1, "n2", "n3", 4, "n5"]) + assert post.info == [1, "n2", "n3", 4, "n5"] post.info[1:-1] = [2] post.save() post.reload() - self.assertEqual(post.info, [1, 2, "n5"]) + assert post.info == [1, 2, "n5"] post.info[:-1] = [1, "n2", "n3", 4] post.save() post.reload() - self.assertEqual(post.info, [1, "n2", "n3", 4, "n5"]) + assert post.info == [1, "n2", "n3", 4, "n5"] post.info[-4:3] = [2, 3] post.save() post.reload() - self.assertEqual(post.info, [1, 2, 3, 4, "n5"]) + assert post.info == [1, 2, 3, 4, "n5"] def test_list_field_passed_in_value(self): class Foo(Document): @@ -988,7 +996,7 @@ class TestField(MongoDBTestCase): foo = Foo(bars=[]) foo.bars.append(bar) - self.assertEqual(repr(foo.bars), "[]") + assert repr(foo.bars) == "[]" def test_list_field_strict(self): """Ensure that list field handles validation if provided @@ -1005,7 +1013,7 @@ class TestField(MongoDBTestCase): e.save() # try creating an invalid mapping - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): e.mapping = ["abc"] e.save() @@ -1021,9 +1029,9 @@ class TestField(MongoDBTestCase): if i < 6: foo.save() else: - with self.assertRaises(ValidationError) as cm: + with pytest.raises(ValidationError) as cm: foo.save() - self.assertIn("List is too long", str(cm.exception)) + assert "List is too long" in str(cm.exception) def test_list_field_max_length_set_operator(self): """Ensure ListField's max_length is respected for a "set" operator.""" @@ -1032,9 +1040,9 @@ class 
TestField(MongoDBTestCase): items = ListField(IntField(), max_length=3) foo = Foo.objects.create(items=[1, 2, 3]) - with self.assertRaises(ValidationError) as cm: + with pytest.raises(ValidationError) as cm: foo.modify(set__items=[1, 2, 3, 4]) - self.assertIn("List is too long", str(cm.exception)) + assert "List is too long" in str(cm.exception) def test_list_field_rejects_strings(self): """Strings aren't valid list field data types.""" @@ -1046,7 +1054,8 @@ class TestField(MongoDBTestCase): e = Simple() e.mapping = "hello world" - self.assertRaises(ValidationError, e.save) + with pytest.raises(ValidationError): + e.save() def test_complex_field_required(self): """Ensure required cant be None / Empty.""" @@ -1058,7 +1067,8 @@ class TestField(MongoDBTestCase): e = Simple() e.mapping = [] - self.assertRaises(ValidationError, e.save) + with pytest.raises(ValidationError): + e.save() class Simple(Document): mapping = DictField(required=True) @@ -1066,7 +1076,8 @@ class TestField(MongoDBTestCase): Simple.drop_collection() e = Simple() e.mapping = {} - self.assertRaises(ValidationError, e.save) + with pytest.raises(ValidationError): + e.save() def test_complex_field_same_value_not_changed(self): """If a complex field is set to the same value, it should not @@ -1080,7 +1091,7 @@ class TestField(MongoDBTestCase): e = Simple().save() e.mapping = [] - self.assertEqual([], e._changed_fields) + assert [] == e._changed_fields class Simple(Document): mapping = DictField() @@ -1089,7 +1100,7 @@ class TestField(MongoDBTestCase): e = Simple().save() e.mapping = {} - self.assertEqual([], e._changed_fields) + assert [] == e._changed_fields def test_slice_marks_field_as_changed(self): class Simple(Document): @@ -1097,11 +1108,11 @@ class TestField(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() simple.widgets[:3] = [] - self.assertEqual(["widgets"], simple._changed_fields) + assert ["widgets"] == simple._changed_fields simple.save() simple = simple.reload() - self.assertEqual(simple.widgets, [4]) + assert simple.widgets == [4] def test_del_slice_marks_field_as_changed(self): class Simple(Document): @@ -1109,11 +1120,11 @@ class TestField(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() del simple.widgets[:3] - self.assertEqual(["widgets"], simple._changed_fields) + assert ["widgets"] == simple._changed_fields simple.save() simple = simple.reload() - self.assertEqual(simple.widgets, [4]) + assert simple.widgets == [4] def test_list_field_with_negative_indices(self): class Simple(Document): @@ -1121,11 +1132,11 @@ class TestField(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() simple.widgets[-1] = 5 - self.assertEqual(["widgets.3"], simple._changed_fields) + assert ["widgets.3"] == simple._changed_fields simple.save() simple = simple.reload() - self.assertEqual(simple.widgets, [1, 2, 3, 5]) + assert simple.widgets == [1, 2, 3, 5] def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" @@ -1159,33 +1170,23 @@ class TestField(MongoDBTestCase): e.save() e2 = Simple.objects.get(id=e.id) - self.assertIsInstance(e2.mapping[0], StringSetting) - self.assertIsInstance(e2.mapping[1], IntegerSetting) + assert isinstance(e2.mapping[0], StringSetting) + assert isinstance(e2.mapping[1], IntegerSetting) # Test querying - self.assertEqual(Simple.objects.filter(mapping__1__value=42).count(), 1) - self.assertEqual(Simple.objects.filter(mapping__2__number=1).count(), 1) - self.assertEqual( - 
Simple.objects.filter(mapping__2__complex__value=42).count(), 1 - ) - self.assertEqual( - Simple.objects.filter(mapping__2__list__0__value=42).count(), 1 - ) - self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value="foo").count(), 1 - ) + assert Simple.objects.filter(mapping__1__value=42).count() == 1 + assert Simple.objects.filter(mapping__2__number=1).count() == 1 + assert Simple.objects.filter(mapping__2__complex__value=42).count() == 1 + assert Simple.objects.filter(mapping__2__list__0__value=42).count() == 1 + assert Simple.objects.filter(mapping__2__list__1__value="foo").count() == 1 # Confirm can update Simple.objects().update(set__mapping__1=IntegerSetting(value=10)) - self.assertEqual(Simple.objects.filter(mapping__1__value=10).count(), 1) + assert Simple.objects.filter(mapping__1__value=10).count() == 1 Simple.objects().update(set__mapping__2__list__1=StringSetting(value="Boo")) - self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value="foo").count(), 0 - ) - self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value="Boo").count(), 1 - ) + assert Simple.objects.filter(mapping__2__list__1__value="foo").count() == 0 + assert Simple.objects.filter(mapping__2__list__1__value="Boo").count() == 1 def test_embedded_db_field(self): class Embedded(EmbeddedDocument): @@ -1203,9 +1204,9 @@ class TestField(MongoDBTestCase): Test.objects.update_one(inc__embedded__number=1) test = Test.objects.get() - self.assertEqual(test.embedded.number, 2) + assert test.embedded.number == 2 doc = self.db.test.find_one() - self.assertEqual(doc["x"]["i"], 2) + assert doc["x"]["i"] == 2 def test_double_embedded_db_field(self): """Make sure multiple layers of embedded docs resolve db fields @@ -1242,7 +1243,7 @@ class TestField(MongoDBTestCase): b = EmbeddedDocumentField(B, db_field="fb") a = A._from_son(SON([("fb", SON([("fc", SON([("txt", "hi")]))]))])) - self.assertEqual(a.b.c.txt, "hi") + assert a.b.c.txt == "hi" def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet( self, @@ -1277,18 +1278,21 @@ class TestField(MongoDBTestCase): person = Person(name="Test User") person.preferences = "My Preferences" - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() # Check that only the right embedded doc works person.preferences = Comment(content="Nice blog post...") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() # Check that the embedded doc is valid person.preferences = PersonPreferences() - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.preferences = PersonPreferences(food="Cheese", number=47) - self.assertEqual(person.preferences.food, "Cheese") + assert person.preferences.food == "Cheese" person.validate() def test_embedded_document_inheritance(self): @@ -1314,7 +1318,7 @@ class TestField(MongoDBTestCase): post.author = PowerUser(name="Test User", power=47) post.save() - self.assertEqual(47, BlogPost.objects.first().author.power) + assert 47 == BlogPost.objects.first().author.power def test_embedded_document_inheritance_with_list(self): """Ensure that nested list of subclassed embedded documents is @@ -1339,7 +1343,7 @@ class TestField(MongoDBTestCase): foobar = User(groups=[group]) foobar.save() - self.assertEqual(content, User.objects.first().groups[0].content) + assert content == User.objects.first().groups[0].content def test_reference_miss(self): 
"""Ensure an exception is raised when dereferencing an unknown @@ -1362,16 +1366,18 @@ class TestField(MongoDBTestCase): # Reference is no longer valid foo.delete() bar = Bar.objects.get() - self.assertRaises(DoesNotExist, getattr, bar, "ref") - self.assertRaises(DoesNotExist, getattr, bar, "generic_ref") + with pytest.raises(DoesNotExist): + getattr(bar, "ref") + with pytest.raises(DoesNotExist): + getattr(bar, "generic_ref") # When auto_dereference is disabled, there is no trouble returning DBRef bar = Bar.objects.get() expected = foo.to_dbref() bar._fields["ref"]._auto_dereference = False - self.assertEqual(bar.ref, expected) + assert bar.ref == expected bar._fields["generic_ref"]._auto_dereference = False - self.assertEqual(bar.generic_ref, {"_ref": expected, "_cls": "Foo"}) + assert bar.generic_ref == {"_ref": expected, "_cls": "Foo"} def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. @@ -1396,8 +1402,8 @@ class TestField(MongoDBTestCase): group_obj = Group.objects.first() - self.assertEqual(group_obj.members[0].name, user1.name) - self.assertEqual(group_obj.members[1].name, user2.name) + assert group_obj.members[0].name == user1.name + assert group_obj.members[1].name == user2.name def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. @@ -1424,8 +1430,8 @@ class TestField(MongoDBTestCase): peter.save() peter = Employee.objects.with_id(peter.id) - self.assertEqual(peter.boss, bill) - self.assertEqual(peter.friends, friends) + assert peter.boss == bill + assert peter.friends == friends def test_recursive_embedding(self): """Ensure that EmbeddedDocumentFields can contain their own documents. @@ -1450,18 +1456,18 @@ class TestField(MongoDBTestCase): tree.save() tree = Tree.objects.first() - self.assertEqual(len(tree.children), 1) + assert len(tree.children) == 1 - self.assertEqual(len(tree.children[0].children), 1) + assert len(tree.children[0].children) == 1 third_child = TreeNode(name="Child 3") tree.children[0].children.append(third_child) tree.save() - self.assertEqual(len(tree.children), 1) - self.assertEqual(tree.children[0].name, first_child.name) - self.assertEqual(tree.children[0].children[0].name, second_child.name) - self.assertEqual(tree.children[0].children[1].name, third_child.name) + assert len(tree.children) == 1 + assert tree.children[0].name == first_child.name + assert tree.children[0].children[0].name == second_child.name + assert tree.children[0].children[1].name == third_child.name # Test updating tree.children[0].name = "I am Child 1" @@ -1469,28 +1475,28 @@ class TestField(MongoDBTestCase): tree.children[0].children[1].name = "I am Child 3" tree.save() - self.assertEqual(tree.children[0].name, "I am Child 1") - self.assertEqual(tree.children[0].children[0].name, "I am Child 2") - self.assertEqual(tree.children[0].children[1].name, "I am Child 3") + assert tree.children[0].name == "I am Child 1" + assert tree.children[0].children[0].name == "I am Child 2" + assert tree.children[0].children[1].name == "I am Child 3" # Test removal - self.assertEqual(len(tree.children[0].children), 2) + assert len(tree.children[0].children) == 2 del tree.children[0].children[1] tree.save() - self.assertEqual(len(tree.children[0].children), 1) + assert len(tree.children[0].children) == 1 tree.children[0].children.pop(0) tree.save() - self.assertEqual(len(tree.children[0].children), 0) - self.assertEqual(tree.children[0].children, []) + assert len(tree.children[0].children) == 0 + assert 
tree.children[0].children == [] tree.children[0].children.insert(0, third_child) tree.children[0].children.insert(0, second_child) tree.save() - self.assertEqual(len(tree.children[0].children), 2) - self.assertEqual(tree.children[0].children[0].name, second_child.name) - self.assertEqual(tree.children[0].children[1].name, third_child.name) + assert len(tree.children[0].children) == 2 + assert tree.children[0].children[0].name == second_child.name + assert tree.children[0].children[1].name == third_child.name def test_drop_abstract_document(self): """Ensure that an abstract document cannot be dropped given it @@ -1501,7 +1507,8 @@ class TestField(MongoDBTestCase): name = StringField() meta = {"abstract": True} - self.assertRaises(OperationError, AbstractDoc.drop_collection) + with pytest.raises(OperationError): + AbstractDoc.drop_collection() def test_reference_class_with_abstract_parent(self): """Ensure that a class with an abstract parent can be referenced. @@ -1525,7 +1532,7 @@ class TestField(MongoDBTestCase): brother = Brother(name="Bob", sibling=sister) brother.save() - self.assertEqual(Brother.objects[0].sibling.name, sister.name) + assert Brother.objects[0].sibling.name == sister.name def test_reference_abstract_class(self): """Ensure that an abstract class instance cannot be used in the @@ -1547,7 +1554,8 @@ class TestField(MongoDBTestCase): sister = Sibling(name="Alice") brother = Brother(name="Bob", sibling=sister) - self.assertRaises(ValidationError, brother.save) + with pytest.raises(ValidationError): + brother.save() def test_abstract_reference_base_type(self): """Ensure that an an abstract reference fails validation when given a @@ -1570,7 +1578,8 @@ class TestField(MongoDBTestCase): mother = Mother(name="Carol") mother.save() brother = Brother(name="Bob", sibling=mother) - self.assertRaises(ValidationError, brother.save) + with pytest.raises(ValidationError): + brother.save() def test_generic_reference(self): """Ensure that a GenericReferenceField properly dereferences items. @@ -1601,16 +1610,16 @@ class TestField(MongoDBTestCase): bm = Bookmark.objects(bookmark_object=post_1).first() - self.assertEqual(bm.bookmark_object, post_1) - self.assertIsInstance(bm.bookmark_object, Post) + assert bm.bookmark_object == post_1 + assert isinstance(bm.bookmark_object, Post) bm.bookmark_object = link_1 bm.save() bm = Bookmark.objects(bookmark_object=link_1).first() - self.assertEqual(bm.bookmark_object, link_1) - self.assertIsInstance(bm.bookmark_object, Link) + assert bm.bookmark_object == link_1 + assert isinstance(bm.bookmark_object, Link) def test_generic_reference_list(self): """Ensure that a ListField properly dereferences generic references. 
@@ -1640,8 +1649,8 @@ class TestField(MongoDBTestCase): user = User.objects(bookmarks__all=[post_1, link_1]).first() - self.assertEqual(user.bookmarks[0], post_1) - self.assertEqual(user.bookmarks[1], link_1) + assert user.bookmarks[0] == post_1 + assert user.bookmarks[1] == link_1 def test_generic_reference_document_not_registered(self): """Ensure dereferencing out of the document registry throws a @@ -1682,7 +1691,7 @@ class TestField(MongoDBTestCase): Person.drop_collection() Person(name="Wilson Jr").save() - self.assertEqual(repr(Person.objects(city=None)), "[]") + assert repr(Person.objects(city=None)) == "[]" def test_generic_reference_choices(self): """Ensure that a GenericReferenceField can handle choices.""" @@ -1707,13 +1716,14 @@ class TestField(MongoDBTestCase): post_1.save() bm = Bookmark(bookmark_object=link_1) - self.assertRaises(ValidationError, bm.validate) + with pytest.raises(ValidationError): + bm.validate() bm = Bookmark(bookmark_object=post_1) bm.save() bm = Bookmark.objects.first() - self.assertEqual(bm.bookmark_object, post_1) + assert bm.bookmark_object == post_1 def test_generic_reference_string_choices(self): """Ensure that a GenericReferenceField can handle choices as strings @@ -1745,7 +1755,8 @@ class TestField(MongoDBTestCase): bm.save() bm = Bookmark(bookmark_object=bm) - self.assertRaises(ValidationError, bm.validate) + with pytest.raises(ValidationError): + bm.validate() def test_generic_reference_choices_no_dereference(self): """Ensure that a GenericReferenceField can handle choices on @@ -1798,13 +1809,14 @@ class TestField(MongoDBTestCase): post_1.save() user = User(bookmarks=[link_1]) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() user = User(bookmarks=[post_1]) user.save() user = User.objects.first() - self.assertEqual(user.bookmarks, [post_1]) + assert user.bookmarks == [post_1] def test_generic_reference_list_item_modification(self): """Ensure that modifications of related documents (through generic reference) don't influence on querying @@ -1832,8 +1844,8 @@ class TestField(MongoDBTestCase): user = User.objects(bookmarks__all=[post_1]).first() - self.assertNotEqual(user, None) - self.assertEqual(user.bookmarks[0], post_1) + assert user != None + assert user.bookmarks[0] == post_1 def test_generic_reference_filter_by_dbref(self): """Ensure we can search for a specific generic reference by @@ -1849,7 +1861,7 @@ class TestField(MongoDBTestCase): doc2 = Doc.objects.create(ref=doc1) doc = Doc.objects.get(ref=DBRef("doc", doc1.pk)) - self.assertEqual(doc, doc2) + assert doc == doc2 def test_generic_reference_is_not_tracked_in_parent_doc(self): """Ensure that modifications of related documents (through generic reference) don't influence @@ -1871,11 +1883,11 @@ class TestField(MongoDBTestCase): doc2 = Doc2(ref=doc1, refs=[doc11]).save() doc2.ref.name = "garbage2" - self.assertEqual(doc2._get_changed_fields(), []) + assert doc2._get_changed_fields() == [] doc2.refs[0].name = "garbage3" - self.assertEqual(doc2._get_changed_fields(), []) - self.assertEqual(doc2._delta(), ({}, {})) + assert doc2._get_changed_fields() == [] + assert doc2._delta() == ({}, {}) def test_generic_reference_field(self): """Ensure we can search for a specific generic reference by @@ -1890,10 +1902,10 @@ class TestField(MongoDBTestCase): doc1 = Doc.objects.create() doc2 = Doc.objects.create(ref=doc1) - self.assertIsInstance(doc1.pk, ObjectId) + assert isinstance(doc1.pk, ObjectId) doc = Doc.objects.get(ref=doc1.pk) - 
self.assertEqual(doc, doc2) + assert doc == doc2 def test_choices_allow_using_sets_as_choices(self): """Ensure that sets can be used when setting choices @@ -1933,7 +1945,7 @@ class TestField(MongoDBTestCase): size = StringField(choices=("S", "M")) shirt = Shirt(size="XS") - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): shirt.validate() def test_choices_get_field_display(self): @@ -1964,24 +1976,25 @@ class TestField(MongoDBTestCase): shirt2 = Shirt() # Make sure get__display returns the default value (or None) - self.assertEqual(shirt1.get_size_display(), None) - self.assertEqual(shirt1.get_style_display(), "Wide") + assert shirt1.get_size_display() == None + assert shirt1.get_style_display() == "Wide" shirt1.size = "XXL" shirt1.style = "B" shirt2.size = "M" shirt2.style = "S" - self.assertEqual(shirt1.get_size_display(), "Extra Extra Large") - self.assertEqual(shirt1.get_style_display(), "Baggy") - self.assertEqual(shirt2.get_size_display(), "Medium") - self.assertEqual(shirt2.get_style_display(), "Small") + assert shirt1.get_size_display() == "Extra Extra Large" + assert shirt1.get_style_display() == "Baggy" + assert shirt2.get_size_display() == "Medium" + assert shirt2.get_style_display() == "Small" # Set as Z - an invalid choice shirt1.size = "Z" shirt1.style = "Z" - self.assertEqual(shirt1.get_size_display(), "Z") - self.assertEqual(shirt1.get_style_display(), "Z") - self.assertRaises(ValidationError, shirt1.validate) + assert shirt1.get_size_display() == "Z" + assert shirt1.get_style_display() == "Z" + with pytest.raises(ValidationError): + shirt1.validate() def test_simple_choices_validation(self): """Ensure that value is in a container of allowed values. @@ -1999,7 +2012,8 @@ class TestField(MongoDBTestCase): shirt.validate() shirt.size = "XS" - self.assertRaises(ValidationError, shirt.validate) + with pytest.raises(ValidationError): + shirt.validate() def test_simple_choices_get_field_display(self): """Test dynamic helper for returning the display value of a choices @@ -2016,20 +2030,21 @@ class TestField(MongoDBTestCase): shirt = Shirt() - self.assertEqual(shirt.get_size_display(), None) - self.assertEqual(shirt.get_style_display(), "Small") + assert shirt.get_size_display() == None + assert shirt.get_style_display() == "Small" shirt.size = "XXL" shirt.style = "Baggy" - self.assertEqual(shirt.get_size_display(), "XXL") - self.assertEqual(shirt.get_style_display(), "Baggy") + assert shirt.get_size_display() == "XXL" + assert shirt.get_style_display() == "Baggy" # Set as Z - an invalid choice shirt.size = "Z" shirt.style = "Z" - self.assertEqual(shirt.get_size_display(), "Z") - self.assertEqual(shirt.get_style_display(), "Z") - self.assertRaises(ValidationError, shirt.validate) + assert shirt.get_size_display() == "Z" + assert shirt.get_style_display() == "Z" + with pytest.raises(ValidationError): + shirt.validate() def test_simple_choices_validation_invalid_value(self): """Ensure that error messages are correct. 
@@ -2060,8 +2075,8 @@ class TestField(MongoDBTestCase): except ValidationError as error: # get the validation rules error_dict = error.to_dict() - self.assertEqual(error_dict["size"], SIZE_MESSAGE) - self.assertEqual(error_dict["color"], COLOR_MESSAGE) + assert error_dict["size"] == SIZE_MESSAGE + assert error_dict["color"] == COLOR_MESSAGE def test_recursive_validation(self): """Ensure that a validation result to_dict is available.""" @@ -2082,26 +2097,25 @@ class TestField(MongoDBTestCase): post.comments.append(Comment(content="hello", author=bob)) post.comments.append(Comment(author=bob)) - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() try: post.validate() except ValidationError as error: # ValidationError.errors property - self.assertTrue(hasattr(error, "errors")) - self.assertIsInstance(error.errors, dict) - self.assertIn("comments", error.errors) - self.assertIn(1, error.errors["comments"]) - self.assertIsInstance( - error.errors["comments"][1]["content"], ValidationError - ) + assert hasattr(error, "errors") + assert isinstance(error.errors, dict) + assert "comments" in error.errors + assert 1 in error.errors["comments"] + assert isinstance(error.errors["comments"][1]["content"], ValidationError) # ValidationError.schema property error_dict = error.to_dict() - self.assertIsInstance(error_dict, dict) - self.assertIn("comments", error_dict) - self.assertIn(1, error_dict["comments"]) - self.assertIn("content", error_dict["comments"][1]) - self.assertEqual(error_dict["comments"][1]["content"], u"Field is required") + assert isinstance(error_dict, dict) + assert "comments" in error_dict + assert 1 in error_dict["comments"] + assert "content" in error_dict["comments"][1] + assert error_dict["comments"][1]["content"] == u"Field is required" post.comments[1].content = "here we go" post.validate() @@ -2131,10 +2145,10 @@ class TestField(MongoDBTestCase): doc.items = tuples doc.save() x = TestDoc.objects().get() - self.assertIsNotNone(x) - self.assertEqual(len(x.items), 1) - self.assertIn(tuple(x.items[0]), tuples) - self.assertIn(x.items[0], tuples) + assert x is not None + assert len(x.items) == 1 + assert tuple(x.items[0]) in tuples + assert x.items[0] in tuples def test_dynamic_fields_class(self): class Doc2(Document): @@ -2150,13 +2164,14 @@ class TestField(MongoDBTestCase): doc2 = Doc2(field_1="hello") doc = Doc(my_id=1, embed_me=doc2, field_x="x") - self.assertRaises(OperationError, doc.save) + with pytest.raises(OperationError): + doc.save() doc2.save() doc.save() doc = Doc.objects.get() - self.assertEqual(doc.embed_me.field_1, "hello") + assert doc.embed_me.field_1 == "hello" def test_dynamic_fields_embedded_class(self): class Embed(EmbeddedDocument): @@ -2172,7 +2187,7 @@ class TestField(MongoDBTestCase): Doc(my_id=1, embed_me=Embed(field_1="hello"), field_x="x").save() doc = Doc.objects.get() - self.assertEqual(doc.embed_me.field_1, "hello") + assert doc.embed_me.field_1 == "hello" def test_dynamicfield_dump_document(self): """Ensure a DynamicField can handle another document's dump.""" @@ -2197,15 +2212,15 @@ class TestField(MongoDBTestCase): to_embed = ToEmbed(id=2, recursive=to_embed_recursive).save() doc = Doc(field=to_embed) doc.save() - self.assertIsInstance(doc.field, ToEmbed) - self.assertEqual(doc.field, to_embed) + assert isinstance(doc.field, ToEmbed) + assert doc.field == to_embed # Same thing with a Document with a _cls field to_embed_recursive = ToEmbedChild(id=1).save() to_embed_child = ToEmbedChild(id=2, 
recursive=to_embed_recursive).save() doc = Doc(field=to_embed_child) doc.save() - self.assertIsInstance(doc.field, ToEmbedChild) - self.assertEqual(doc.field, to_embed_child) + assert isinstance(doc.field, ToEmbedChild) + assert doc.field == to_embed_child def test_cls_field(self): class Animal(Document): @@ -2227,10 +2242,10 @@ class TestField(MongoDBTestCase): Dog().save() Fish().save() Human().save() - self.assertEqual( - Animal.objects(_cls__in=["Animal.Mammal.Dog", "Animal.Fish"]).count(), 2 + assert ( + Animal.objects(_cls__in=["Animal.Mammal.Dog", "Animal.Fish"]).count() == 2 ) - self.assertEqual(Animal.objects(_cls__in=["Animal.Fish.Guppy"]).count(), 0) + assert Animal.objects(_cls__in=["Animal.Fish.Guppy"]).count() == 0 def test_sparse_field(self): class Doc(Document): @@ -2249,7 +2264,7 @@ class TestField(MongoDBTestCase): class Doc(Document): foo = StringField() - with self.assertRaises(FieldDoesNotExist): + with pytest.raises(FieldDoesNotExist): Doc(bar="test") def test_undefined_field_exception_with_strict(self): @@ -2262,7 +2277,7 @@ class TestField(MongoDBTestCase): foo = StringField() meta = {"strict": False} - with self.assertRaises(FieldDoesNotExist): + with pytest.raises(FieldDoesNotExist): Doc(bar="test") @@ -2310,20 +2325,20 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): # Test with an embeddedDocument instead of a list(embeddedDocument) # It's an edge case but it used to fail with a vague error, making it difficult to troubleshoot it post = self.BlogPost(comments=comment) - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as ctx_err: post.validate() - self.assertIn("'comments'", str(ctx_err.exception)) - self.assertIn( - "Only lists and tuples may be used in a list field", str(ctx_err.exception) + assert "'comments'" in str(ctx_err.exception) + assert "Only lists and tuples may be used in a list field" in str( + ctx_err.exception ) # Test with a Document post = self.BlogPost(comments=Title(content="garbage")) - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): post.validate() - self.assertIn("'comments'", str(ctx_err.exception)) - self.assertIn( - "Only lists and tuples may be used in a list field", str(ctx_err.exception) + assert "'comments'" in str(ctx_err.exception) + assert "Only lists and tuples may be used in a list field" in str( + ctx_err.exception ) def test_no_keyword_filter(self): @@ -2334,7 +2349,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): filtered = self.post1.comments.filter() # Ensure nothing was changed - self.assertListEqual(filtered, self.post1.comments) + assert filtered == self.post1.comments def test_single_keyword_filter(self): """ @@ -2344,10 +2359,10 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): filtered = self.post1.comments.filter(author="user1") # Ensure only 1 entry was returned. - self.assertEqual(len(filtered), 1) + assert len(filtered) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(filtered[0].author, "user1") + assert filtered[0].author == "user1" def test_multi_keyword_filter(self): """ @@ -2357,11 +2372,11 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): filtered = self.post2.comments.filter(author="user2", message="message2") # Ensure only 1 entry was returned. - self.assertEqual(len(filtered), 1) + assert len(filtered) == 1 # Ensure the entry returned is the correct entry. 
- self.assertEqual(filtered[0].author, "user2") - self.assertEqual(filtered[0].message, "message2") + assert filtered[0].author == "user2" + assert filtered[0].message == "message2" def test_chained_filter(self): """ @@ -2370,18 +2385,18 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): filtered = self.post2.comments.filter(author="user2").filter(message="message2") # Ensure only 1 entry was returned. - self.assertEqual(len(filtered), 1) + assert len(filtered) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(filtered[0].author, "user2") - self.assertEqual(filtered[0].message, "message2") + assert filtered[0].author == "user2" + assert filtered[0].message == "message2" def test_unknown_keyword_filter(self): """ Tests the filter method of a List of Embedded Documents when the keyword is not a known keyword. """ - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.post2.comments.filter(year=2) def test_no_keyword_exclude(self): @@ -2392,7 +2407,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): filtered = self.post1.comments.exclude() # Ensure everything was removed - self.assertListEqual(filtered, []) + assert filtered == [] def test_single_keyword_exclude(self): """ @@ -2402,10 +2417,10 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): excluded = self.post1.comments.exclude(author="user1") # Ensure only 1 entry was returned. - self.assertEqual(len(excluded), 1) + assert len(excluded) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(excluded[0].author, "user2") + assert excluded[0].author == "user2" def test_multi_keyword_exclude(self): """ @@ -2415,11 +2430,11 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): excluded = self.post2.comments.exclude(author="user3", message="message1") # Ensure only 2 entries were returned. - self.assertEqual(len(excluded), 2) + assert len(excluded) == 2 # Ensure the entries returned are the correct entries. - self.assertEqual(excluded[0].author, "user2") - self.assertEqual(excluded[1].author, "user2") + assert excluded[0].author == "user2" + assert excluded[1].author == "user2" def test_non_matching_exclude(self): """ @@ -2429,14 +2444,14 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): excluded = self.post2.comments.exclude(author="user4") # Ensure the 3 entries still exist. - self.assertEqual(len(excluded), 3) + assert len(excluded) == 3 def test_unknown_keyword_exclude(self): """ Tests the exclude method of a List of Embedded Documents when the keyword is not a known keyword. """ - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.post2.comments.exclude(year=2) def test_chained_filter_exclude(self): @@ -2449,25 +2464,25 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): ) # Ensure only 1 entry was returned. - self.assertEqual(len(excluded), 1) + assert len(excluded) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(excluded[0].author, "user2") - self.assertEqual(excluded[0].message, "message3") + assert excluded[0].author == "user2" + assert excluded[0].message == "message3" def test_count(self): """ Tests the count method of a List of Embedded Documents. 
""" - self.assertEqual(self.post1.comments.count(), 2) - self.assertEqual(self.post1.comments.count(), len(self.post1.comments)) + assert self.post1.comments.count() == 2 + assert self.post1.comments.count() == len(self.post1.comments) def test_filtered_count(self): """ Tests the filter + count method of a List of Embedded Documents. """ count = self.post1.comments.filter(author="user1").count() - self.assertEqual(count, 1) + assert count == 1 def test_single_keyword_get(self): """ @@ -2475,8 +2490,8 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): single keyword. """ comment = self.post1.comments.get(author="user1") - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, "user1") + assert isinstance(comment, self.Comments) + assert comment.author == "user1" def test_multi_keyword_get(self): """ @@ -2484,16 +2499,16 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): multiple keywords. """ comment = self.post2.comments.get(author="user2", message="message2") - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, "user2") - self.assertEqual(comment.message, "message2") + assert isinstance(comment, self.Comments) + assert comment.author == "user2" + assert comment.message == "message2" def test_no_keyword_multiple_return_get(self): """ Tests the get method of a List of Embedded Documents without a keyword to return multiple documents. """ - with self.assertRaises(MultipleObjectsReturned): + with pytest.raises(MultipleObjectsReturned): self.post1.comments.get() def test_keyword_multiple_return_get(self): @@ -2501,7 +2516,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): Tests the get method of a List of Embedded Documents with a keyword to return multiple documents. """ - with self.assertRaises(MultipleObjectsReturned): + with pytest.raises(MultipleObjectsReturned): self.post2.comments.get(author="user2") def test_unknown_keyword_get(self): @@ -2509,7 +2524,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): Tests the get method of a List of Embedded Documents with an unknown keyword. """ - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.post2.comments.get(year=2020) def test_no_result_get(self): @@ -2517,7 +2532,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): Tests the get method of a List of Embedded Documents where get returns no results. """ - with self.assertRaises(DoesNotExist): + with pytest.raises(DoesNotExist): self.post1.comments.get(author="user3") def test_first(self): @@ -2528,8 +2543,8 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): comment = self.post1.comments.first() # Ensure a Comment object was returned. - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment, self.post1.comments[0]) + assert isinstance(comment, self.Comments) + assert comment == self.post1.comments[0] def test_create(self): """ @@ -2539,14 +2554,12 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): self.post1.save() # Ensure the returned value is the comment object. - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, "user4") - self.assertEqual(comment.message, "message1") + assert isinstance(comment, self.Comments) + assert comment.author == "user4" + assert comment.message == "message1" # Ensure the new comment was actually saved to the database. 
- self.assertIn( - comment, self.BlogPost.objects(comments__author="user4")[0].comments - ) + assert comment in self.BlogPost.objects(comments__author="user4")[0].comments def test_filtered_create(self): """ @@ -2560,14 +2573,12 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): self.post1.save() # Ensure the returned value is the comment object. - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, "user4") - self.assertEqual(comment.message, "message1") + assert isinstance(comment, self.Comments) + assert comment.author == "user4" + assert comment.message == "message1" # Ensure the new comment was actually saved to the database. - self.assertIn( - comment, self.BlogPost.objects(comments__author="user4")[0].comments - ) + assert comment in self.BlogPost.objects(comments__author="user4")[0].comments def test_no_keyword_update(self): """ @@ -2579,13 +2590,13 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): self.post1.save() # Ensure that nothing was altered. - self.assertIn(original[0], self.BlogPost.objects(id=self.post1.id)[0].comments) + assert original[0] in self.BlogPost.objects(id=self.post1.id)[0].comments - self.assertIn(original[1], self.BlogPost.objects(id=self.post1.id)[0].comments) + assert original[1] in self.BlogPost.objects(id=self.post1.id)[0].comments # Ensure the method returned 0 as the number of entries # modified - self.assertEqual(number, 0) + assert number == 0 def test_single_keyword_update(self): """ @@ -2598,12 +2609,12 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): comments = self.BlogPost.objects(id=self.post1.id)[0].comments # Ensure that the database was updated properly. - self.assertEqual(comments[0].author, "user4") - self.assertEqual(comments[1].author, "user4") + assert comments[0].author == "user4" + assert comments[1].author == "user4" # Ensure the method returned 2 as the number of entries # modified - self.assertEqual(number, 2) + assert number == 2 def test_unicode(self): """ @@ -2615,7 +2626,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): self.Comments(author="user2", message=u"хабарлама"), ] ).save() - self.assertEqual(post.comments.get(message=u"сообщение").author, "user1") + assert post.comments.get(message=u"сообщение").author == "user1" def test_save(self): """ @@ -2627,7 +2638,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): comments.save() # Ensure that the new comment has been added to the database. - self.assertIn(new_comment, self.BlogPost.objects(id=self.post1.id)[0].comments) + assert new_comment in self.BlogPost.objects(id=self.post1.id)[0].comments def test_delete(self): """ @@ -2638,17 +2649,17 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): # Ensure that all the comments under post1 were deleted in the # database. - self.assertListEqual(self.BlogPost.objects(id=self.post1.id)[0].comments, []) + assert self.BlogPost.objects(id=self.post1.id)[0].comments == [] # Ensure that post1 comments were deleted from the list. - self.assertListEqual(self.post1.comments, []) + assert self.post1.comments == [] # Ensure that comments still returned a EmbeddedDocumentList object. 
- self.assertIsInstance(self.post1.comments, EmbeddedDocumentList) + assert isinstance(self.post1.comments, EmbeddedDocumentList) # Ensure that the delete method returned 2 as the number of entries # deleted from the database - self.assertEqual(number, 2) + assert number == 2 def test_empty_list_embedded_documents_with_unique_field(self): """ @@ -2664,7 +2675,7 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique)) A(my_list=[]).save() - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): A(my_list=[]).save() class EmbeddedWithSparseUnique(EmbeddedDocument): @@ -2689,16 +2700,16 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): self.post1.save() # Ensure that only the user2 comment was deleted. - self.assertNotIn(comment, self.BlogPost.objects(id=self.post1.id)[0].comments) - self.assertEqual(len(self.BlogPost.objects(id=self.post1.id)[0].comments), 1) + assert comment not in self.BlogPost.objects(id=self.post1.id)[0].comments + assert len(self.BlogPost.objects(id=self.post1.id)[0].comments) == 1 # Ensure that the user2 comment no longer exists in the list. - self.assertNotIn(comment, self.post1.comments) - self.assertEqual(len(self.post1.comments), 1) + assert comment not in self.post1.comments + assert len(self.post1.comments) == 1 # Ensure that the delete method returned 1 as the number of entries # deleted from the database - self.assertEqual(number, 1) + assert number == 1 def test_custom_data(self): """ @@ -2714,10 +2725,10 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): CustomData.drop_collection() a1 = CustomData(a_field=1, c_field=2).save() - self.assertEqual(2, a1.c_field) - self.assertFalse(hasattr(a1.c_field, "custom_data")) - self.assertTrue(hasattr(CustomData.c_field, "custom_data")) - self.assertEqual(custom_data["a"], CustomData.c_field.custom_data["a"]) + assert 2 == a1.c_field + assert not hasattr(a1.c_field, "custom_data") + assert hasattr(CustomData.c_field, "custom_data") + assert custom_data["a"] == CustomData.c_field.custom_data["a"] if __name__ == "__main__": diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index 49eb5bc2..0746db33 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -64,13 +64,13 @@ class TestFileField(MongoDBTestCase): putfile.save() result = PutFile.objects.first() - self.assertEqual(putfile, result) - self.assertEqual( - "%s" % result.the_file, - "" % result.the_file.grid_id, + assert putfile == result + assert ( + "%s" % result.the_file + == "" % result.the_file.grid_id ) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) + assert result.the_file.read() == text + assert result.the_file.content_type == content_type result.the_file.delete() # Remove file from GridFS PutFile.objects.delete() @@ -85,9 +85,9 @@ class TestFileField(MongoDBTestCase): putfile.save() result = PutFile.objects.first() - self.assertEqual(putfile, result) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) + assert putfile == result + assert result.the_file.read() == text + assert result.the_file.content_type == content_type result.the_file.delete() def test_file_fields_stream(self): @@ -111,19 +111,19 @@ class TestFileField(MongoDBTestCase): streamfile.save() result = StreamFile.objects.first() - self.assertEqual(streamfile, result) - self.assertEqual(result.the_file.read(), text + 
more_text) - self.assertEqual(result.the_file.content_type, content_type) + assert streamfile == result + assert result.the_file.read() == text + more_text + assert result.the_file.content_type == content_type result.the_file.seek(0) - self.assertEqual(result.the_file.tell(), 0) - self.assertEqual(result.the_file.read(len(text)), text) - self.assertEqual(result.the_file.tell(), len(text)) - self.assertEqual(result.the_file.read(len(more_text)), more_text) - self.assertEqual(result.the_file.tell(), len(text + more_text)) + assert result.the_file.tell() == 0 + assert result.the_file.read(len(text)) == text + assert result.the_file.tell() == len(text) + assert result.the_file.read(len(more_text)) == more_text + assert result.the_file.tell() == len(text + more_text) result.the_file.delete() # Ensure deleted file returns None - self.assertTrue(result.the_file.read() is None) + assert result.the_file.read() is None def test_file_fields_stream_after_none(self): """Ensure that a file field can be written to after it has been saved as @@ -148,19 +148,19 @@ class TestFileField(MongoDBTestCase): streamfile.save() result = StreamFile.objects.first() - self.assertEqual(streamfile, result) - self.assertEqual(result.the_file.read(), text + more_text) + assert streamfile == result + assert result.the_file.read() == text + more_text # self.assertEqual(result.the_file.content_type, content_type) result.the_file.seek(0) - self.assertEqual(result.the_file.tell(), 0) - self.assertEqual(result.the_file.read(len(text)), text) - self.assertEqual(result.the_file.tell(), len(text)) - self.assertEqual(result.the_file.read(len(more_text)), more_text) - self.assertEqual(result.the_file.tell(), len(text + more_text)) + assert result.the_file.tell() == 0 + assert result.the_file.read(len(text)) == text + assert result.the_file.tell() == len(text) + assert result.the_file.read(len(more_text)) == more_text + assert result.the_file.tell() == len(text + more_text) result.the_file.delete() # Ensure deleted file returns None - self.assertTrue(result.the_file.read() is None) + assert result.the_file.read() is None def test_file_fields_set(self): class SetFile(Document): @@ -176,16 +176,16 @@ class TestFileField(MongoDBTestCase): setfile.save() result = SetFile.objects.first() - self.assertEqual(setfile, result) - self.assertEqual(result.the_file.read(), text) + assert setfile == result + assert result.the_file.read() == text # Try replacing file with new one result.the_file.replace(more_text) result.save() result = SetFile.objects.first() - self.assertEqual(setfile, result) - self.assertEqual(result.the_file.read(), more_text) + assert setfile == result + assert result.the_file.read() == more_text result.the_file.delete() def test_file_field_no_default(self): @@ -205,28 +205,28 @@ class TestFileField(MongoDBTestCase): doc_b = GridDocument.objects.with_id(doc_a.id) doc_b.the_file.replace(f, filename="doc_b") doc_b.save() - self.assertNotEqual(doc_b.the_file.grid_id, None) + assert doc_b.the_file.grid_id != None # Test it matches doc_c = GridDocument.objects.with_id(doc_b.id) - self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) + assert doc_b.the_file.grid_id == doc_c.the_file.grid_id # Test with default doc_d = GridDocument(the_file=six.b("")) doc_d.save() doc_e = GridDocument.objects.with_id(doc_d.id) - self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) + assert doc_d.the_file.grid_id == doc_e.the_file.grid_id doc_e.the_file.replace(f, filename="doc_e") doc_e.save() doc_f = 
GridDocument.objects.with_id(doc_e.id) - self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) + assert doc_e.the_file.grid_id == doc_f.the_file.grid_id db = GridDocument._get_db() grid_fs = gridfs.GridFS(db) - self.assertEqual(["doc_b", "doc_e"], grid_fs.list()) + assert ["doc_b", "doc_e"] == grid_fs.list() def test_file_uniqueness(self): """Ensure that each instance of a FileField is unique @@ -246,8 +246,8 @@ class TestFileField(MongoDBTestCase): test_file_dupe = TestFile() data = test_file_dupe.the_file.read() # Should be None - self.assertNotEqual(test_file.name, test_file_dupe.name) - self.assertNotEqual(test_file.the_file.read(), data) + assert test_file.name != test_file_dupe.name + assert test_file.the_file.read() != data TestFile.drop_collection() @@ -268,8 +268,8 @@ class TestFileField(MongoDBTestCase): marmot.save() marmot = Animal.objects.get() - self.assertEqual(marmot.photo.content_type, "image/jpeg") - self.assertEqual(marmot.photo.foo, "bar") + assert marmot.photo.content_type == "image/jpeg" + assert marmot.photo.foo == "bar" def test_file_reassigning(self): class TestFile(Document): @@ -278,12 +278,12 @@ class TestFileField(MongoDBTestCase): TestFile.drop_collection() test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() - self.assertEqual(test_file.the_file.get().length, 8313) + assert test_file.the_file.get().length == 8313 test_file = TestFile.objects.first() test_file.the_file = get_file(TEST_IMAGE2_PATH) test_file.save() - self.assertEqual(test_file.the_file.get().length, 4971) + assert test_file.the_file.get().length == 4971 def test_file_boolean(self): """Ensure that a boolean test of a FileField indicates its presence @@ -295,13 +295,13 @@ class TestFileField(MongoDBTestCase): TestFile.drop_collection() test_file = TestFile() - self.assertFalse(bool(test_file.the_file)) + assert not bool(test_file.the_file) test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain") test_file.save() - self.assertTrue(bool(test_file.the_file)) + assert bool(test_file.the_file) test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.content_type, "text/plain") + assert test_file.the_file.content_type == "text/plain" def test_file_cmp(self): """Test comparing against other types""" @@ -310,7 +310,7 @@ class TestFileField(MongoDBTestCase): the_file = FileField() test_file = TestFile() - self.assertNotIn(test_file.the_file, [{"test": 1}]) + assert test_file.the_file not in [{"test": 1}] def test_file_disk_space(self): """ Test disk space usage when we delete/replace a file """ @@ -330,16 +330,16 @@ class TestFileField(MongoDBTestCase): files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 1) - self.assertEqual(len(list(chunks)), 1) + assert len(list(files)) == 1 + assert len(list(chunks)) == 1 # Deleting the docoument should delete the files testfile.delete() files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 # Test case where we don't store a file in the first place testfile = TestFile() @@ -347,15 +347,15 @@ class TestFileField(MongoDBTestCase): files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 testfile.delete() files = db.fs.files.find() chunks = db.fs.chunks.find() - 
self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 # Test case where we overwrite the file testfile = TestFile() @@ -368,15 +368,15 @@ class TestFileField(MongoDBTestCase): files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 1) - self.assertEqual(len(list(chunks)), 1) + assert len(list(files)) == 1 + assert len(list(chunks)) == 1 testfile.delete() files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 def test_image_field(self): if not HAS_PIL: @@ -396,9 +396,7 @@ class TestFileField(MongoDBTestCase): t.image.put(f) self.fail("Should have raised an invalidation error") except ValidationError as e: - self.assertEqual( - "%s" % e, "Invalid image: cannot identify image file %s" % f - ) + assert "%s" % e == "Invalid image: cannot identify image file %s" % f t = TestImage() t.image.put(get_file(TEST_IMAGE_PATH)) @@ -406,11 +404,11 @@ class TestFileField(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, "PNG") + assert t.image.format == "PNG" w, h = t.image.size - self.assertEqual(w, 371) - self.assertEqual(h, 76) + assert w == 371 + assert h == 76 t.image.delete() @@ -424,12 +422,12 @@ class TestFileField(MongoDBTestCase): TestFile.drop_collection() test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() - self.assertEqual(test_file.the_file.size, (371, 76)) + assert test_file.the_file.size == (371, 76) test_file = TestFile.objects.first() test_file.the_file = get_file(TEST_IMAGE2_PATH) test_file.save() - self.assertEqual(test_file.the_file.size, (45, 101)) + assert test_file.the_file.size == (45, 101) def test_image_field_resize(self): if not HAS_PIL: @@ -446,11 +444,11 @@ class TestFileField(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, "PNG") + assert t.image.format == "PNG" w, h = t.image.size - self.assertEqual(w, 185) - self.assertEqual(h, 37) + assert w == 185 + assert h == 37 t.image.delete() @@ -469,11 +467,11 @@ class TestFileField(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, "PNG") + assert t.image.format == "PNG" w, h = t.image.size - self.assertEqual(w, 185) - self.assertEqual(h, 37) + assert w == 185 + assert h == 37 t.image.delete() @@ -492,9 +490,9 @@ class TestFileField(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.thumbnail.format, "PNG") - self.assertEqual(t.image.thumbnail.width, 92) - self.assertEqual(t.image.thumbnail.height, 18) + assert t.image.thumbnail.format == "PNG" + assert t.image.thumbnail.width == 92 + assert t.image.thumbnail.height == 18 t.image.delete() @@ -518,17 +516,17 @@ class TestFileField(MongoDBTestCase): test_file.save() data = get_db("test_files").macumba.files.find_one() - self.assertEqual(data.get("name"), "hello.txt") + assert data.get("name") == "hello.txt" test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), six.b("Hello, World!")) + assert test_file.the_file.read() == six.b("Hello, World!") test_file = TestFile.objects.first() test_file.the_file = six.b("HELLO, WORLD!") test_file.save() test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), six.b("HELLO, WORLD!")) + assert test_file.the_file.read() == six.b("HELLO, WORLD!") def test_copyable(self): class PutFile(Document): 
@@ -546,8 +544,8 @@ class TestFileField(MongoDBTestCase): class TestFile(Document): name = StringField() - self.assertEqual(putfile, copy.copy(putfile)) - self.assertEqual(putfile, copy.deepcopy(putfile)) + assert putfile == copy.copy(putfile) + assert putfile == copy.deepcopy(putfile) def test_get_image_by_grid_id(self): @@ -569,9 +567,7 @@ class TestFileField(MongoDBTestCase): test = TestImage.objects.first() grid_id = test.image1.grid_id - self.assertEqual( - 1, TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count() - ) + assert 1 == TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count() def test_complex_field_filefield(self): """Ensure you can add meta data to file""" @@ -593,9 +589,9 @@ class TestFileField(MongoDBTestCase): marmot.save() marmot = Animal.objects.get() - self.assertEqual(marmot.photos[0].content_type, "image/jpeg") - self.assertEqual(marmot.photos[0].foo, "bar") - self.assertEqual(marmot.photos[0].get().length, 8313) + assert marmot.photos[0].content_type == "image/jpeg" + assert marmot.photos[0].foo == "bar" + assert marmot.photos[0].get().length == 8313 if __name__ == "__main__": diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py index 9f357ce5..d755fb4e 100644 --- a/tests/fields/test_float_field.py +++ b/tests/fields/test_float_field.py @@ -4,6 +4,7 @@ import six from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestFloatField(MongoDBTestCase): @@ -16,8 +17,8 @@ class TestFloatField(MongoDBTestCase): TestDocument(float_fld=None).save() TestDocument(float_fld=1).save() - self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count()) - self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count()) + assert 1 == TestDocument.objects(float_fld__ne=None).count() + assert 1 == TestDocument.objects(float_fld__ne=1).count() def test_validation(self): """Ensure that invalid values cannot be assigned to float fields. 
@@ -34,16 +35,20 @@ class TestFloatField(MongoDBTestCase): person.validate() person.height = "2.0" - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.height = 0.01 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.height = 4.0 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person_2 = Person(height="something invalid") - self.assertRaises(ValidationError, person_2.validate) + with pytest.raises(ValidationError): + person_2.validate() big_person = BigPerson() @@ -55,4 +60,5 @@ class TestFloatField(MongoDBTestCase): big_person.validate() big_person.height = 2 ** 100000 # Too big for a float value - self.assertRaises(ValidationError, big_person.validate) + with pytest.raises(ValidationError): + big_person.validate() diff --git a/tests/fields/test_geo_fields.py b/tests/fields/test_geo_fields.py index ff4cbc83..1b912a4b 100644 --- a/tests/fields/test_geo_fields.py +++ b/tests/fields/test_geo_fields.py @@ -11,7 +11,7 @@ class TestGeoField(MongoDBTestCase): Cls(loc=loc).validate() self.fail("Should not validate the location {0}".format(loc)) except ValidationError as e: - self.assertEqual(expected, e.to_dict()["loc"]) + assert expected == e.to_dict()["loc"] def test_geopoint_validation(self): class Location(Document): @@ -299,7 +299,7 @@ class TestGeoField(MongoDBTestCase): location = GeoPointField() geo_indicies = Event._geo_indices() - self.assertEqual(geo_indicies, [{"fields": [("location", "2d")]}]) + assert geo_indicies == [{"fields": [("location", "2d")]}] def test_geopoint_embedded_indexes(self): """Ensure that indexes are created automatically for GeoPointFields on @@ -315,7 +315,7 @@ class TestGeoField(MongoDBTestCase): venue = EmbeddedDocumentField(Venue) geo_indicies = Event._geo_indices() - self.assertEqual(geo_indicies, [{"fields": [("venue.location", "2d")]}]) + assert geo_indicies == [{"fields": [("venue.location", "2d")]}] def test_indexes_2dsphere(self): """Ensure that indexes are created automatically for GeoPointFields. @@ -328,9 +328,9 @@ class TestGeoField(MongoDBTestCase): polygon = PolygonField() geo_indicies = Event._geo_indices() - self.assertIn({"fields": [("line", "2dsphere")]}, geo_indicies) - self.assertIn({"fields": [("polygon", "2dsphere")]}, geo_indicies) - self.assertIn({"fields": [("point", "2dsphere")]}, geo_indicies) + assert {"fields": [("line", "2dsphere")]} in geo_indicies + assert {"fields": [("polygon", "2dsphere")]} in geo_indicies + assert {"fields": [("point", "2dsphere")]} in geo_indicies def test_indexes_2dsphere_embedded(self): """Ensure that indexes are created automatically for GeoPointFields. 
@@ -347,9 +347,9 @@ class TestGeoField(MongoDBTestCase): venue = EmbeddedDocumentField(Venue) geo_indicies = Event._geo_indices() - self.assertIn({"fields": [("venue.line", "2dsphere")]}, geo_indicies) - self.assertIn({"fields": [("venue.polygon", "2dsphere")]}, geo_indicies) - self.assertIn({"fields": [("venue.point", "2dsphere")]}, geo_indicies) + assert {"fields": [("venue.line", "2dsphere")]} in geo_indicies + assert {"fields": [("venue.polygon", "2dsphere")]} in geo_indicies + assert {"fields": [("venue.point", "2dsphere")]} in geo_indicies def test_geo_indexes_recursion(self): class Location(Document): @@ -365,12 +365,12 @@ class TestGeoField(MongoDBTestCase): Parent(name="Berlin").save() info = Parent._get_collection().index_information() - self.assertNotIn("location_2d", info) + assert "location_2d" not in info info = Location._get_collection().index_information() - self.assertIn("location_2d", info) + assert "location_2d" in info - self.assertEqual(len(Parent._geo_indices()), 0) - self.assertEqual(len(Location._geo_indices()), 1) + assert len(Parent._geo_indices()) == 0 + assert len(Location._geo_indices()) == 1 def test_geo_indexes_auto_index(self): @@ -381,16 +381,16 @@ class TestGeoField(MongoDBTestCase): meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} - self.assertEqual([], Log._geo_indices()) + assert [] == Log._geo_indices() Log.drop_collection() Log.ensure_indexes() info = Log._get_collection().index_information() - self.assertEqual( - info["location_2dsphere_datetime_1"]["key"], - [("location", "2dsphere"), ("datetime", 1)], - ) + assert info["location_2dsphere_datetime_1"]["key"] == [ + ("location", "2dsphere"), + ("datetime", 1), + ] # Test listing explicitly class Log(Document): @@ -401,16 +401,16 @@ class TestGeoField(MongoDBTestCase): "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] } - self.assertEqual([], Log._geo_indices()) + assert [] == Log._geo_indices() Log.drop_collection() Log.ensure_indexes() info = Log._get_collection().index_information() - self.assertEqual( - info["location_2dsphere_datetime_1"]["key"], - [("location", "2dsphere"), ("datetime", 1)], - ) + assert info["location_2dsphere_datetime_1"]["key"] == [ + ("location", "2dsphere"), + ("datetime", 1), + ] if __name__ == "__main__": diff --git a/tests/fields/test_int_field.py b/tests/fields/test_int_field.py index b7db0416..65a5fbad 100644 --- a/tests/fields/test_int_field.py +++ b/tests/fields/test_int_field.py @@ -2,6 +2,7 @@ from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestIntField(MongoDBTestCase): @@ -23,11 +24,14 @@ class TestIntField(MongoDBTestCase): person.validate() person.age = -1 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.age = 120 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.age = "ten" - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() def test_ne_operator(self): class TestDocument(Document): @@ -38,5 +42,5 @@ class TestIntField(MongoDBTestCase): TestDocument(int_fld=None).save() TestDocument(int_fld=1).save() - self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count()) - self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count()) + assert 1 == TestDocument.objects(int_fld__ne=None).count() + assert 1 == TestDocument.objects(int_fld__ne=1).count() diff --git 
a/tests/fields/test_lazy_reference_field.py b/tests/fields/test_lazy_reference_field.py index 2a686d7f..8150574d 100644 --- a/tests/fields/test_lazy_reference_field.py +++ b/tests/fields/test_lazy_reference_field.py @@ -5,13 +5,15 @@ from mongoengine import * from mongoengine.base import LazyReference from tests.utils import MongoDBTestCase +import pytest class TestLazyReferenceField(MongoDBTestCase): def test_lazy_reference_config(self): # Make sure ReferenceField only accepts a document class or a string # with a document class name. - self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument) + with pytest.raises(ValidationError): + LazyReferenceField(EmbeddedDocument) def test___repr__(self): class Animal(Document): @@ -25,7 +27,7 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal() oc = Ocurrence(animal=animal) - self.assertIn("LazyReference", repr(oc.animal)) + assert "LazyReference" in repr(oc.animal) def test___getattr___unknown_attr_raises_attribute_error(self): class Animal(Document): @@ -39,7 +41,7 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal().save() oc = Ocurrence(animal=animal) - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): oc.animal.not_exist def test_lazy_reference_simple(self): @@ -57,19 +59,19 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() Ocurrence(person="test", animal=animal).save() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) + assert isinstance(p.animal, LazyReference) fetched_animal = p.animal.fetch() - self.assertEqual(fetched_animal, animal) + assert fetched_animal == animal # `fetch` keep cache on referenced document by default... animal.tag = "not so heavy" animal.save() double_fetch = p.animal.fetch() - self.assertIs(fetched_animal, double_fetch) - self.assertEqual(double_fetch.tag, "heavy") + assert fetched_animal is double_fetch + assert double_fetch.tag == "heavy" # ...unless specified otherwise fetch_force = p.animal.fetch(force=True) - self.assertIsNot(fetch_force, fetched_animal) - self.assertEqual(fetch_force.tag, "not so heavy") + assert fetch_force is not fetched_animal + assert fetch_force.tag == "not so heavy" def test_lazy_reference_fetch_invalid_ref(self): class Animal(Document): @@ -87,8 +89,8 @@ class TestLazyReferenceField(MongoDBTestCase): Ocurrence(person="test", animal=animal).save() animal.delete() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) - with self.assertRaises(DoesNotExist): + assert isinstance(p.animal, LazyReference) + with pytest.raises(DoesNotExist): p.animal.fetch() def test_lazy_reference_set(self): @@ -122,7 +124,7 @@ class TestLazyReferenceField(MongoDBTestCase): ): p = Ocurrence(person="test", animal=ref).save() p.reload() - self.assertIsInstance(p.animal, LazyReference) + assert isinstance(p.animal, LazyReference) p.animal.fetch() def test_lazy_reference_bad_set(self): @@ -149,7 +151,7 @@ class TestLazyReferenceField(MongoDBTestCase): DBRef(baddoc._get_collection_name(), animal.pk), LazyReference(BadDoc, animal.pk), ): - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): p = Ocurrence(person="test", animal=bad).save() def test_lazy_reference_query_conversion(self): @@ -179,14 +181,14 @@ class TestLazyReferenceField(MongoDBTestCase): post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - 
self.assertEqual(post.id, post2.id) + assert post.id == post2.id # Same thing by passing a LazyReference instance post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_lazy_reference_query_conversion_dbref(self): """Ensure that LazyReferenceFields can be queried using objects and values @@ -215,14 +217,14 @@ class TestLazyReferenceField(MongoDBTestCase): post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id # Same thing by passing a LazyReference instance post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_lazy_reference_passthrough(self): class Animal(Document): @@ -239,20 +241,20 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() Ocurrence(animal=animal, animal_passthrough=animal).save() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) - with self.assertRaises(KeyError): + assert isinstance(p.animal, LazyReference) + with pytest.raises(KeyError): p.animal["name"] - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): p.animal.name - self.assertEqual(p.animal.pk, animal.pk) + assert p.animal.pk == animal.pk - self.assertEqual(p.animal_passthrough.name, "Leopard") - self.assertEqual(p.animal_passthrough["name"], "Leopard") + assert p.animal_passthrough.name == "Leopard" + assert p.animal_passthrough["name"] == "Leopard" # Should not be able to access referenced document's methods - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): p.animal.save - with self.assertRaises(KeyError): + with pytest.raises(KeyError): p.animal["save"] def test_lazy_reference_not_set(self): @@ -269,7 +271,7 @@ class TestLazyReferenceField(MongoDBTestCase): Ocurrence(person="foo").save() p = Ocurrence.objects.get() - self.assertIs(p.animal, None) + assert p.animal is None def test_lazy_reference_equality(self): class Animal(Document): @@ -280,12 +282,12 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() animalref = LazyReference(Animal, animal.pk) - self.assertEqual(animal, animalref) - self.assertEqual(animalref, animal) + assert animal == animalref + assert animalref == animal other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90")) - self.assertNotEqual(animal, other_animalref) - self.assertNotEqual(other_animalref, animal) + assert animal != other_animalref + assert other_animalref != animal def test_lazy_reference_embedded(self): class Animal(Document): @@ -308,12 +310,12 @@ class TestLazyReferenceField(MongoDBTestCase): animal2 = Animal(name="cheeta").save() def check_fields_type(occ): - self.assertIsInstance(occ.direct, LazyReference) + assert isinstance(occ.direct, LazyReference) for elem in occ.in_list: - self.assertIsInstance(elem, LazyReference) - self.assertIsInstance(occ.in_embedded.direct, LazyReference) + assert isinstance(elem, LazyReference) + assert isinstance(occ.in_embedded.direct, LazyReference) for elem in occ.in_embedded.in_list: - self.assertIsInstance(elem, LazyReference) + assert isinstance(elem, LazyReference) occ = Ocurrence( in_list=[animal1, animal2], @@ -346,19 +348,19 @@ class TestGenericLazyReferenceField(MongoDBTestCase): 
animal = Animal(name="Leopard", tag="heavy").save() Ocurrence(person="test", animal=animal).save() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) + assert isinstance(p.animal, LazyReference) fetched_animal = p.animal.fetch() - self.assertEqual(fetched_animal, animal) + assert fetched_animal == animal # `fetch` keep cache on referenced document by default... animal.tag = "not so heavy" animal.save() double_fetch = p.animal.fetch() - self.assertIs(fetched_animal, double_fetch) - self.assertEqual(double_fetch.tag, "heavy") + assert fetched_animal is double_fetch + assert double_fetch.tag == "heavy" # ...unless specified otherwise fetch_force = p.animal.fetch(force=True) - self.assertIsNot(fetch_force, fetched_animal) - self.assertEqual(fetch_force.tag, "not so heavy") + assert fetch_force is not fetched_animal + assert fetch_force.tag == "not so heavy" def test_generic_lazy_reference_choices(self): class Animal(Document): @@ -385,13 +387,13 @@ class TestGenericLazyReferenceField(MongoDBTestCase): occ_animal = Ocurrence(living_thing=animal, thing=animal).save() occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save() - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): Ocurrence(living_thing=mineral).save() occ = Ocurrence.objects.get(living_thing=animal) - self.assertEqual(occ, occ_animal) - self.assertIsInstance(occ.thing, LazyReference) - self.assertIsInstance(occ.living_thing, LazyReference) + assert occ == occ_animal + assert isinstance(occ.thing, LazyReference) + assert isinstance(occ.living_thing, LazyReference) occ.thing = vegetal occ.living_thing = vegetal @@ -399,7 +401,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): occ.thing = mineral occ.living_thing = mineral - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): occ.save() def test_generic_lazy_reference_set(self): @@ -434,7 +436,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): ): p = Ocurrence(person="test", animal=ref).save() p.reload() - self.assertIsInstance(p.animal, (LazyReference, Document)) + assert isinstance(p.animal, (LazyReference, Document)) p.animal.fetch() def test_generic_lazy_reference_bad_set(self): @@ -455,7 +457,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() baddoc = BadDoc().save() for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)): - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): p = Ocurrence(person="test", animal=bad).save() def test_generic_lazy_reference_query_conversion(self): @@ -481,14 +483,14 @@ class TestGenericLazyReferenceField(MongoDBTestCase): post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id # Same thing by passing a LazyReference instance post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_generic_lazy_reference_not_set(self): class Animal(Document): @@ -504,7 +506,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): Ocurrence(person="foo").save() p = Ocurrence.objects.get() - self.assertIs(p.animal, None) + assert p.animal is None def test_generic_lazy_reference_accepts_string_instead_of_class(self): class Animal(Document): @@ -521,7 +523,7 @@ class 
TestGenericLazyReferenceField(MongoDBTestCase): animal = Animal().save() Ocurrence(animal=animal).save() p = Ocurrence.objects.get() - self.assertEqual(p.animal, animal) + assert p.animal == animal def test_generic_lazy_reference_embedded(self): class Animal(Document): @@ -544,12 +546,12 @@ class TestGenericLazyReferenceField(MongoDBTestCase): animal2 = Animal(name="cheeta").save() def check_fields_type(occ): - self.assertIsInstance(occ.direct, LazyReference) + assert isinstance(occ.direct, LazyReference) for elem in occ.in_list: - self.assertIsInstance(elem, LazyReference) - self.assertIsInstance(occ.in_embedded.direct, LazyReference) + assert isinstance(elem, LazyReference) + assert isinstance(occ.in_embedded.direct, LazyReference) for elem in occ.in_embedded.in_list: - self.assertIsInstance(elem, LazyReference) + assert isinstance(elem, LazyReference) occ = Ocurrence( in_list=[animal1, animal2], diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py index ab86eccd..51f8e255 100644 --- a/tests/fields/test_long_field.py +++ b/tests/fields/test_long_field.py @@ -10,6 +10,7 @@ from mongoengine import * from mongoengine.connection import get_db from tests.utils import MongoDBTestCase +import pytest class TestLongField(MongoDBTestCase): @@ -24,10 +25,10 @@ class TestLongField(MongoDBTestCase): doc = TestLongFieldConsideredAsInt64(some_long=42).save() db = get_db() - self.assertIsInstance( + assert isinstance( db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64 ) - self.assertIsInstance(doc.some_long, six.integer_types) + assert isinstance(doc.some_long, six.integer_types) def test_long_validation(self): """Ensure that invalid values cannot be assigned to long fields. @@ -41,11 +42,14 @@ class TestLongField(MongoDBTestCase): doc.validate() doc.value = -1 - self.assertRaises(ValidationError, doc.validate) + with pytest.raises(ValidationError): + doc.validate() doc.value = 120 - self.assertRaises(ValidationError, doc.validate) + with pytest.raises(ValidationError): + doc.validate() doc.value = "ten" - self.assertRaises(ValidationError, doc.validate) + with pytest.raises(ValidationError): + doc.validate() def test_long_ne_operator(self): class TestDocument(Document): @@ -56,4 +60,4 @@ class TestLongField(MongoDBTestCase): TestDocument(long_fld=None).save() TestDocument(long_fld=1).save() - self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count()) + assert 1 == TestDocument.objects(long_fld__ne=None).count() diff --git a/tests/fields/test_map_field.py b/tests/fields/test_map_field.py index 54f70aa1..fd56ddd0 100644 --- a/tests/fields/test_map_field.py +++ b/tests/fields/test_map_field.py @@ -4,6 +4,7 @@ import datetime from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestMapField(MongoDBTestCase): @@ -19,11 +20,11 @@ class TestMapField(MongoDBTestCase): e.mapping["someint"] = 1 e.save() - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): e.mapping["somestring"] = "abc" e.save() - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): class NoDeclaredType(Document): mapping = MapField() @@ -51,10 +52,10 @@ class TestMapField(MongoDBTestCase): e.save() e2 = Extensible.objects.get(id=e.id) - self.assertIsInstance(e2.mapping["somestring"], StringSetting) - self.assertIsInstance(e2.mapping["someint"], IntegerSetting) + assert isinstance(e2.mapping["somestring"], StringSetting) + assert isinstance(e2.mapping["someint"], IntegerSetting) - with 
self.assertRaises(ValidationError): + with pytest.raises(ValidationError): e.mapping["someint"] = 123 e.save() @@ -74,9 +75,9 @@ class TestMapField(MongoDBTestCase): Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) test = Test.objects.get() - self.assertEqual(test.my_map["DICTIONARY_KEY"].number, 2) + assert test.my_map["DICTIONARY_KEY"].number == 2 doc = self.db.test.find_one() - self.assertEqual(doc["x"]["DICTIONARY_KEY"]["i"], 2) + assert doc["x"]["DICTIONARY_KEY"]["i"] == 2 def test_mapfield_numerical_index(self): """Ensure that MapField accept numeric strings as indexes.""" @@ -116,13 +117,13 @@ class TestMapField(MongoDBTestCase): actions={"friends": Action(operation="drink", object="beer")}, ).save() - self.assertEqual(1, Log.objects(visited__friends__exists=True).count()) + assert 1 == Log.objects(visited__friends__exists=True).count() - self.assertEqual( - 1, - Log.objects( + assert ( + 1 + == Log.objects( actions__friends__operation="drink", actions__friends__object="beer" - ).count(), + ).count() ) def test_map_field_unicode(self): @@ -139,7 +140,7 @@ class TestMapField(MongoDBTestCase): tree.save() - self.assertEqual( - BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description, - u"VALUE: éééé", + assert ( + BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description + == u"VALUE: éééé" ) diff --git a/tests/fields/test_reference_field.py b/tests/fields/test_reference_field.py index 783a46da..783d1315 100644 --- a/tests/fields/test_reference_field.py +++ b/tests/fields/test_reference_field.py @@ -4,6 +4,7 @@ from bson import DBRef, SON from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestReferenceField(MongoDBTestCase): @@ -24,19 +25,22 @@ class TestReferenceField(MongoDBTestCase): # Make sure ReferenceField only accepts a document class or a string # with a document class name. 
- self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument) + with pytest.raises(ValidationError): + ReferenceField(EmbeddedDocument) user = User(name="Test User") # Ensure that the referenced object must have been saved post1 = BlogPost(content="Chips and gravy taste good.") post1.author = user - self.assertRaises(ValidationError, post1.save) + with pytest.raises(ValidationError): + post1.save() # Check that an invalid object type cannot be used post2 = BlogPost(content="Chips and chilli taste good.") post1.author = post2 - self.assertRaises(ValidationError, post1.validate) + with pytest.raises(ValidationError): + post1.validate() # Ensure ObjectID's are accepted as references user_object_id = user.pk @@ -52,7 +56,8 @@ class TestReferenceField(MongoDBTestCase): # Make sure referencing a saved document of the *wrong* type fails post2.save() post1.author = post2 - self.assertRaises(ValidationError, post1.validate) + with pytest.raises(ValidationError): + post1.validate() def test_objectid_reference_fields(self): """Make sure storing Object ID references works.""" @@ -67,7 +72,7 @@ class TestReferenceField(MongoDBTestCase): Person(name="Ross", parent=p1.pk).save() p = Person.objects.get(name="Ross") - self.assertEqual(p.parent, p1) + assert p.parent == p1 def test_dbref_reference_fields(self): """Make sure storing references as bson.dbref.DBRef works.""" @@ -81,13 +86,12 @@ class TestReferenceField(MongoDBTestCase): p1 = Person(name="John").save() Person(name="Ross", parent=p1).save() - self.assertEqual( - Person._get_collection().find_one({"name": "Ross"})["parent"], - DBRef("person", p1.pk), + assert Person._get_collection().find_one({"name": "Ross"})["parent"] == DBRef( + "person", p1.pk ) p = Person.objects.get(name="Ross") - self.assertEqual(p.parent, p1) + assert p.parent == p1 def test_dbref_to_mongo(self): """Make sure that calling to_mongo on a ReferenceField which @@ -100,9 +104,7 @@ class TestReferenceField(MongoDBTestCase): parent = ReferenceField("self", dbref=False) p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop")) - self.assertEqual( - p.to_mongo(), SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")]) - ) + assert p.to_mongo() == SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")]) def test_objectid_reference_fields(self): class Person(Document): @@ -116,10 +118,10 @@ class TestReferenceField(MongoDBTestCase): col = Person._get_collection() data = col.find_one({"name": "Ross"}) - self.assertEqual(data["parent"], p1.pk) + assert data["parent"] == p1.pk p = Person.objects.get(name="Ross") - self.assertEqual(p.parent, p1) + assert p.parent == p1 def test_undefined_reference(self): """Ensure that ReferenceFields may reference undefined Documents. 
@@ -144,14 +146,14 @@ class TestReferenceField(MongoDBTestCase): me.save() obj = Product.objects(company=ten_gen).first() - self.assertEqual(obj, mongodb) - self.assertEqual(obj.company, ten_gen) + assert obj == mongodb + assert obj.company == ten_gen obj = Product.objects(company=None).first() - self.assertEqual(obj, me) + assert obj == me obj = Product.objects.get(company=None) - self.assertEqual(obj, me) + assert obj == me def test_reference_query_conversion(self): """Ensure that ReferenceFields can be queried using objects and values @@ -180,10 +182,10 @@ class TestReferenceField(MongoDBTestCase): post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_reference_query_conversion_dbref(self): """Ensure that ReferenceFields can be queried using objects and values @@ -212,7 +214,7 @@ class TestReferenceField(MongoDBTestCase): post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py index f2c8388b..aa83f710 100644 --- a/tests/fields/test_sequence_field.py +++ b/tests/fields/test_sequence_field.py @@ -18,17 +18,17 @@ class TestSequenceField(MongoDBTestCase): Person(name="Person %s" % x).save() c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == range(1, 11) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 Person.id.set_next_value(1000) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 1000) + assert c["next"] == 1000 def test_sequence_field_get_next_value(self): class Person(Document): @@ -41,10 +41,10 @@ class TestSequenceField(MongoDBTestCase): for x in range(10): Person(name="Person %s" % x).save() - self.assertEqual(Person.id.get_next_value(), 11) + assert Person.id.get_next_value() == 11 self.db["mongoengine.counters"].drop() - self.assertEqual(Person.id.get_next_value(), 1) + assert Person.id.get_next_value() == 1 class Person(Document): id = SequenceField(primary_key=True, value_decorator=str) @@ -56,10 +56,10 @@ class TestSequenceField(MongoDBTestCase): for x in range(10): Person(name="Person %s" % x).save() - self.assertEqual(Person.id.get_next_value(), "11") + assert Person.id.get_next_value() == "11" self.db["mongoengine.counters"].drop() - self.assertEqual(Person.id.get_next_value(), "1") + assert Person.id.get_next_value() == "1" def test_sequence_field_sequence_name(self): class Person(Document): @@ -73,17 +73,17 @@ class TestSequenceField(MongoDBTestCase): Person(name="Person %s" % x).save() c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == range(1, 11) c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 Person.id.set_next_value(1000) c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) - 
self.assertEqual(c["next"], 1000) + assert c["next"] == 1000 def test_multiple_sequence_fields(self): class Person(Document): @@ -98,24 +98,24 @@ class TestSequenceField(MongoDBTestCase): Person(name="Person %s" % x).save() c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == range(1, 11) counters = [i.counter for i in Person.objects] - self.assertEqual(counters, range(1, 11)) + assert counters == range(1, 11) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 Person.id.set_next_value(1000) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 1000) + assert c["next"] == 1000 Person.counter.set_next_value(999) c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"}) - self.assertEqual(c["next"], 999) + assert c["next"] == 999 def test_sequence_fields_reload(self): class Animal(Document): @@ -127,20 +127,20 @@ class TestSequenceField(MongoDBTestCase): a = Animal(name="Boi").save() - self.assertEqual(a.counter, 1) + assert a.counter == 1 a.reload() - self.assertEqual(a.counter, 1) + assert a.counter == 1 a.counter = None - self.assertEqual(a.counter, 2) + assert a.counter == 2 a.save() - self.assertEqual(a.counter, 2) + assert a.counter == 2 a = Animal.objects.first() - self.assertEqual(a.counter, 2) + assert a.counter == 2 a.reload() - self.assertEqual(a.counter, 2) + assert a.counter == 2 def test_multiple_sequence_fields_on_docs(self): class Animal(Document): @@ -160,22 +160,22 @@ class TestSequenceField(MongoDBTestCase): Person(name="Person %s" % x).save() c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == range(1, 11) id = [i.id for i in Animal.objects] - self.assertEqual(id, range(1, 11)) + assert id == range(1, 11) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 def test_sequence_field_value_decorator(self): class Person(Document): @@ -190,13 +190,13 @@ class TestSequenceField(MongoDBTestCase): p.save() c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, map(str, range(1, 11))) + assert ids == map(str, range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) - self.assertEqual(c["next"], 10) + assert c["next"] == 10 def test_embedded_sequence_field(self): class Comment(EmbeddedDocument): @@ -218,10 +218,10 @@ class TestSequenceField(MongoDBTestCase): ], ).save() c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"}) - self.assertEqual(c["next"], 2) + assert c["next"] == 2 post = Post.objects.first() - self.assertEqual(1, post.comments[0].id) - self.assertEqual(2, post.comments[1].id) + assert 1 == post.comments[0].id + assert 2 == post.comments[1].id def test_inherited_sequencefield(self): class Base(Document): @@ -241,16 +241,14 @@ class 
TestSequenceField(MongoDBTestCase): foo = Foo(name="Foo") foo.save() - self.assertTrue( - "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") - ) - self.assertFalse( + assert "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") + assert not ( ("foo.counter" or "bar.counter") in self.db["mongoengine.counters"].find().distinct("_id") ) - self.assertNotEqual(foo.counter, bar.counter) - self.assertEqual(foo._fields["counter"].owner_document, Base) - self.assertEqual(bar._fields["counter"].owner_document, Base) + assert foo.counter != bar.counter + assert foo._fields["counter"].owner_document == Base + assert bar._fields["counter"].owner_document == Base def test_no_inherited_sequencefield(self): class Base(Document): @@ -269,13 +267,12 @@ class TestSequenceField(MongoDBTestCase): foo = Foo(name="Foo") foo.save() - self.assertFalse( + assert not ( "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") ) - self.assertTrue( - ("foo.counter" and "bar.counter") - in self.db["mongoengine.counters"].find().distinct("_id") - ) - self.assertEqual(foo.counter, bar.counter) - self.assertEqual(foo._fields["counter"].owner_document, Foo) - self.assertEqual(bar._fields["counter"].owner_document, Bar) + assert ("foo.counter" and "bar.counter") in self.db[ + "mongoengine.counters" + ].find().distinct("_id") + assert foo.counter == bar.counter + assert foo._fields["counter"].owner_document == Foo + assert bar._fields["counter"].owner_document == Bar diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index 81baf8d0..e7df0e08 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -2,6 +2,7 @@ from mongoengine import * from tests.utils import MongoDBTestCase +import pytest class TestURLField(MongoDBTestCase): @@ -13,7 +14,8 @@ class TestURLField(MongoDBTestCase): link = Link() link.url = "google" - self.assertRaises(ValidationError, link.validate) + with pytest.raises(ValidationError): + link.validate() link.url = "http://www.google.com:8080" link.validate() @@ -29,11 +31,11 @@ class TestURLField(MongoDBTestCase): # TODO fix URL validation - this *IS* a valid URL # For now we just want to make sure that the error message is correct - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as ctx_err: link.validate() - self.assertEqual( - unicode(ctx_err.exception), - u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])", + assert ( + unicode(ctx_err.exception) + == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" ) def test_url_scheme_validation(self): @@ -48,7 +50,8 @@ class TestURLField(MongoDBTestCase): link = Link() link.url = "ws://google.com" - self.assertRaises(ValidationError, link.validate) + with pytest.raises(ValidationError): + link.validate() scheme_link = SchemeLink() scheme_link.url = "ws://google.com" diff --git a/tests/fields/test_uuid_field.py b/tests/fields/test_uuid_field.py index 647dceaf..b1413f95 100644 --- a/tests/fields/test_uuid_field.py +++ b/tests/fields/test_uuid_field.py @@ -4,6 +4,7 @@ import uuid from mongoengine import * from tests.utils import MongoDBTestCase, get_as_pymongo +import pytest class Person(Document): @@ -14,9 +15,7 @@ class TestUUIDField(MongoDBTestCase): def test_storage(self): uid = uuid.uuid4() person = Person(api_key=uid).save() - self.assertEqual( - get_as_pymongo(person), {"_id": person.id, "api_key": str(uid)} - ) 
+ assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)} def test_field_string(self): """Test UUID fields storing as String @@ -25,8 +24,8 @@ class TestUUIDField(MongoDBTestCase): uu = uuid.uuid4() Person(api_key=uu).save() - self.assertEqual(1, Person.objects(api_key=uu).count()) - self.assertEqual(uu, Person.objects.first().api_key) + assert 1 == Person.objects(api_key=uu).count() + assert uu == Person.objects.first().api_key person = Person() valid = (uuid.uuid4(), uuid.uuid1()) @@ -40,7 +39,8 @@ class TestUUIDField(MongoDBTestCase): ) for api_key in invalid: person.api_key = api_key - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() def test_field_binary(self): """Test UUID fields storing as Binary object.""" @@ -48,8 +48,8 @@ class TestUUIDField(MongoDBTestCase): uu = uuid.uuid4() Person(api_key=uu).save() - self.assertEqual(1, Person.objects(api_key=uu).count()) - self.assertEqual(uu, Person.objects.first().api_key) + assert 1 == Person.objects(api_key=uu).count() + assert uu == Person.objects.first().api_key person = Person() valid = (uuid.uuid4(), uuid.uuid1()) @@ -63,4 +63,5 @@ class TestUUIDField(MongoDBTestCase): ) for api_key in invalid: person.api_key = api_key - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() diff --git a/tests/queryset/test_field_list.py b/tests/queryset/test_field_list.py index 703c2031..d33c4c86 100644 --- a/tests/queryset/test_field_list.py +++ b/tests/queryset/test_field_list.py @@ -2,66 +2,67 @@ import unittest from mongoengine import * from mongoengine.queryset import QueryFieldList +import pytest class TestQueryFieldList(unittest.TestCase): def test_empty(self): q = QueryFieldList() - self.assertFalse(q) + assert not q q = QueryFieldList(always_include=["_cls"]) - self.assertFalse(q) + assert not q def test_include_include(self): q = QueryFieldList() q += QueryFieldList( fields=["a", "b"], value=QueryFieldList.ONLY, _only_called=True ) - self.assertEqual(q.as_dict(), {"a": 1, "b": 1}) + assert q.as_dict() == {"a": 1, "b": 1} q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {"a": 1, "b": 1, "c": 1}) + assert q.as_dict() == {"a": 1, "b": 1, "c": 1} def test_include_exclude(self): q = QueryFieldList() q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {"a": 1, "b": 1}) + assert q.as_dict() == {"a": 1, "b": 1} q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {"a": 1}) + assert q.as_dict() == {"a": 1} def test_exclude_exclude(self): q = QueryFieldList() q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {"a": 0, "b": 0}) + assert q.as_dict() == {"a": 0, "b": 0} q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {"a": 0, "b": 0, "c": 0}) + assert q.as_dict() == {"a": 0, "b": 0, "c": 0} def test_exclude_include(self): q = QueryFieldList() q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {"a": 0, "b": 0}) + assert q.as_dict() == {"a": 0, "b": 0} q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {"c": 1}) + assert q.as_dict() == {"c": 1} def test_always_include(self): q = QueryFieldList(always_include=["x", "y"]) q += QueryFieldList(fields=["a", "b", "x"], 
value=QueryFieldList.EXCLUDE) q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "c": 1}) + assert q.as_dict() == {"x": 1, "y": 1, "c": 1} def test_reset(self): q = QueryFieldList(always_include=["x", "y"]) q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE) q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "c": 1}) + assert q.as_dict() == {"x": 1, "y": 1, "c": 1} q.reset() - self.assertFalse(q) + assert not q q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {"x": 1, "y": 1, "b": 1, "c": 1}) + assert q.as_dict() == {"x": 1, "y": 1, "b": 1, "c": 1} def test_using_a_slice(self): q = QueryFieldList() q += QueryFieldList(fields=["a"], value={"$slice": 5}) - self.assertEqual(q.as_dict(), {"a": {"$slice": 5}}) + assert q.as_dict() == {"a": {"$slice": 5}} class TestOnlyExcludeAll(unittest.TestCase): @@ -90,25 +91,23 @@ class TestOnlyExcludeAll(unittest.TestCase): only = ["b", "c"] qs = MyDoc.objects.fields(**{i: 1 for i in include}) - self.assertEqual( - qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1, "d": 1, "e": 1} - ) + assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1, "d": 1, "e": 1} qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} qs = qs.exclude(*exclude) - self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} qs = MyDoc.objects.fields(**{i: 1 for i in include}) qs = qs.exclude(*exclude) - self.assertEqual(qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1}) + assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1} qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} qs = MyDoc.objects.exclude(*exclude) qs = qs.fields(**{i: 1 for i in include}) - self.assertEqual(qs._loaded_fields.as_dict(), {"a": 1, "b": 1, "c": 1}) + assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1} qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {"b": 1, "c": 1}) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} def test_slicing(self): class MyDoc(Document): @@ -127,15 +126,16 @@ class TestOnlyExcludeAll(unittest.TestCase): qs = qs.exclude(*exclude) qs = qs.only(*only) qs = qs.fields(slice__b=5) - self.assertEqual(qs._loaded_fields.as_dict(), {"b": {"$slice": 5}, "c": 1}) + assert qs._loaded_fields.as_dict() == {"b": {"$slice": 5}, "c": 1} qs = qs.fields(slice__c=[5, 1]) - self.assertEqual( - qs._loaded_fields.as_dict(), {"b": {"$slice": 5}, "c": {"$slice": [5, 1]}} - ) + assert qs._loaded_fields.as_dict() == { + "b": {"$slice": 5}, + "c": {"$slice": [5, 1]}, + } qs = qs.exclude("c") - self.assertEqual(qs._loaded_fields.as_dict(), {"b": {"$slice": 5}}) + assert qs._loaded_fields.as_dict() == {"b": {"$slice": 5}} def test_mix_slice_with_other_fields(self): class MyDoc(Document): @@ -144,7 +144,7 @@ class TestOnlyExcludeAll(unittest.TestCase): c = ListField() qs = MyDoc.objects.fields(a=1, b=0, slice__c=2) - self.assertEqual(qs._loaded_fields.as_dict(), {"c": {"$slice": 2}, "a": 1}) + assert qs._loaded_fields.as_dict() == {"c": {"$slice": 2}, "a": 1} def test_only(self): """Ensure that QuerySet.only only returns the requested fields. 
@@ -153,20 +153,20 @@ class TestOnlyExcludeAll(unittest.TestCase): person.save() obj = self.Person.objects.only("name").get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, None) + assert obj.name == person.name + assert obj.age == None obj = self.Person.objects.only("age").get() - self.assertEqual(obj.name, None) - self.assertEqual(obj.age, person.age) + assert obj.name == None + assert obj.age == person.age obj = self.Person.objects.only("name", "age").get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, person.age) + assert obj.name == person.name + assert obj.age == person.age obj = self.Person.objects.only(*("id", "name")).get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, None) + assert obj.name == person.name + assert obj.age == None # Check polymorphism still works class Employee(self.Person): @@ -176,12 +176,12 @@ class TestOnlyExcludeAll(unittest.TestCase): employee.save() obj = self.Person.objects(id=employee.id).only("age").get() - self.assertIsInstance(obj, Employee) + assert isinstance(obj, Employee) # Check field names are looked up properly obj = Employee.objects(id=employee.id).only("salary").get() - self.assertEqual(obj.salary, employee.salary) - self.assertEqual(obj.name, None) + assert obj.salary == employee.salary + assert obj.name == None def test_only_with_subfields(self): class User(EmbeddedDocument): @@ -215,29 +215,29 @@ class TestOnlyExcludeAll(unittest.TestCase): post.save() obj = BlogPost.objects.only("author.name").get() - self.assertEqual(obj.content, None) - self.assertEqual(obj.author.email, None) - self.assertEqual(obj.author.name, "Test User") - self.assertEqual(obj.comments, []) + assert obj.content == None + assert obj.author.email == None + assert obj.author.name == "Test User" + assert obj.comments == [] obj = BlogPost.objects.only("various.test_dynamic.some").get() - self.assertEqual(obj.various["test_dynamic"].some, True) + assert obj.various["test_dynamic"].some == True obj = BlogPost.objects.only("content", "comments.title").get() - self.assertEqual(obj.content, "Had a good coffee today...") - self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, "I aggree") - self.assertEqual(obj.comments[1].title, "Coffee") - self.assertEqual(obj.comments[0].text, None) - self.assertEqual(obj.comments[1].text, None) + assert obj.content == "Had a good coffee today..." + assert obj.author == None + assert obj.comments[0].title == "I aggree" + assert obj.comments[1].title == "Coffee" + assert obj.comments[0].text == None + assert obj.comments[1].text == None obj = BlogPost.objects.only("comments").get() - self.assertEqual(obj.content, None) - self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, "I aggree") - self.assertEqual(obj.comments[1].title, "Coffee") - self.assertEqual(obj.comments[0].text, "Great post!") - self.assertEqual(obj.comments[1].text, "I hate coffee") + assert obj.content == None + assert obj.author == None + assert obj.comments[0].title == "I aggree" + assert obj.comments[1].title == "Coffee" + assert obj.comments[0].text == "Great post!" 
+ assert obj.comments[1].text == "I hate coffee" BlogPost.drop_collection() @@ -266,10 +266,10 @@ class TestOnlyExcludeAll(unittest.TestCase): post.save() obj = BlogPost.objects.exclude("author", "comments.text").get() - self.assertEqual(obj.author, None) - self.assertEqual(obj.content, "Had a good coffee today...") - self.assertEqual(obj.comments[0].title, "I aggree") - self.assertEqual(obj.comments[0].text, None) + assert obj.author == None + assert obj.content == "Had a good coffee today..." + assert obj.comments[0].title == "I aggree" + assert obj.comments[0].text == None BlogPost.drop_collection() @@ -301,18 +301,18 @@ class TestOnlyExcludeAll(unittest.TestCase): email.save() obj = Email.objects.exclude("content_type").exclude("body").get() - self.assertEqual(obj.sender, "me") - self.assertEqual(obj.to, "you") - self.assertEqual(obj.subject, "From Russia with Love") - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) + assert obj.sender == "me" + assert obj.to == "you" + assert obj.subject == "From Russia with Love" + assert obj.body == None + assert obj.content_type == None obj = Email.objects.only("sender", "to").exclude("body", "sender").get() - self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, "you") - self.assertEqual(obj.subject, None) - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) + assert obj.sender == None + assert obj.to == "you" + assert obj.subject == None + assert obj.body == None + assert obj.content_type == None obj = ( Email.objects.exclude("attachments.content") @@ -320,13 +320,13 @@ class TestOnlyExcludeAll(unittest.TestCase): .only("to", "attachments.name") .get() ) - self.assertEqual(obj.attachments[0].name, "file1.doc") - self.assertEqual(obj.attachments[0].content, None) - self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, "you") - self.assertEqual(obj.subject, None) - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) + assert obj.attachments[0].name == "file1.doc" + assert obj.attachments[0].content == None + assert obj.sender == None + assert obj.to == "you" + assert obj.subject == None + assert obj.body == None + assert obj.content_type == None Email.drop_collection() @@ -355,11 +355,11 @@ class TestOnlyExcludeAll(unittest.TestCase): .all_fields() .get() ) - self.assertEqual(obj.sender, "me") - self.assertEqual(obj.to, "you") - self.assertEqual(obj.subject, "From Russia with Love") - self.assertEqual(obj.body, "Hello!") - self.assertEqual(obj.content_type, "text/plain") + assert obj.sender == "me" + assert obj.to == "you" + assert obj.subject == "From Russia with Love" + assert obj.body == "Hello!" 
+ assert obj.content_type == "text/plain" Email.drop_collection() @@ -377,27 +377,27 @@ class TestOnlyExcludeAll(unittest.TestCase): # first three numbers = Numbers.objects.fields(slice__n=3).get() - self.assertEqual(numbers.n, [0, 1, 2]) + assert numbers.n == [0, 1, 2] # last three numbers = Numbers.objects.fields(slice__n=-3).get() - self.assertEqual(numbers.n, [-3, -2, -1]) + assert numbers.n == [-3, -2, -1] # skip 2, limit 3 numbers = Numbers.objects.fields(slice__n=[2, 3]).get() - self.assertEqual(numbers.n, [2, 3, 4]) + assert numbers.n == [2, 3, 4] # skip to fifth from last, limit 4 numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2]) + assert numbers.n == [-5, -4, -3, -2] # skip to fifth from last, limit 10 numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) + assert numbers.n == [-5, -4, -3, -2, -1] # skip to fifth from last, limit 10 dict method numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) + assert numbers.n == [-5, -4, -3, -2, -1] def test_slicing_nested_fields(self): """Ensure that query slicing an embedded array works. @@ -417,27 +417,27 @@ class TestOnlyExcludeAll(unittest.TestCase): # first three numbers = Numbers.objects.fields(slice__embedded__n=3).get() - self.assertEqual(numbers.embedded.n, [0, 1, 2]) + assert numbers.embedded.n == [0, 1, 2] # last three numbers = Numbers.objects.fields(slice__embedded__n=-3).get() - self.assertEqual(numbers.embedded.n, [-3, -2, -1]) + assert numbers.embedded.n == [-3, -2, -1] # skip 2, limit 3 numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() - self.assertEqual(numbers.embedded.n, [2, 3, 4]) + assert numbers.embedded.n == [2, 3, 4] # skip to fifth from last, limit 4 numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2]) + assert numbers.embedded.n == [-5, -4, -3, -2] # skip to fifth from last, limit 10 numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) + assert numbers.embedded.n == [-5, -4, -3, -2, -1] # skip to fifth from last, limit 10 dict method numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) + assert numbers.embedded.n == [-5, -4, -3, -2, -1] def test_exclude_from_subclasses_docs(self): class Base(Document): @@ -456,9 +456,10 @@ class TestOnlyExcludeAll(unittest.TestCase): User(username="mongodb", password="secret").save() user = Base.objects().exclude("password", "wibble").first() - self.assertEqual(user.password, None) + assert user.password == None - self.assertRaises(LookUpError, Base.objects.exclude, "made_up") + with pytest.raises(LookUpError): + Base.objects.exclude("made_up") if __name__ == "__main__": diff --git a/tests/queryset/test_geo.py b/tests/queryset/test_geo.py index 343f864b..a546fdb6 100644 --- a/tests/queryset/test_geo.py +++ b/tests/queryset/test_geo.py @@ -48,14 +48,14 @@ class TestGeoQueries(MongoDBTestCase): # note that "near" will show the san francisco event, too, # although it sorts to last. 
events = self.Event.objects(location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) + assert events.count() == 3 + assert list(events) == [event1, event3, event2] # ensure ordering is respected by "near" events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = events.order_by("-date") - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event3, event1, event2]) + assert events.count() == 3 + assert list(events) == [event3, event1, event2] def test_near_and_max_distance(self): """Ensure the "max_distance" operator works alongside the "near" @@ -66,8 +66,8 @@ class TestGeoQueries(MongoDBTestCase): # find events within 10 degrees of san francisco point = [-122.415579, 37.7566023] events = self.Event.objects(location__near=point, location__max_distance=10) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + assert events.count() == 1 + assert events[0] == event2 def test_near_and_min_distance(self): """Ensure the "min_distance" operator works alongside the "near" @@ -78,7 +78,7 @@ class TestGeoQueries(MongoDBTestCase): # find events at least 10 degrees away of san francisco point = [-122.415579, 37.7566023] events = self.Event.objects(location__near=point, location__min_distance=10) - self.assertEqual(events.count(), 2) + assert events.count() == 2 def test_within_distance(self): """Make sure the "within_distance" operator works.""" @@ -87,29 +87,29 @@ class TestGeoQueries(MongoDBTestCase): # find events within 5 degrees of pitchfork office, chicago point_and_distance = [[-87.67892, 41.9120459], 5] events = self.Event.objects(location__within_distance=point_and_distance) - self.assertEqual(events.count(), 2) + assert events.count() == 2 events = list(events) - self.assertNotIn(event2, events) - self.assertIn(event1, events) - self.assertIn(event3, events) + assert event2 not in events + assert event1 in events + assert event3 in events # find events within 10 degrees of san francisco point_and_distance = [[-122.415579, 37.7566023], 10] events = self.Event.objects(location__within_distance=point_and_distance) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + assert events.count() == 1 + assert events[0] == event2 # find events within 1 degree of greenpoint, broolyn, nyc, ny point_and_distance = [[-73.9509714, 40.7237134], 1] events = self.Event.objects(location__within_distance=point_and_distance) - self.assertEqual(events.count(), 0) + assert events.count() == 0 # ensure ordering is respected by "within_distance" point_and_distance = [[-87.67892, 41.9120459], 10] events = self.Event.objects(location__within_distance=point_and_distance) events = events.order_by("-date") - self.assertEqual(events.count(), 2) - self.assertEqual(events[0], event3) + assert events.count() == 2 + assert events[0] == event3 def test_within_box(self): """Ensure the "within_box" operator works.""" @@ -118,8 +118,8 @@ class TestGeoQueries(MongoDBTestCase): # check that within_box works box = [(-125.0, 35.0), (-100.0, 40.0)] events = self.Event.objects(location__within_box=box) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event2.id) + assert events.count() == 1 + assert events[0].id == event2.id def test_within_polygon(self): """Ensure the "within_polygon" operator works.""" @@ -133,8 +133,8 @@ class TestGeoQueries(MongoDBTestCase): (-87.656164, 41.898061), ] events = self.Event.objects(location__within_polygon=polygon) - 
self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event1.id) + assert events.count() == 1 + assert events[0].id == event1.id polygon2 = [ (-1.742249, 54.033586), @@ -142,7 +142,7 @@ class TestGeoQueries(MongoDBTestCase): (-4.40094, 53.389881), ] events = self.Event.objects(location__within_polygon=polygon2) - self.assertEqual(events.count(), 0) + assert events.count() == 0 def test_2dsphere_near(self): """Make sure the "near" operator works with a PointField, which @@ -154,14 +154,14 @@ class TestGeoQueries(MongoDBTestCase): # note that "near" will show the san francisco event, too, # although it sorts to last. events = self.Event.objects(location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) + assert events.count() == 3 + assert list(events) == [event1, event3, event2] # ensure ordering is respected by "near" events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = events.order_by("-date") - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event3, event1, event2]) + assert events.count() == 3 + assert list(events) == [event3, event1, event2] def test_2dsphere_near_and_max_distance(self): """Ensure the "max_distance" operator works alongside the "near" @@ -172,21 +172,21 @@ class TestGeoQueries(MongoDBTestCase): # find events within 10km of san francisco point = [-122.415579, 37.7566023] events = self.Event.objects(location__near=point, location__max_distance=10000) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + assert events.count() == 1 + assert events[0] == event2 # find events within 1km of greenpoint, broolyn, nyc, ny events = self.Event.objects( location__near=[-73.9509714, 40.7237134], location__max_distance=1000 ) - self.assertEqual(events.count(), 0) + assert events.count() == 0 # ensure ordering is respected by "near" events = self.Event.objects( location__near=[-87.67892, 41.9120459], location__max_distance=10000 ).order_by("-date") - self.assertEqual(events.count(), 2) - self.assertEqual(events[0], event3) + assert events.count() == 2 + assert events[0] == event3 def test_2dsphere_geo_within_box(self): """Ensure the "geo_within_box" operator works with a 2dsphere @@ -197,8 +197,8 @@ class TestGeoQueries(MongoDBTestCase): # check that within_box works box = [(-125.0, 35.0), (-100.0, 40.0)] events = self.Event.objects(location__geo_within_box=box) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event2.id) + assert events.count() == 1 + assert events[0].id == event2.id def test_2dsphere_geo_within_polygon(self): """Ensure the "geo_within_polygon" operator works with a @@ -214,8 +214,8 @@ class TestGeoQueries(MongoDBTestCase): (-87.656164, 41.898061), ] events = self.Event.objects(location__geo_within_polygon=polygon) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event1.id) + assert events.count() == 1 + assert events[0].id == event1.id polygon2 = [ (-1.742249, 54.033586), @@ -223,7 +223,7 @@ class TestGeoQueries(MongoDBTestCase): (-4.40094, 53.389881), ] events = self.Event.objects(location__geo_within_polygon=polygon2) - self.assertEqual(events.count(), 0) + assert events.count() == 0 def test_2dsphere_near_and_min_max_distance(self): """Ensure "min_distace" and "max_distance" operators work well @@ -237,15 +237,15 @@ class TestGeoQueries(MongoDBTestCase): location__min_distance=1000, location__max_distance=10000, ).order_by("-date") - 
self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event3) + assert events.count() == 1 + assert events[0] == event3 # ensure ordering is respected by "near" with "min_distance" events = self.Event.objects( location__near=[-87.67892, 41.9120459], location__min_distance=10000 ).order_by("-date") - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + assert events.count() == 1 + assert events[0] == event2 def test_2dsphere_geo_within_center(self): """Make sure the "geo_within_center" operator works with a @@ -256,11 +256,11 @@ class TestGeoQueries(MongoDBTestCase): # find events within 5 degrees of pitchfork office, chicago point_and_distance = [[-87.67892, 41.9120459], 2] events = self.Event.objects(location__geo_within_center=point_and_distance) - self.assertEqual(events.count(), 2) + assert events.count() == 2 events = list(events) - self.assertNotIn(event2, events) - self.assertIn(event1, events) - self.assertIn(event3, events) + assert event2 not in events + assert event1 in events + assert event3 in events def _test_embedded(self, point_field_class): """Helper test method ensuring given point field class works @@ -290,8 +290,8 @@ class TestGeoQueries(MongoDBTestCase): # note that "near" will show the san francisco event, too, # although it sorts to last. events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) + assert events.count() == 3 + assert list(events) == [event1, event3, event2] def test_geo_spatial_embedded(self): """Make sure GeoPointField works properly in an embedded document.""" @@ -319,55 +319,55 @@ class TestGeoQueries(MongoDBTestCase): # Finds both points because they are within 60 km of the reference # point equidistant between them. points = Point.objects(location__near_sphere=[-122, 37.5]) - self.assertEqual(points.count(), 2) + assert points.count() == 2 # Same behavior for _within_spherical_distance points = Point.objects( location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius] ) - self.assertEqual(points.count(), 2) + assert points.count() == 2 points = Point.objects( location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius ) - self.assertEqual(points.count(), 2) + assert points.count() == 2 # Test query works with max_distance, being farer from one point points = Point.objects( location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius ) close_point = points.first() - self.assertEqual(points.count(), 1) + assert points.count() == 1 # Test query works with min_distance, being farer from one point points = Point.objects( location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius ) - self.assertEqual(points.count(), 1) + assert points.count() == 1 far_point = points.first() - self.assertNotEqual(close_point, far_point) + assert close_point != far_point # Finds both points, but orders the north point first because it's # closer to the reference point to the north. points = Point.objects(location__near_sphere=[-122, 38.5]) - self.assertEqual(points.count(), 2) - self.assertEqual(points[0].id, north_point.id) - self.assertEqual(points[1].id, south_point.id) + assert points.count() == 2 + assert points[0].id == north_point.id + assert points[1].id == south_point.id # Finds both points, but orders the south point first because it's # closer to the reference point to the south. 
points = Point.objects(location__near_sphere=[-122, 36.5]) - self.assertEqual(points.count(), 2) - self.assertEqual(points[0].id, south_point.id) - self.assertEqual(points[1].id, north_point.id) + assert points.count() == 2 + assert points[0].id == south_point.id + assert points[1].id == north_point.id # Finds only one point because only the first point is within 60km of # the reference point to the south. points = Point.objects( location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius] ) - self.assertEqual(points.count(), 1) - self.assertEqual(points[0].id, south_point.id) + assert points.count() == 1 + assert points[0].id == south_point.id def test_linestring(self): class Road(Document): @@ -381,13 +381,13 @@ class TestGeoQueries(MongoDBTestCase): # near point = {"type": "Point", "coordinates": [40, 5]} roads = Road.objects.filter(line__near=point["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__near=point).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__near={"$geometry": point}).count() - self.assertEqual(1, roads) + assert 1 == roads # Within polygon = { @@ -395,37 +395,37 @@ class TestGeoQueries(MongoDBTestCase): "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_within=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads # Intersects line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]} roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects=line).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count() - self.assertEqual(1, roads) + assert 1 == roads polygon = { "type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads def test_polygon(self): class Road(Document): @@ -439,13 +439,13 @@ class TestGeoQueries(MongoDBTestCase): # near point = {"type": "Point", "coordinates": [40, 5]} roads = Road.objects.filter(poly__near=point["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__near=point).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__near={"$geometry": point}).count() - self.assertEqual(1, roads) + assert 1 == roads # Within polygon = { @@ -453,37 +453,37 @@ class TestGeoQueries(MongoDBTestCase): "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_within=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_within={"$geometry": 
polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads # Intersects line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]} roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_intersects=line).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count() - self.assertEqual(1, roads) + assert 1 == roads polygon = { "type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_intersects=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads def test_aspymongo_with_only(self): """Ensure as_pymongo works with only""" @@ -495,13 +495,10 @@ class TestGeoQueries(MongoDBTestCase): p = Place(location=[24.946861267089844, 60.16311983618494]) p.save() qs = Place.objects().only("location") - self.assertDictEqual( - qs.as_pymongo()[0]["location"], - { - u"type": u"Point", - u"coordinates": [24.946861267089844, 60.16311983618494], - }, - ) + assert qs.as_pymongo()[0]["location"] == { + u"type": u"Point", + u"coordinates": [24.946861267089844, 60.16311983618494], + } def test_2dsphere_point_sets_correctly(self): class Location(Document): @@ -511,11 +508,11 @@ class TestGeoQueries(MongoDBTestCase): Location(loc=[1, 2]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]}) + assert loc["loc"] == {"type": "Point", "coordinates": [1, 2]} Location.objects.update(set__loc=[2, 1]) loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]}) + assert loc["loc"] == {"type": "Point", "coordinates": [2, 1]} def test_2dsphere_linestring_sets_correctly(self): class Location(Document): @@ -525,15 +522,11 @@ class TestGeoQueries(MongoDBTestCase): Location(line=[[1, 2], [2, 2]]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual( - loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]} - ) + assert loc["line"] == {"type": "LineString", "coordinates": [[1, 2], [2, 2]]} Location.objects.update(set__line=[[2, 1], [1, 2]]) loc = Location.objects.as_pymongo()[0] - self.assertEqual( - loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]} - ) + assert loc["line"] == {"type": "LineString", "coordinates": [[2, 1], [1, 2]]} def test_geojson_PolygonField(self): class Location(Document): @@ -543,17 +536,17 @@ class TestGeoQueries(MongoDBTestCase): Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual( - loc["poly"], - {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}, - ) + assert loc["poly"] == { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + } Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]]) loc = Location.objects.as_pymongo()[0] - self.assertEqual( - loc["poly"], - {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}, - ) + assert loc["poly"] == { + "type": "Polygon", + "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]], + } if __name__ == "__main__": diff --git 
a/tests/queryset/test_modify.py b/tests/queryset/test_modify.py index 60f4884c..293a463e 100644 --- a/tests/queryset/test_modify.py +++ b/tests/queryset/test_modify.py @@ -14,14 +14,14 @@ class TestFindAndModify(unittest.TestCase): Doc.drop_collection() def assertDbEqual(self, docs): - self.assertEqual(list(Doc._collection.find().sort("id")), docs) + assert list(Doc._collection.find().sort("id")) == docs def test_modify(self): Doc(id=0, value=0).save() doc = Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).modify(set__value=-1) - self.assertEqual(old_doc.to_json(), doc.to_json()) + assert old_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_new(self): @@ -30,18 +30,18 @@ class TestFindAndModify(unittest.TestCase): new_doc = Doc.objects(id=1).modify(set__value=-1, new=True) doc.value = -1 - self.assertEqual(new_doc.to_json(), doc.to_json()) + assert new_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_not_existing(self): Doc(id=0, value=0).save() - self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None) + assert Doc.objects(id=1).modify(set__value=-1) == None self.assertDbEqual([{"_id": 0, "value": 0}]) def test_modify_with_upsert(self): Doc(id=0, value=0).save() old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True) - self.assertEqual(old_doc, None) + assert old_doc == None self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) def test_modify_with_upsert_existing(self): @@ -49,13 +49,13 @@ class TestFindAndModify(unittest.TestCase): doc = Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True) - self.assertEqual(old_doc.to_json(), doc.to_json()) + assert old_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_upsert_with_new(self): Doc(id=0, value=0).save() new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1) - self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1}) + assert new_doc.to_mongo() == {"_id": 1, "value": 1} self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) def test_modify_with_remove(self): @@ -63,12 +63,12 @@ class TestFindAndModify(unittest.TestCase): doc = Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).modify(remove=True) - self.assertEqual(old_doc.to_json(), doc.to_json()) + assert old_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}]) def test_find_and_modify_with_remove_not_existing(self): Doc(id=0, value=0).save() - self.assertEqual(Doc.objects(id=1).modify(remove=True), None) + assert Doc.objects(id=1).modify(remove=True) == None self.assertDbEqual([{"_id": 0, "value": 0}]) def test_modify_with_order_by(self): @@ -78,7 +78,7 @@ class TestFindAndModify(unittest.TestCase): doc = Doc(id=3, value=0).save() old_doc = Doc.objects().order_by("-id").modify(set__value=-1) - self.assertEqual(old_doc.to_json(), doc.to_json()) + assert old_doc.to_json() == doc.to_json() self.assertDbEqual( [ {"_id": 0, "value": 3}, @@ -93,7 +93,7 @@ class TestFindAndModify(unittest.TestCase): Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).only("id").modify(set__value=-1) - self.assertEqual(old_doc.to_mongo(), {"_id": 1}) + assert old_doc.to_mongo() == {"_id": 1} self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_push(self): @@ -106,23 +106,23 @@ class TestFindAndModify(unittest.TestCase): # Push a new tag 
via modify with new=False (default). BlogPost(id=blog.id).modify(push__tags="code") - self.assertEqual(blog.tags, []) + assert blog.tags == [] blog.reload() - self.assertEqual(blog.tags, ["code"]) + assert blog.tags == ["code"] # Push a new tag via modify with new=True. blog = BlogPost.objects(id=blog.id).modify(push__tags="java", new=True) - self.assertEqual(blog.tags, ["code", "java"]) + assert blog.tags == ["code", "java"] # Push a new tag with a positional argument. blog = BlogPost.objects(id=blog.id).modify(push__tags__0="python", new=True) - self.assertEqual(blog.tags, ["python", "code", "java"]) + assert blog.tags == ["python", "code", "java"] # Push multiple new tags with a positional argument. blog = BlogPost.objects(id=blog.id).modify( push__tags__1=["go", "rust"], new=True ) - self.assertEqual(blog.tags, ["python", "go", "rust", "code", "java"]) + assert blog.tags == ["python", "go", "rust", "code", "java"] if __name__ == "__main__": diff --git a/tests/queryset/test_pickable.py b/tests/queryset/test_pickable.py index 8c4e3426..d41f56df 100644 --- a/tests/queryset/test_pickable.py +++ b/tests/queryset/test_pickable.py @@ -37,13 +37,13 @@ class TestQuerysetPickable(MongoDBTestCase): loadedQs = self._get_loaded(qs) - self.assertEqual(qs.count(), loadedQs.count()) + assert qs.count() == loadedQs.count() # can update loadedQs loadedQs.update(age=23) # check - self.assertEqual(Person.objects.first().age, 23) + assert Person.objects.first().age == 23 def test_pickle_support_filtration(self): Person.objects.create(name="Alice", age=22) @@ -51,9 +51,9 @@ class TestQuerysetPickable(MongoDBTestCase): Person.objects.create(name="Bob", age=23) qs = Person.objects.filter(age__gte=22) - self.assertEqual(qs.count(), 2) + assert qs.count() == 2 loaded = self._get_loaded(qs) - self.assertEqual(loaded.count(), 2) - self.assertEqual(loaded.filter(name="Bob").first().age, 23) + assert loaded.count() == 2 + assert loaded.filter(name="Bob").first().age == 23 diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 16213254..d154de8d 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -24,6 +24,7 @@ from mongoengine.queryset import ( QuerySetManager, queryset_manager, ) +import pytest class db_ops_tracker(query_counter): @@ -64,11 +65,11 @@ class TestQueryset(unittest.TestCase): def test_initialisation(self): """Ensure that a QuerySet is correctly initialised by QuerySetManager. 
""" - self.assertIsInstance(self.Person.objects, QuerySet) - self.assertEqual( - self.Person.objects._collection.name, self.Person._get_collection_name() + assert isinstance(self.Person.objects, QuerySet) + assert ( + self.Person.objects._collection.name == self.Person._get_collection_name() ) - self.assertIsInstance( + assert isinstance( self.Person.objects._collection, pymongo.collection.Collection ) @@ -78,11 +79,11 @@ class TestQueryset(unittest.TestCase): author2 = GenericReferenceField() # test addressing a field from a reference - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): list(BlogPost.objects(author__name="test")) # should fail for a generic reference as well - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): list(BlogPost.objects(author2__name="test")) def test_find(self): @@ -92,27 +93,27 @@ class TestQueryset(unittest.TestCase): # Find all people in the collection people = self.Person.objects - self.assertEqual(people.count(), 2) + assert people.count() == 2 results = list(people) - self.assertIsInstance(results[0], self.Person) - self.assertIsInstance(results[0].id, ObjectId) + assert isinstance(results[0], self.Person) + assert isinstance(results[0].id, ObjectId) - self.assertEqual(results[0], user_a) - self.assertEqual(results[0].name, "User A") - self.assertEqual(results[0].age, 20) + assert results[0] == user_a + assert results[0].name == "User A" + assert results[0].age == 20 - self.assertEqual(results[1], user_b) - self.assertEqual(results[1].name, "User B") - self.assertEqual(results[1].age, 30) + assert results[1] == user_b + assert results[1].name == "User B" + assert results[1].age == 30 # Filter people by age people = self.Person.objects(age=20) - self.assertEqual(people.count(), 1) + assert people.count() == 1 person = people.next() - self.assertEqual(person, user_a) - self.assertEqual(person.name, "User A") - self.assertEqual(person.age, 20) + assert person == user_a + assert person.name == "User A" + assert person.age == 20 def test_limit(self): """Ensure that QuerySet.limit works as expected.""" @@ -121,27 +122,27 @@ class TestQueryset(unittest.TestCase): # Test limit on a new queryset people = list(self.Person.objects.limit(1)) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], user_a) + assert len(people) == 1 + assert people[0] == user_a # Test limit on an existing queryset people = self.Person.objects - self.assertEqual(len(people), 2) + assert len(people) == 2 people2 = people.limit(1) - self.assertEqual(len(people), 2) - self.assertEqual(len(people2), 1) - self.assertEqual(people2[0], user_a) + assert len(people) == 2 + assert len(people2) == 1 + assert people2[0] == user_a # Test limit with 0 as parameter people = self.Person.objects.limit(0) - self.assertEqual(people.count(with_limit_and_skip=True), 2) - self.assertEqual(len(people), 2) + assert people.count(with_limit_and_skip=True) == 2 + assert len(people) == 2 # Test chaining of only after limit person = self.Person.objects().limit(1).only("name").first() - self.assertEqual(person, user_a) - self.assertEqual(person.name, "User A") - self.assertEqual(person.age, None) + assert person == user_a + assert person.name == "User A" + assert person.age == None def test_skip(self): """Ensure that QuerySet.skip works as expected.""" @@ -150,26 +151,26 @@ class TestQueryset(unittest.TestCase): # Test skip on a new queryset people = list(self.Person.objects.skip(1)) - self.assertEqual(len(people), 1) - 
self.assertEqual(people[0], user_b) + assert len(people) == 1 + assert people[0] == user_b # Test skip on an existing queryset people = self.Person.objects - self.assertEqual(len(people), 2) + assert len(people) == 2 people2 = people.skip(1) - self.assertEqual(len(people), 2) - self.assertEqual(len(people2), 1) - self.assertEqual(people2[0], user_b) + assert len(people) == 2 + assert len(people2) == 1 + assert people2[0] == user_b # Test chaining of only after skip person = self.Person.objects().skip(1).only("name").first() - self.assertEqual(person, user_b) - self.assertEqual(person.name, "User B") - self.assertEqual(person.age, None) + assert person == user_b + assert person.name == "User B" + assert person.age == None def test___getitem___invalid_index(self): """Ensure slicing a queryset works as expected.""" - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.Person.objects()["a"] def test_slice(self): @@ -180,27 +181,27 @@ class TestQueryset(unittest.TestCase): # Test slice limit people = list(self.Person.objects[:2]) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], user_a) - self.assertEqual(people[1], user_b) + assert len(people) == 2 + assert people[0] == user_a + assert people[1] == user_b # Test slice skip people = list(self.Person.objects[1:]) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], user_b) - self.assertEqual(people[1], user_c) + assert len(people) == 2 + assert people[0] == user_b + assert people[1] == user_c # Test slice limit and skip people = list(self.Person.objects[1:2]) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], user_b) + assert len(people) == 1 + assert people[0] == user_b # Test slice limit and skip on an existing queryset people = self.Person.objects - self.assertEqual(len(people), 3) + assert len(people) == 3 people2 = people[1:2] - self.assertEqual(len(people2), 1) - self.assertEqual(people2[0], user_b) + assert len(people2) == 1 + assert people2[0] == user_b # Test slice limit and skip cursor reset qs = self.Person.objects[1:2] @@ -208,31 +209,31 @@ class TestQueryset(unittest.TestCase): qs._cursor qs._cursor_obj = None people = list(qs) - self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, "User B") + assert len(people) == 1 + assert people[0].name == "User B" # Test empty slice people = list(self.Person.objects[1:1]) - self.assertEqual(len(people), 0) + assert len(people) == 0 # Test slice out of range people = list(self.Person.objects[80000:80001]) - self.assertEqual(len(people), 0) + assert len(people) == 0 # Test larger slice __repr__ self.Person.objects.delete() for i in range(55): self.Person(name="A%s" % i, age=i).save() - self.assertEqual(self.Person.objects.count(), 55) - self.assertEqual("Person object", "%s" % self.Person.objects[0]) - self.assertEqual( - "[, ]", - "%s" % self.Person.objects[1:3], + assert self.Person.objects.count() == 55 + assert "Person object" == "%s" % self.Person.objects[0] + assert ( + "[, ]" + == "%s" % self.Person.objects[1:3] ) - self.assertEqual( - "[, ]", - "%s" % self.Person.objects[51:53], + assert ( + "[, ]" + == "%s" % self.Person.objects[51:53] ) def test_find_one(self): @@ -245,40 +246,42 @@ class TestQueryset(unittest.TestCase): # Retrieve the first person from the database person = self.Person.objects.first() - self.assertIsInstance(person, self.Person) - self.assertEqual(person.name, "User A") - self.assertEqual(person.age, 20) + assert isinstance(person, self.Person) + assert person.name == "User A" + assert 
person.age == 20 # Use a query to filter the people found to just person2 person = self.Person.objects(age=30).first() - self.assertEqual(person.name, "User B") + assert person.name == "User B" person = self.Person.objects(age__lt=30).first() - self.assertEqual(person.name, "User A") + assert person.name == "User A" # Use array syntax person = self.Person.objects[0] - self.assertEqual(person.name, "User A") + assert person.name == "User A" person = self.Person.objects[1] - self.assertEqual(person.name, "User B") + assert person.name == "User B" - with self.assertRaises(IndexError): + with pytest.raises(IndexError): self.Person.objects[2] # Find a document using just the object id person = self.Person.objects.with_id(person1.id) - self.assertEqual(person.name, "User A") + assert person.name == "User A" - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): self.Person.objects(name="User A").with_id(person1.id) def test_find_only_one(self): """Ensure that a query using ``get`` returns at most one result. """ # Try retrieving when no objects exists - self.assertRaises(DoesNotExist, self.Person.objects.get) - self.assertRaises(self.Person.DoesNotExist, self.Person.objects.get) + with pytest.raises(DoesNotExist): + self.Person.objects.get() + with pytest.raises(self.Person.DoesNotExist): + self.Person.objects.get() person1 = self.Person(name="User A", age=20) person1.save() @@ -286,15 +289,17 @@ class TestQueryset(unittest.TestCase): person2.save() # Retrieve the first person from the database - self.assertRaises(MultipleObjectsReturned, self.Person.objects.get) - self.assertRaises(self.Person.MultipleObjectsReturned, self.Person.objects.get) + with pytest.raises(MultipleObjectsReturned): + self.Person.objects.get() + with pytest.raises(self.Person.MultipleObjectsReturned): + self.Person.objects.get() # Use a query to filter the people found to just person2 person = self.Person.objects.get(age=30) - self.assertEqual(person.name, "User B") + assert person.name == "User B" person = self.Person.objects.get(age__lt=30) - self.assertEqual(person.name, "User A") + assert person.name == "User A" def test_find_array_position(self): """Ensure that query by array position works. 
@@ -313,10 +318,10 @@ class TestQueryset(unittest.TestCase): Blog.drop_collection() Blog.objects.create(tags=["a", "b"]) - self.assertEqual(Blog.objects(tags__0="a").count(), 1) - self.assertEqual(Blog.objects(tags__0="b").count(), 0) - self.assertEqual(Blog.objects(tags__1="a").count(), 0) - self.assertEqual(Blog.objects(tags__1="b").count(), 1) + assert Blog.objects(tags__0="a").count() == 1 + assert Blog.objects(tags__0="b").count() == 0 + assert Blog.objects(tags__1="a").count() == 0 + assert Blog.objects(tags__1="b").count() == 1 Blog.drop_collection() @@ -328,19 +333,19 @@ class TestQueryset(unittest.TestCase): blog2 = Blog.objects.create(posts=[post2, post1]) blog = Blog.objects(posts__0__comments__0__name="testa").get() - self.assertEqual(blog, blog1) + assert blog == blog1 blog = Blog.objects(posts__0__comments__0__name="testb").get() - self.assertEqual(blog, blog2) + assert blog == blog2 query = Blog.objects(posts__1__comments__1__name="testb") - self.assertEqual(query.count(), 2) + assert query.count() == 2 query = Blog.objects(posts__1__comments__1__name="testa") - self.assertEqual(query.count(), 0) + assert query.count() == 0 query = Blog.objects(posts__0__comments__1__name="testa") - self.assertEqual(query.count(), 0) + assert query.count() == 0 Blog.drop_collection() @@ -351,8 +356,8 @@ class TestQueryset(unittest.TestCase): A.drop_collection() A().save() - self.assertEqual(list(A.objects.none()), []) - self.assertEqual(list(A.objects.none().all()), []) + assert list(A.objects.none()) == [] + assert list(A.objects.none().all()) == [] def test_chaining(self): class A(Document): @@ -376,12 +381,12 @@ class TestQueryset(unittest.TestCase): # Doesn't work q2 = B.objects.filter(ref__in=[a1, a2]) q2 = q2.filter(ref=a1)._query - self.assertEqual(q1, q2) + assert q1 == q2 a_objects = A.objects(s="test1") query = B.objects(ref__in=a_objects) query = query.filter(boolfield=True) - self.assertEqual(query.count(), 1) + assert query.count() == 1 def test_batch_size(self): """Ensure that batch_size works.""" @@ -398,7 +403,7 @@ class TestQueryset(unittest.TestCase): cnt = 0 for a in A.objects.batch_size(10): cnt += 1 - self.assertEqual(cnt, 100) + assert cnt == 100 # test chaining qs = A.objects.all() @@ -406,11 +411,11 @@ class TestQueryset(unittest.TestCase): cnt = 0 for a in qs: cnt += 1 - self.assertEqual(cnt, 9) + assert cnt == 9 # test invalid batch size qs = A.objects.batch_size(-1) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): list(qs) def test_batch_size_cloned(self): @@ -419,9 +424,9 @@ class TestQueryset(unittest.TestCase): # test that batch size gets cloned qs = A.objects.batch_size(5) - self.assertEqual(qs._batch_size, 5) + assert qs._batch_size == 5 qs_clone = qs.clone() - self.assertEqual(qs_clone._batch_size, 5) + assert qs_clone._batch_size == 5 def test_update_write_concern(self): """Test that passing write_concern works""" @@ -437,18 +442,18 @@ class TestQueryset(unittest.TestCase): result = self.Person.objects.update(set__name="Ross", write_concern={"w": 1}) - self.assertEqual(result, 2) + assert result == 2 result = self.Person.objects.update(set__name="Ross", write_concern={"w": 0}) - self.assertEqual(result, None) + assert result == None result = self.Person.objects.update_one( set__name="Test User", write_concern={"w": 1} ) - self.assertEqual(result, 1) + assert result == 1 result = self.Person.objects.update_one( set__name="Test User", write_concern={"w": 0} ) - self.assertEqual(result, None) + assert result == None def 
test_update_update_has_a_value(self): """Test to ensure that update is passed a value to update to""" @@ -456,10 +461,10 @@ class TestQueryset(unittest.TestCase): author = self.Person.objects.create(name="Test User") - with self.assertRaises(OperationError): + with pytest.raises(OperationError): self.Person.objects(pk=author.pk).update({}) - with self.assertRaises(OperationError): + with pytest.raises(OperationError): self.Person.objects(pk=author.pk).update_one({}) def test_update_array_position(self): @@ -492,7 +497,7 @@ class TestQueryset(unittest.TestCase): # Update all of the first comments of second posts of all blogs Blog.objects().update(set__posts__1__comments__0__name="testc") testc_blogs = Blog.objects(posts__1__comments__0__name="testc") - self.assertEqual(testc_blogs.count(), 2) + assert testc_blogs.count() == 2 Blog.drop_collection() Blog.objects.create(posts=[post1, post2]) @@ -501,10 +506,10 @@ class TestQueryset(unittest.TestCase): # Update only the first blog returned by the query Blog.objects().update_one(set__posts__1__comments__1__name="testc") testc_blogs = Blog.objects(posts__1__comments__1__name="testc") - self.assertEqual(testc_blogs.count(), 1) + assert testc_blogs.count() == 1 # Check that using this indexing syntax on a non-list fails - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): Blog.objects().update(set__posts__1__comments__0__name__1="asdf") Blog.drop_collection() @@ -531,8 +536,8 @@ class TestQueryset(unittest.TestCase): BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1) post = BlogPost.objects.first() - self.assertEqual(post.comments[1].by, "jane") - self.assertEqual(post.comments[1].votes, 8) + assert post.comments[1].by == "jane" + assert post.comments[1].votes == 8 def test_update_using_positional_operator_matches_first(self): @@ -547,7 +552,7 @@ class TestQueryset(unittest.TestCase): Simple.objects(x=2).update(inc__x__S=1) simple = Simple.objects.first() - self.assertEqual(simple.x, [1, 3, 3, 2]) + assert simple.x == [1, 3, 3, 2] Simple.drop_collection() # You can set multiples @@ -559,10 +564,10 @@ class TestQueryset(unittest.TestCase): Simple.objects(x=3).update(set__x__S=0) s = Simple.objects() - self.assertEqual(s[0].x, [1, 2, 0, 4]) - self.assertEqual(s[1].x, [2, 0, 4, 5]) - self.assertEqual(s[2].x, [0, 4, 5, 6]) - self.assertEqual(s[3].x, [4, 5, 6, 7]) + assert s[0].x == [1, 2, 0, 4] + assert s[1].x == [2, 0, 4, 5] + assert s[2].x == [0, 4, 5, 6] + assert s[3].x == [4, 5, 6, 7] # Using "$unset" with an expression like this "array.$" will result in # the array item becoming None, not being removed. @@ -570,14 +575,14 @@ class TestQueryset(unittest.TestCase): Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save() Simple.objects(x=3).update(unset__x__S=1) simple = Simple.objects.first() - self.assertEqual(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) + assert simple.x == [1, 2, None, 4, 3, 2, 3, 4] # Nested updates arent supported yet.. 
- with self.assertRaises(OperationError): + with pytest.raises(OperationError): Simple.drop_collection() Simple(x=[{"test": [1, 2, 3, 4]}]).save() Simple.objects(x__test=2).update(set__x__S__test__S=3) - self.assertEqual(simple.x, [1, 2, 3, 4]) + assert simple.x == [1, 2, 3, 4] def test_update_using_positional_operator_embedded_document(self): """Ensure that the embedded documents can be updated using the positional @@ -606,8 +611,8 @@ class TestQueryset(unittest.TestCase): ) post = BlogPost.objects.first() - self.assertEqual(post.comments[0].by, "joe") - self.assertEqual(post.comments[0].votes.score, 4) + assert post.comments[0].by == "joe" + assert post.comments[0].votes.score == 4 def test_update_min_max(self): class Scores(Document): @@ -617,14 +622,14 @@ class TestQueryset(unittest.TestCase): scores = Scores.objects.create(high_score=800, low_score=200) Scores.objects(id=scores.id).update(min__low_score=150) - self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) + assert Scores.objects.get(id=scores.id).low_score == 150 Scores.objects(id=scores.id).update(min__low_score=250) - self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) + assert Scores.objects.get(id=scores.id).low_score == 150 Scores.objects(id=scores.id).update(max__high_score=1000) - self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) + assert Scores.objects.get(id=scores.id).high_score == 1000 Scores.objects(id=scores.id).update(max__high_score=500) - self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) + assert Scores.objects.get(id=scores.id).high_score == 1000 def test_update_multiple(self): class Product(Document): @@ -634,10 +639,10 @@ class TestQueryset(unittest.TestCase): product = Product.objects.create(item="ABC", price=10.99) product = Product.objects.create(item="ABC", price=10.99) Product.objects(id=product.id).update(mul__price=1.25) - self.assertEqual(Product.objects.get(id=product.id).price, 13.7375) + assert Product.objects.get(id=product.id).price == 13.7375 unknown_product = Product.objects.create(item="Unknown") Product.objects(id=unknown_product.id).update(mul__price=100) - self.assertEqual(Product.objects.get(id=unknown_product.id).price, 0) + assert Product.objects.get(id=unknown_product.id).price == 0 def test_updates_can_have_match_operators(self): class Comment(EmbeddedDocument): @@ -663,7 +668,7 @@ class TestQueryset(unittest.TestCase): Post.objects().update_one(pull__comments__vote__lt=1) - self.assertEqual(1, len(Post.objects.first().comments)) + assert 1 == len(Post.objects.first().comments) def test_mapfield_update(self): """Ensure that the MapField can be updated.""" @@ -684,8 +689,8 @@ class TestQueryset(unittest.TestCase): Club.objects().update(set__members={"John": Member(gender="F", age=14)}) club = Club.objects().first() - self.assertEqual(club.members["John"].gender, "F") - self.assertEqual(club.members["John"].age, 14) + assert club.members["John"].gender == "F" + assert club.members["John"].age == 14 def test_dictfield_update(self): """Ensure that the DictField can be updated.""" @@ -700,25 +705,25 @@ class TestQueryset(unittest.TestCase): Club.objects().update(set__members={"John": {"gender": "F", "age": 14}}) club = Club.objects().first() - self.assertEqual(club.members["John"]["gender"], "F") - self.assertEqual(club.members["John"]["age"], 14) + assert club.members["John"]["gender"] == "F" + assert club.members["John"]["age"] == 14 def test_update_results(self): self.Person.drop_collection() result = self.Person(name="Bob", 
age=25).update(upsert=True, full_result=True) - self.assertIsInstance(result, UpdateResult) - self.assertIn("upserted", result.raw_result) - self.assertFalse(result.raw_result["updatedExisting"]) + assert isinstance(result, UpdateResult) + assert "upserted" in result.raw_result + assert not result.raw_result["updatedExisting"] bob = self.Person.objects.first() result = bob.update(set__age=30, full_result=True) - self.assertIsInstance(result, UpdateResult) - self.assertTrue(result.raw_result["updatedExisting"]) + assert isinstance(result, UpdateResult) + assert result.raw_result["updatedExisting"] self.Person(name="Bob", age=20).save() result = self.Person.objects(name="Bob").update(set__name="bobby", multi=True) - self.assertEqual(result, 2) + assert result == 2 def test_update_validate(self): class EmDoc(EmbeddedDocument): @@ -730,13 +735,12 @@ class TestQueryset(unittest.TestCase): cdt_f = ComplexDateTimeField() ed_f = EmbeddedDocumentField(EmDoc) - self.assertRaises(ValidationError, Doc.objects().update, str_f=1, upsert=True) - self.assertRaises( - ValidationError, Doc.objects().update, dt_f="datetime", upsert=True - ) - self.assertRaises( - ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True - ) + with pytest.raises(ValidationError): + Doc.objects().update(str_f=1, upsert=True) + with pytest.raises(ValidationError): + Doc.objects().update(dt_f="datetime", upsert=True) + with pytest.raises(ValidationError): + Doc.objects().update(ed_f__str_f=1, upsert=True) def test_update_related_models(self): class TestPerson(Document): @@ -757,20 +761,20 @@ class TestQueryset(unittest.TestCase): o.owner = p p.name = "p2" - self.assertEqual(o._get_changed_fields(), ["owner"]) - self.assertEqual(p._get_changed_fields(), ["name"]) + assert o._get_changed_fields() == ["owner"] + assert p._get_changed_fields() == ["name"] o.save() - self.assertEqual(o._get_changed_fields(), []) - self.assertEqual(p._get_changed_fields(), ["name"]) # Fails; it's empty + assert o._get_changed_fields() == [] + assert p._get_changed_fields() == ["name"] # Fails; it's empty # This will do NOTHING at all, even though we changed the name p.save() p.reload() - self.assertEqual(p.name, "p2") # Fails; it's still `p1` + assert p.name == "p2" # Fails; it's still `p1` def test_upsert(self): self.Person.drop_collection() @@ -778,25 +782,25 @@ class TestQueryset(unittest.TestCase): self.Person.objects(pk=ObjectId(), name="Bob", age=30).update(upsert=True) bob = self.Person.objects.first() - self.assertEqual("Bob", bob.name) - self.assertEqual(30, bob.age) + assert "Bob" == bob.name + assert 30 == bob.age def test_upsert_one(self): self.Person.drop_collection() bob = self.Person.objects(name="Bob", age=30).upsert_one() - self.assertEqual("Bob", bob.name) - self.assertEqual(30, bob.age) + assert "Bob" == bob.name + assert 30 == bob.age bob.name = "Bobby" bob.save() bobby = self.Person.objects(name="Bobby", age=30).upsert_one() - self.assertEqual("Bobby", bobby.name) - self.assertEqual(30, bobby.age) - self.assertEqual(bob.id, bobby.id) + assert "Bobby" == bobby.name + assert 30 == bobby.age + assert bob.id == bobby.id def test_set_on_insert(self): self.Person.drop_collection() @@ -806,8 +810,8 @@ class TestQueryset(unittest.TestCase): ) bob = self.Person.objects.first() - self.assertEqual("Bob", bob.name) - self.assertEqual(30, bob.age) + assert "Bob" == bob.name + assert 30 == bob.age def test_save_and_only_on_fields_with_default(self): class Embed(EmbeddedDocument): @@ -832,9 +836,9 @@ class 
TestQueryset(unittest.TestCase): # Checking it was saved correctly record.reload() - self.assertEqual(record.field, 2) - self.assertEqual(record.embed_no_default.field, 2) - self.assertEqual(record.embed.field, 2) + assert record.field == 2 + assert record.embed_no_default.field == 2 + assert record.embed.field == 2 # Request only the _id field and save clone = B.objects().only("id").first() @@ -842,9 +846,9 @@ class TestQueryset(unittest.TestCase): # Reload the record and see that the embed data is not lost record.reload() - self.assertEqual(record.field, 2) - self.assertEqual(record.embed_no_default.field, 2) - self.assertEqual(record.embed.field, 2) + assert record.field == 2 + assert record.embed_no_default.field == 2 + assert record.embed.field == 2 def test_bulk_insert(self): """Ensure that bulk insert works""" @@ -863,7 +867,7 @@ class TestQueryset(unittest.TestCase): Blog.drop_collection() # Recreates the collection - self.assertEqual(0, Blog.objects.count()) + assert 0 == Blog.objects.count() comment1 = Comment(name="testa") comment2 = Comment(name="testb") @@ -873,11 +877,11 @@ class TestQueryset(unittest.TestCase): # Check bulk insert using load_bulk=False blogs = [Blog(title="%s" % i, posts=[post1, post2]) for i in range(99)] with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 Blog.objects.insert(blogs, load_bulk=False) - self.assertEqual(q, 1) # 1 entry containing the list of inserts + assert q == 1 # 1 entry containing the list of inserts - self.assertEqual(Blog.objects.count(), len(blogs)) + assert Blog.objects.count() == len(blogs) Blog.drop_collection() Blog.ensure_indexes() @@ -885,9 +889,9 @@ class TestQueryset(unittest.TestCase): # Check bulk insert using load_bulk=True blogs = [Blog(title="%s" % i, posts=[post1, post2]) for i in range(99)] with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 Blog.objects.insert(blogs) - self.assertEqual(q, 2) # 1 for insert 1 for fetch + assert q == 2 # 1 for insert 1 for fetch Blog.drop_collection() @@ -898,25 +902,27 @@ class TestQueryset(unittest.TestCase): blog1 = Blog(title="code", posts=[post1, post2]) blog2 = Blog(title="mongodb", posts=[post2, post1]) blog1, blog2 = Blog.objects.insert([blog1, blog2]) - self.assertEqual(blog1.title, "code") - self.assertEqual(blog2.title, "mongodb") + assert blog1.title == "code" + assert blog2.title == "mongodb" - self.assertEqual(Blog.objects.count(), 2) + assert Blog.objects.count() == 2 # test inserting an existing document (shouldn't be allowed) - with self.assertRaises(OperationError) as cm: + with pytest.raises(OperationError) as cm: blog = Blog.objects.first() Blog.objects.insert(blog) - self.assertEqual( - str(cm.exception), "Some documents have ObjectIds, use doc.update() instead" + assert ( + str(cm.exception) + == "Some documents have ObjectIds, use doc.update() instead" ) # test inserting a query set - with self.assertRaises(OperationError) as cm: + with pytest.raises(OperationError) as cm: blogs_qs = Blog.objects Blog.objects.insert(blogs_qs) - self.assertEqual( - str(cm.exception), "Some documents have ObjectIds, use doc.update() instead" + assert ( + str(cm.exception) + == "Some documents have ObjectIds, use doc.update() instead" ) # insert 1 new doc @@ -927,13 +933,13 @@ class TestQueryset(unittest.TestCase): blog1 = Blog(title="code", posts=[post1, post2]) blog1 = Blog.objects.insert(blog1) - self.assertEqual(blog1.title, "code") - self.assertEqual(Blog.objects.count(), 1) + assert blog1.title == "code" + assert Blog.objects.count() == 1 
Blog.drop_collection() blog1 = Blog(title="code", posts=[post1, post2]) obj_id = Blog.objects.insert(blog1, load_bulk=False) - self.assertIsInstance(obj_id, ObjectId) + assert isinstance(obj_id, ObjectId) Blog.drop_collection() post3 = Post(comments=[comment1, comment1]) @@ -941,10 +947,10 @@ class TestQueryset(unittest.TestCase): blog2 = Blog(title="bar", posts=[post2, post3]) Blog.objects.insert([blog1, blog2]) - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): Blog.objects.insert(Blog(title=blog2.title)) - self.assertEqual(Blog.objects.count(), 2) + assert Blog.objects.count() == 2 def test_bulk_insert_different_class_fails(self): class Blog(Document): @@ -954,7 +960,7 @@ class TestQueryset(unittest.TestCase): pass # try inserting a different document class - with self.assertRaises(OperationError): + with pytest.raises(OperationError): Blog.objects.insert(Author()) def test_bulk_insert_with_wrong_type(self): @@ -964,10 +970,10 @@ class TestQueryset(unittest.TestCase): Blog.drop_collection() Blog(name="test").save() - with self.assertRaises(OperationError): + with pytest.raises(OperationError): Blog.objects.insert("HELLO WORLD") - with self.assertRaises(OperationError): + with pytest.raises(OperationError): Blog.objects.insert({"name": "garbage"}) def test_bulk_insert_update_input_document_ids(self): @@ -979,23 +985,23 @@ class TestQueryset(unittest.TestCase): # Test with bulk comments = [Comment(idx=idx) for idx in range(20)] for com in comments: - self.assertIsNone(com.id) + assert com.id is None returned_comments = Comment.objects.insert(comments, load_bulk=True) for com in comments: - self.assertIsInstance(com.id, ObjectId) + assert isinstance(com.id, ObjectId) input_mapping = {com.id: com.idx for com in comments} saved_mapping = {com.id: com.idx for com in returned_comments} - self.assertEqual(input_mapping, saved_mapping) + assert input_mapping == saved_mapping Comment.drop_collection() # Test with just one comment = Comment(idx=0) inserted_comment_id = Comment.objects.insert(comment, load_bulk=False) - self.assertEqual(comment.id, inserted_comment_id) + assert comment.id == inserted_comment_id def test_bulk_insert_accepts_doc_with_ids(self): class Comment(Document): @@ -1017,7 +1023,7 @@ class TestQueryset(unittest.TestCase): Comment.objects.insert(com1) - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): Comment.objects.insert(com1) def test_get_changed_fields_query_count(self): @@ -1050,28 +1056,28 @@ class TestQueryset(unittest.TestCase): o1 = Organization(name="o1", employees=[p1]).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 # Fetching a document should result in a query. org = Organization.objects.get(id=o1.id) - self.assertEqual(q, 1) + assert q == 1 # Checking changed fields of a newly fetched document should not # result in a query. org._get_changed_fields() - self.assertEqual(q, 1) + assert q == 1 # Saving a doc without changing any of its fields should not result # in a query (with or without cascade=False). org = Organization.objects.get(id=o1.id) with query_counter() as q: org.save() - self.assertEqual(q, 0) + assert q == 0 org = Organization.objects.get(id=o1.id) with query_counter() as q: org.save(cascade=False) - self.assertEqual(q, 0) + assert q == 0 # Saving a doc after you append a reference to it should result in # two db operations (a query for the reference and an update). 
@@ -1080,7 +1086,7 @@ class TestQueryset(unittest.TestCase): with query_counter() as q: org.employees.append(p2) # dereferences p2 org.save() # saves the org - self.assertEqual(q, 2) + assert q == 2 def test_repeated_iteration(self): """Ensure that QuerySet rewinds itself one iteration finishes. @@ -1097,8 +1103,8 @@ class TestQueryset(unittest.TestCase): break people3 = [person for person in queryset] - self.assertEqual(people1, people2) - self.assertEqual(people1, people3) + assert people1 == people2 + assert people1 == people3 def test_repr(self): """Test repr behavior isnt destructive""" @@ -1116,21 +1122,21 @@ class TestQueryset(unittest.TestCase): docs = Doc.objects.order_by("number") - self.assertEqual(docs.count(), 1000) + assert docs.count() == 1000 docs_string = "%s" % docs - self.assertIn("Doc: 0", docs_string) + assert "Doc: 0" in docs_string - self.assertEqual(docs.count(), 1000) - self.assertIn("(remaining elements truncated)", "%s" % docs) + assert docs.count() == 1000 + assert "(remaining elements truncated)" in "%s" % docs # Limit and skip docs = docs[1:4] - self.assertEqual("[, , ]", "%s" % docs) + assert "[, , ]" == "%s" % docs - self.assertEqual(docs.count(with_limit_and_skip=True), 3) + assert docs.count(with_limit_and_skip=True) == 3 for doc in docs: - self.assertEqual(".. queryset mid-iteration ..", repr(docs)) + assert ".. queryset mid-iteration .." == repr(docs) def test_regex_query_shortcuts(self): """Ensure that contains, startswith, endswith, etc work. @@ -1140,54 +1146,54 @@ class TestQueryset(unittest.TestCase): # Test contains obj = self.Person.objects(name__contains="van").first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(name__contains="Van").first() - self.assertEqual(obj, None) + assert obj == None # Test icontains obj = self.Person.objects(name__icontains="Van").first() - self.assertEqual(obj, person) + assert obj == person # Test startswith obj = self.Person.objects(name__startswith="Guido").first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(name__startswith="guido").first() - self.assertEqual(obj, None) + assert obj == None # Test istartswith obj = self.Person.objects(name__istartswith="guido").first() - self.assertEqual(obj, person) + assert obj == person # Test endswith obj = self.Person.objects(name__endswith="Rossum").first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(name__endswith="rossuM").first() - self.assertEqual(obj, None) + assert obj == None # Test iendswith obj = self.Person.objects(name__iendswith="rossuM").first() - self.assertEqual(obj, person) + assert obj == person # Test exact obj = self.Person.objects(name__exact="Guido van Rossum").first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(name__exact="Guido van rossum").first() - self.assertEqual(obj, None) + assert obj == None obj = self.Person.objects(name__exact="Guido van Rossu").first() - self.assertEqual(obj, None) + assert obj == None # Test iexact obj = self.Person.objects(name__iexact="gUIDO VAN rOSSUM").first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(name__iexact="gUIDO VAN rOSSU").first() - self.assertEqual(obj, None) + assert obj == None # Test unsafe expressions person = self.Person(name="Guido van Rossum [.'Geek']") person.save() obj = self.Person.objects(name__icontains="[.'Geek").first() - self.assertEqual(obj, person) + assert obj == person def test_not(self): """Ensure that the 
__not operator works as expected. @@ -1196,10 +1202,10 @@ class TestQueryset(unittest.TestCase): alice.save() obj = self.Person.objects(name__iexact="alice").first() - self.assertEqual(obj, alice) + assert obj == alice obj = self.Person.objects(name__not__iexact="alice").first() - self.assertEqual(obj, None) + assert obj == None def test_filter_chaining(self): """Ensure filters can be chained together. @@ -1253,12 +1259,12 @@ class TestQueryset(unittest.TestCase): published_posts = published_posts.filter( published_date__lt=datetime.datetime(2010, 1, 7, 0, 0, 0) ) - self.assertEqual(published_posts.count(), 2) + assert published_posts.count() == 2 blog_posts = BlogPost.objects blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2]) blog_posts = blog_posts.filter(blog=blog_3) - self.assertEqual(blog_posts.count(), 0) + assert blog_posts.count() == 0 BlogPost.drop_collection() Blog.drop_collection() @@ -1269,14 +1275,14 @@ class TestQueryset(unittest.TestCase): people = self.Person.objects people = people.filter(name__startswith="Gui").filter(name__not__endswith="tum") - self.assertEqual(people.count(), 1) + assert people.count() == 1 def assertSequence(self, qs, expected): qs = list(qs) expected = list(expected) - self.assertEqual(len(qs), len(expected)) + assert len(qs) == len(expected) for i in range(len(qs)): - self.assertEqual(qs[i], expected[i]) + assert qs[i] == expected[i] def test_ordering(self): """Ensure default ordering is applied and can be overridden. @@ -1327,31 +1333,27 @@ class TestQueryset(unittest.TestCase): # default ordering should be used by default with db_ops_tracker() as q: BlogPost.objects.filter(title="whatever").first() - self.assertEqual(len(q.get_ops()), 1) - self.assertEqual( - q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], {"published_date": -1} - ) + assert len(q.get_ops()) == 1 + assert q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY] == {"published_date": -1} # calling order_by() should clear the default ordering with db_ops_tracker() as q: BlogPost.objects.filter(title="whatever").order_by().first() - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + assert len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] # calling an explicit order_by should use a specified sort with db_ops_tracker() as q: BlogPost.objects.filter(title="whatever").order_by("published_date").first() - self.assertEqual(len(q.get_ops()), 1) - self.assertEqual( - q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], {"published_date": 1} - ) + assert len(q.get_ops()) == 1 + assert q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY] == {"published_date": 1} # calling order_by() after an explicit sort should clear it with db_ops_tracker() as q: qs = BlogPost.objects.filter(title="whatever").order_by("published_date") qs.order_by().first() - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + assert len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] def test_no_ordering_for_get(self): """ Ensure that Doc.objects.get doesn't use any ordering. 
@@ -1370,14 +1372,14 @@ class TestQueryset(unittest.TestCase): with db_ops_tracker() as q: BlogPost.objects.get(title="whatever") - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + assert len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] # Ordering should be ignored for .get even if we set it explicitly with db_ops_tracker() as q: BlogPost.objects.order_by("-title").get(title="whatever") - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + assert len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] def test_find_embedded(self): """Ensure that an embedded document is properly returned from @@ -1397,20 +1399,20 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.create(author=user, content="Had a good coffee today...") result = BlogPost.objects.first() - self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, "Test User") + assert isinstance(result.author, User) + assert result.author.name == "Test User" result = BlogPost.objects.get(author__name=user.name) - self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, "Test User") + assert isinstance(result.author, User) + assert result.author.name == "Test User" result = BlogPost.objects.get(author={"name": user.name}) - self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, "Test User") + assert isinstance(result.author, User) + assert result.author.name == "Test User" # Fails, since the string is not a type that is able to represent the # author's document structure (should be dict) - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): BlogPost.objects.get(author=user.name) def test_find_empty_embedded(self): @@ -1428,7 +1430,7 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.create(content="Anonymous post...") result = BlogPost.objects.get(author=None) - self.assertEqual(result.author, None) + assert result.author == None def test_find_dict_item(self): """Ensure that DictField items may be found. 
@@ -1443,7 +1445,7 @@ class TestQueryset(unittest.TestCase): post.save() post_obj = BlogPost.objects(info__title="test").first() - self.assertEqual(post_obj.id, post.id) + assert post_obj.id == post.id BlogPost.drop_collection() @@ -1478,10 +1480,10 @@ class TestQueryset(unittest.TestCase): # Ensure that normal queries work c = BlogPost.objects(published=True).exec_js(js_func, "hits") - self.assertEqual(c, 2) + assert c == 2 c = BlogPost.objects(published=False).exec_js(js_func, "hits") - self.assertEqual(c, 1) + assert c == 1 BlogPost.drop_collection() @@ -1525,7 +1527,7 @@ class TestQueryset(unittest.TestCase): sub_code = BlogPost.objects._sub_js_fields(code) code_chunks = ['doc["cmnts"];', 'doc["doc-name"],', 'doc["cmnts"][i]["body"]'] for chunk in code_chunks: - self.assertIn(chunk, sub_code) + assert chunk in sub_code results = BlogPost.objects.exec_js(code) expected_results = [ @@ -1533,12 +1535,12 @@ class TestQueryset(unittest.TestCase): {u"comment": u"yay", u"document": u"post1"}, {u"comment": u"nice stuff", u"document": u"post2"}, ] - self.assertEqual(results, expected_results) + assert results == expected_results # Test template style code = "{{~comments.content}}" sub_code = BlogPost.objects._sub_js_fields(code) - self.assertEqual("cmnts.body", sub_code) + assert "cmnts.body" == sub_code BlogPost.drop_collection() @@ -1549,13 +1551,13 @@ class TestQueryset(unittest.TestCase): self.Person(name="User B", age=30).save() self.Person(name="User C", age=40).save() - self.assertEqual(self.Person.objects.count(), 3) + assert self.Person.objects.count() == 3 self.Person.objects(age__lt=30).delete() - self.assertEqual(self.Person.objects.count(), 2) + assert self.Person.objects.count() == 2 self.Person.objects.delete() - self.assertEqual(self.Person.objects.count(), 0) + assert self.Person.objects.count() == 0 def test_reverse_delete_rule_cascade(self): """Ensure cascading deletion of referring documents from the database. 
@@ -1576,9 +1578,9 @@ class TestQueryset(unittest.TestCase): BlogPost(content="Chilling out", author=me).save() BlogPost(content="Pro Testing", author=someoneelse).save() - self.assertEqual(3, BlogPost.objects.count()) + assert 3 == BlogPost.objects.count() self.Person.objects(name="Test User").delete() - self.assertEqual(1, BlogPost.objects.count()) + assert 1 == BlogPost.objects.count() def test_reverse_delete_rule_cascade_on_abstract_document(self): """Ensure cascading deletion of referring documents from the database @@ -1603,9 +1605,9 @@ class TestQueryset(unittest.TestCase): BlogPost(content="Chilling out", author=me).save() BlogPost(content="Pro Testing", author=someoneelse).save() - self.assertEqual(3, BlogPost.objects.count()) + assert 3 == BlogPost.objects.count() self.Person.objects(name="Test User").delete() - self.assertEqual(1, BlogPost.objects.count()) + assert 1 == BlogPost.objects.count() def test_reverse_delete_rule_cascade_cycle(self): """Ensure reference cascading doesn't loop if reference graph isn't @@ -1622,8 +1624,10 @@ class TestQueryset(unittest.TestCase): base.delete() - self.assertRaises(DoesNotExist, base.reload) - self.assertRaises(DoesNotExist, other.reload) + with pytest.raises(DoesNotExist): + base.reload() + with pytest.raises(DoesNotExist): + other.reload() def test_reverse_delete_rule_cascade_complex_cycle(self): """Ensure reference cascading doesn't loop if reference graph isn't @@ -1646,9 +1650,12 @@ class TestQueryset(unittest.TestCase): cat.delete() - self.assertRaises(DoesNotExist, base.reload) - self.assertRaises(DoesNotExist, other.reload) - self.assertRaises(DoesNotExist, other2.reload) + with pytest.raises(DoesNotExist): + base.reload() + with pytest.raises(DoesNotExist): + other.reload() + with pytest.raises(DoesNotExist): + other2.reload() def test_reverse_delete_rule_cascade_self_referencing(self): """Ensure self-referencing CASCADE deletes do not result in infinite @@ -1677,13 +1684,13 @@ class TestQueryset(unittest.TestCase): child_child.save() tree_size = 1 + num_children + (num_children * num_children) - self.assertEqual(tree_size, Category.objects.count()) - self.assertEqual(num_children, Category.objects(parent=base).count()) + assert tree_size == Category.objects.count() + assert num_children == Category.objects(parent=base).count() # The delete should effectively wipe out the Category collection # without resulting in infinite parent-child cascade recursion base.delete() - self.assertEqual(0, Category.objects.count()) + assert 0 == Category.objects.count() def test_reverse_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. 
@@ -1705,11 +1712,11 @@ class TestQueryset(unittest.TestCase): post = BlogPost(content="Watching TV", category=lameness) post.save() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual("Lameness", BlogPost.objects.first().category.name) + assert 1 == BlogPost.objects.count() + assert "Lameness" == BlogPost.objects.first().category.name Category.objects.delete() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual(None, BlogPost.objects.first().category) + assert 1 == BlogPost.objects.count() + assert None == BlogPost.objects.first().category def test_reverse_delete_rule_nullify_on_abstract_document(self): """Ensure nullification of references to deleted documents when @@ -1732,11 +1739,11 @@ class TestQueryset(unittest.TestCase): BlogPost(content="Watching TV", author=me).save() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual(me, BlogPost.objects.first().author) + assert 1 == BlogPost.objects.count() + assert me == BlogPost.objects.first().author self.Person.objects(name="Test User").delete() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual(None, BlogPost.objects.first().author) + assert 1 == BlogPost.objects.count() + assert None == BlogPost.objects.first().author def test_reverse_delete_rule_deny(self): """Ensure deletion gets denied on documents that still have references @@ -1756,7 +1763,8 @@ class TestQueryset(unittest.TestCase): post = BlogPost(content="Watching TV", author=me) post.save() - self.assertRaises(OperationError, self.Person.objects.delete) + with pytest.raises(OperationError): + self.Person.objects.delete() def test_reverse_delete_rule_deny_on_abstract_document(self): """Ensure deletion gets denied on documents that still have references @@ -1777,8 +1785,9 @@ class TestQueryset(unittest.TestCase): BlogPost(content="Watching TV", author=me).save() - self.assertEqual(1, BlogPost.objects.count()) - self.assertRaises(OperationError, self.Person.objects.delete) + assert 1 == BlogPost.objects.count() + with pytest.raises(OperationError): + self.Person.objects.delete() def test_reverse_delete_rule_pull(self): """Ensure pulling of references to deleted documents. @@ -1807,8 +1816,8 @@ class TestQueryset(unittest.TestCase): post.reload() another.reload() - self.assertEqual(post.authors, [me]) - self.assertEqual(another.authors, []) + assert post.authors == [me] + assert another.authors == [] def test_reverse_delete_rule_pull_on_abstract_documents(self): """Ensure pulling of references to deleted documents when reference @@ -1841,8 +1850,8 @@ class TestQueryset(unittest.TestCase): post.reload() another.reload() - self.assertEqual(post.authors, [me]) - self.assertEqual(another.authors, []) + assert post.authors == [me] + assert another.authors == [] def test_delete_with_limits(self): class Log(Document): @@ -1854,7 +1863,7 @@ class TestQueryset(unittest.TestCase): Log().save() Log.objects()[3:5].delete() - self.assertEqual(8, Log.objects.count()) + assert 8 == Log.objects.count() def test_delete_with_limit_handles_delete_rules(self): """Ensure cascading deletion of referring documents from the database. 
@@ -1875,9 +1884,9 @@ class TestQueryset(unittest.TestCase): BlogPost(content="Chilling out", author=me).save() BlogPost(content="Pro Testing", author=someoneelse).save() - self.assertEqual(3, BlogPost.objects.count()) + assert 3 == BlogPost.objects.count() self.Person.objects()[:1].delete() - self.assertEqual(1, BlogPost.objects.count()) + assert 1 == BlogPost.objects.count() def test_delete_edge_case_with_write_concern_0_return_None(self): """Return None if the delete operation is unacknowledged. @@ -1887,7 +1896,7 @@ class TestQueryset(unittest.TestCase): """ p1 = self.Person(name="User Z", age=20).save() del_result = p1.delete(w=0) - self.assertEqual(None, del_result) + assert None == del_result def test_reference_field_find(self): """Ensure cascading deletion of referring documents from the database. @@ -1903,13 +1912,13 @@ class TestQueryset(unittest.TestCase): me = self.Person(name="Test User").save() BlogPost(content="test 123", author=me).save() - self.assertEqual(1, BlogPost.objects(author=me).count()) - self.assertEqual(1, BlogPost.objects(author=me.pk).count()) - self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count()) + assert 1 == BlogPost.objects(author=me).count() + assert 1 == BlogPost.objects(author=me.pk).count() + assert 1 == BlogPost.objects(author="%s" % me.pk).count() - self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) - self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) - self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + assert 1 == BlogPost.objects(author__in=[me]).count() + assert 1 == BlogPost.objects(author__in=[me.pk]).count() + assert 1 == BlogPost.objects(author__in=["%s" % me.pk]).count() def test_reference_field_find_dbref(self): """Ensure cascading deletion of referring documents from the database. 
@@ -1925,13 +1934,13 @@ class TestQueryset(unittest.TestCase): me = self.Person(name="Test User").save() BlogPost(content="test 123", author=me).save() - self.assertEqual(1, BlogPost.objects(author=me).count()) - self.assertEqual(1, BlogPost.objects(author=me.pk).count()) - self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count()) + assert 1 == BlogPost.objects(author=me).count() + assert 1 == BlogPost.objects(author=me.pk).count() + assert 1 == BlogPost.objects(author="%s" % me.pk).count() - self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) - self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) - self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + assert 1 == BlogPost.objects(author__in=[me]).count() + assert 1 == BlogPost.objects(author__in=[me.pk]).count() + assert 1 == BlogPost.objects(author__in=["%s" % me.pk]).count() def test_update_intfield_operator(self): class BlogPost(Document): @@ -1944,20 +1953,20 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.update_one(set__hits=10) post.reload() - self.assertEqual(post.hits, 10) + assert post.hits == 10 BlogPost.objects.update_one(inc__hits=1) post.reload() - self.assertEqual(post.hits, 11) + assert post.hits == 11 BlogPost.objects.update_one(dec__hits=1) post.reload() - self.assertEqual(post.hits, 10) + assert post.hits == 10 # Negative dec operator is equal to a positive inc operator BlogPost.objects.update_one(dec__hits=-1) post.reload() - self.assertEqual(post.hits, 11) + assert post.hits == 11 def test_update_decimalfield_operator(self): class BlogPost(Document): @@ -1970,19 +1979,19 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.update_one(inc__review=0.1) # test with floats post.reload() - self.assertEqual(float(post.review), 3.6) + assert float(post.review) == 3.6 BlogPost.objects.update_one(dec__review=0.1) post.reload() - self.assertEqual(float(post.review), 3.5) + assert float(post.review) == 3.5 BlogPost.objects.update_one(inc__review=Decimal(0.12)) # test with Decimal post.reload() - self.assertEqual(float(post.review), 3.62) + assert float(post.review) == 3.62 BlogPost.objects.update_one(dec__review=Decimal(0.12)) post.reload() - self.assertEqual(float(post.review), 3.5) + assert float(post.review) == 3.5 def test_update_decimalfield_operator_not_working_with_force_string(self): class BlogPost(Document): @@ -1993,7 +2002,7 @@ class TestQueryset(unittest.TestCase): post = BlogPost(review=3.5) post.save() - with self.assertRaises(OperationError): + with pytest.raises(OperationError): BlogPost.objects.update_one(inc__review=0.1) # test with floats def test_update_listfield_operator(self): @@ -2011,22 +2020,22 @@ class TestQueryset(unittest.TestCase): # ListField operator BlogPost.objects.update(push__tags="mongo") post.reload() - self.assertIn("mongo", post.tags) + assert "mongo" in post.tags BlogPost.objects.update_one(push_all__tags=["db", "nosql"]) post.reload() - self.assertIn("db", post.tags) - self.assertIn("nosql", post.tags) + assert "db" in post.tags + assert "nosql" in post.tags tags = post.tags[:-1] BlogPost.objects.update(pop__tags=1) post.reload() - self.assertEqual(post.tags, tags) + assert post.tags == tags BlogPost.objects.update_one(add_to_set__tags="unique") BlogPost.objects.update_one(add_to_set__tags="unique") post.reload() - self.assertEqual(post.tags.count("unique"), 1) + assert post.tags.count("unique") == 1 BlogPost.drop_collection() @@ -2038,12 +2047,12 @@ class TestQueryset(unittest.TestCase): post = 
BlogPost(title="garbage").save() - self.assertNotEqual(post.title, None) + assert post.title != None BlogPost.objects.update_one(unset__title=1) post.reload() - self.assertEqual(post.title, None) + assert post.title == None pymongo_doc = BlogPost.objects.as_pymongo().first() - self.assertNotIn("title", pymongo_doc) + assert "title" not in pymongo_doc def test_update_push_with_position(self): """Ensure that the 'push' update with position works properly. @@ -2060,16 +2069,16 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.filter(id=post.id).update(push__tags="code") BlogPost.objects.filter(id=post.id).update(push__tags__0=["mongodb", "python"]) post.reload() - self.assertEqual(post.tags, ["mongodb", "python", "code"]) + assert post.tags == ["mongodb", "python", "code"] BlogPost.objects.filter(id=post.id).update(set__tags__2="java") post.reload() - self.assertEqual(post.tags, ["mongodb", "python", "java"]) + assert post.tags == ["mongodb", "python", "java"] # test push with singular value BlogPost.objects.filter(id=post.id).update(push__tags__0="scala") post.reload() - self.assertEqual(post.tags, ["scala", "mongodb", "python", "java"]) + assert post.tags == ["scala", "mongodb", "python", "java"] def test_update_push_list_of_list(self): """Ensure that the 'push' update operation works in the list of list @@ -2085,7 +2094,7 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.filter(slug="test").update(push__tags=["value1", 123]) post.reload() - self.assertEqual(post.tags, [["value1", 123]]) + assert post.tags == [["value1", 123]] def test_update_push_and_pull_add_to_set(self): """Ensure that the 'pull' update operation works correctly. @@ -2102,25 +2111,25 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.filter(id=post.id).update(push__tags="code") post.reload() - self.assertEqual(post.tags, ["code"]) + assert post.tags == ["code"] BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"]) post.reload() - self.assertEqual(post.tags, ["code", "mongodb", "code"]) + assert post.tags == ["code", "mongodb", "code"] BlogPost.objects(slug="test").update(pull__tags="code") post.reload() - self.assertEqual(post.tags, ["mongodb"]) + assert post.tags == ["mongodb"] BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"]) post.reload() - self.assertEqual(post.tags, []) + assert post.tags == [] BlogPost.objects(slug="test").update( __raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}} ) post.reload() - self.assertEqual(post.tags, ["code", "mongodb"]) + assert post.tags == ["code", "mongodb"] def test_add_to_set_each(self): class Item(Document): @@ -2137,7 +2146,7 @@ class TestQueryset(unittest.TestCase): item.update(add_to_set__parents=[parent_1, parent_2, parent_1]) item.reload() - self.assertEqual([parent_1, parent_2], item.parents) + assert [parent_1, parent_2] == item.parents def test_pull_nested(self): class Collaborator(EmbeddedDocument): @@ -2156,9 +2165,9 @@ class TestQueryset(unittest.TestCase): s = Site(name="test", collaborators=[c]).save() Site.objects(id=s.id).update_one(pull__collaborators__user="Esteban") - self.assertEqual(Site.objects.first().collaborators, []) + assert Site.objects.first().collaborators == [] - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): Site.objects(id=s.id).update_one(pull_all__collaborators__user=["Ross"]) def test_pull_from_nested_embedded(self): @@ -2185,14 +2194,14 @@ class TestQueryset(unittest.TestCase): ).save() 
Site.objects(id=s.id).update_one(pull__collaborators__helpful=c) - self.assertEqual(Site.objects.first().collaborators["helpful"], []) + assert Site.objects.first().collaborators["helpful"] == [] Site.objects(id=s.id).update_one( pull__collaborators__unhelpful={"name": "Frank"} ) - self.assertEqual(Site.objects.first().collaborators["unhelpful"], []) + assert Site.objects.first().collaborators["unhelpful"] == [] - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): Site.objects(id=s.id).update_one( pull_all__collaborators__helpful__name=["Ross"] ) @@ -2229,12 +2238,12 @@ class TestQueryset(unittest.TestCase): Site.objects(id=s.id).update_one( pull__collaborators__helpful__name__in=["Esteban"] ) # Pull a - self.assertEqual(Site.objects.first().collaborators["helpful"], [b]) + assert Site.objects.first().collaborators["helpful"] == [b] Site.objects(id=s.id).update_one( pull__collaborators__unhelpful__name__nin=["John"] ) # Pull x - self.assertEqual(Site.objects.first().collaborators["unhelpful"], [y]) + assert Site.objects.first().collaborators["unhelpful"] == [y] def test_pull_from_nested_mapfield(self): class Collaborator(EmbeddedDocument): @@ -2255,14 +2264,14 @@ class TestQueryset(unittest.TestCase): s.save() Site.objects(id=s.id).update_one(pull__collaborators__helpful__user="Esteban") - self.assertEqual(Site.objects.first().collaborators["helpful"], []) + assert Site.objects.first().collaborators["helpful"] == [] Site.objects(id=s.id).update_one( pull__collaborators__unhelpful={"user": "Frank"} ) - self.assertEqual(Site.objects.first().collaborators["unhelpful"], []) + assert Site.objects.first().collaborators["unhelpful"] == [] - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): Site.objects(id=s.id).update_one( pull_all__collaborators__helpful__user=["Ross"] ) @@ -2280,7 +2289,7 @@ class TestQueryset(unittest.TestCase): bar = Bar(foos=[foo]).save() Bar.objects(id=bar.id).update(pull__foos=foo) bar.reload() - self.assertEqual(len(bar.foos), 0) + assert len(bar.foos) == 0 def test_update_one_check_return_with_full_result(self): class BlogTag(Document): @@ -2290,10 +2299,10 @@ class TestQueryset(unittest.TestCase): BlogTag(name="garbage").save() default_update = BlogTag.objects.update_one(name="new") - self.assertEqual(default_update, 1) + assert default_update == 1 full_result_update = BlogTag.objects.update_one(name="new", full_result=True) - self.assertIsInstance(full_result_update, UpdateResult) + assert isinstance(full_result_update, UpdateResult) def test_update_one_pop_generic_reference(self): class BlogTag(Document): @@ -2316,12 +2325,12 @@ class TestQueryset(unittest.TestCase): post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) post.save() - self.assertEqual(len(post.tags), 2) + assert len(post.tags) == 2 BlogPost.objects(slug="test-2").update_one(pop__tags=-1) post.reload() - self.assertEqual(len(post.tags), 1) + assert len(post.tags) == 1 BlogPost.drop_collection() BlogTag.drop_collection() @@ -2344,15 +2353,15 @@ class TestQueryset(unittest.TestCase): post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) post.save() - self.assertEqual(len(post.tags), 2) + assert len(post.tags) == 2 BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python") post.reload() - self.assertEqual(post.tags[0].name, "python") + assert post.tags[0].name == "python" BlogPost.objects(slug="test-2").update_one(pop__tags=-1) post.reload() - self.assertEqual(len(post.tags), 1) + assert len(post.tags) == 1 
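[Editor's note: the push/pull/add_to_set/pop hunks above all exercise MongoEngine's atomic update operators on querysets. Below is a minimal standalone sketch of that operator syntax, assuming a local MongoDB and a throwaway "example_db" database; the Post model is illustrative and not part of the test suite.]

    from mongoengine import Document, ListField, StringField, connect

    connect("example_db")


    class Post(Document):
        tags = ListField(StringField())


    post = Post(tags=[]).save()
    Post.objects(id=post.id).update_one(push__tags="mongodb")     # append
    Post.objects(id=post.id).update_one(add_to_set__tags="code")  # append only if missing
    Post.objects(id=post.id).update_one(pull__tags="mongodb")     # remove matching values
    post.reload()
    assert post.tags == ["code"]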
BlogPost.drop_collection() @@ -2374,7 +2383,7 @@ class TestQueryset(unittest.TestCase): ) message = message.reload() - self.assertEqual(message.authors[0].name, "Ross") + assert message.authors[0].name == "Ross" Message.objects(authors__name="Ross").update_one( set__authors=[ @@ -2385,9 +2394,9 @@ class TestQueryset(unittest.TestCase): ) message = message.reload() - self.assertEqual(message.authors[0].name, "Harry") - self.assertEqual(message.authors[1].name, "Ross") - self.assertEqual(message.authors[2].name, "Adam") + assert message.authors[0].name == "Harry" + assert message.authors[1].name == "Ross" + assert message.authors[2].name == "Adam" def test_set_generic_embedded_documents(self): class Bar(EmbeddedDocument): @@ -2403,7 +2412,7 @@ class TestQueryset(unittest.TestCase): User.objects(username="abc").update(set__bar=Bar(name="test"), upsert=True) user = User.objects(username="abc").first() - self.assertEqual(user.bar.name, "test") + assert user.bar.name == "test" def test_reload_embedded_docs_instance(self): class SubDoc(EmbeddedDocument): @@ -2415,7 +2424,7 @@ class TestQueryset(unittest.TestCase): doc = Doc(embedded=SubDoc(val=0)).save() doc.reload() - self.assertEqual(doc.pk, doc.embedded._instance.pk) + assert doc.pk == doc.embedded._instance.pk def test_reload_list_embedded_docs_instance(self): class SubDoc(EmbeddedDocument): @@ -2427,7 +2436,7 @@ class TestQueryset(unittest.TestCase): doc = Doc(embedded=[SubDoc(val=0)]).save() doc.reload() - self.assertEqual(doc.pk, doc.embedded[0]._instance.pk) + assert doc.pk == doc.embedded[0]._instance.pk def test_order_by(self): """Ensure that QuerySets may be ordered. @@ -2437,16 +2446,16 @@ class TestQueryset(unittest.TestCase): self.Person(name="User C", age=30).save() names = [p.name for p in self.Person.objects.order_by("-age")] - self.assertEqual(names, ["User B", "User C", "User A"]) + assert names == ["User B", "User C", "User A"] names = [p.name for p in self.Person.objects.order_by("+age")] - self.assertEqual(names, ["User A", "User C", "User B"]) + assert names == ["User A", "User C", "User B"] names = [p.name for p in self.Person.objects.order_by("age")] - self.assertEqual(names, ["User A", "User C", "User B"]) + assert names == ["User A", "User C", "User B"] ages = [p.age for p in self.Person.objects.order_by("-name")] - self.assertEqual(ages, [30, 40, 20]) + assert ages == [30, 40, 20] def test_order_by_optional(self): class BlogPost(Document): @@ -2511,24 +2520,24 @@ class TestQueryset(unittest.TestCase): ages = [p.age for p in only_age] # The .only('age') clause should mean that all names are None - self.assertEqual(names, [None, None, None]) - self.assertEqual(ages, [40, 30, 20]) + assert names == [None, None, None] + assert ages == [40, 30, 20] qs = self.Person.objects.all().order_by("-age") qs = qs.limit(10) ages = [p.age for p in qs] - self.assertEqual(ages, [40, 30, 20]) + assert ages == [40, 30, 20] qs = self.Person.objects.all().limit(10) qs = qs.order_by("-age") ages = [p.age for p in qs] - self.assertEqual(ages, [40, 30, 20]) + assert ages == [40, 30, 20] qs = self.Person.objects.all().skip(0) qs = qs.order_by("-age") ages = [p.age for p in qs] - self.assertEqual(ages, [40, 30, 20]) + assert ages == [40, 30, 20] def test_confirm_order_by_reference_wont_work(self): """Ordering by reference is not possible. Use map / reduce.. 
or @@ -2551,7 +2560,7 @@ class TestQueryset(unittest.TestCase): Author(author=person_c).save() names = [a.author.name for a in Author.objects.order_by("-author__age")] - self.assertEqual(names, ["User A", "User B", "User C"]) + assert names == ["User A", "User B", "User C"] def test_comment(self): """Make sure adding a comment to the query gets added to the query""" @@ -2573,10 +2582,10 @@ class TestQueryset(unittest.TestCase): ) ops = q.get_ops() - self.assertEqual(len(ops), 2) + assert len(ops) == 2 for op in ops: - self.assertEqual(op[CMD_QUERY_KEY][QUERY_KEY], {"age": {"$gte": 18}}) - self.assertEqual(op[CMD_QUERY_KEY][COMMENT_KEY], "looking for an adult") + assert op[CMD_QUERY_KEY][QUERY_KEY] == {"age": {"$gte": 18}} + assert op[CMD_QUERY_KEY][COMMENT_KEY] == "looking for an adult" def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. @@ -2613,13 +2622,13 @@ class TestQueryset(unittest.TestCase): # run a map/reduce operation spanning all posts results = BlogPost.objects.map_reduce(map_f, reduce_f, "myresults") results = list(results) - self.assertEqual(len(results), 4) + assert len(results) == 4 music = list(filter(lambda r: r.key == "music", results))[0] - self.assertEqual(music.value, 2) + assert music.value == 2 film = list(filter(lambda r: r.key == "film", results))[0] - self.assertEqual(film.value, 3) + assert film.value == 3 BlogPost.drop_collection() @@ -2640,8 +2649,8 @@ class TestQueryset(unittest.TestCase): post2.save() post3.save() - self.assertEqual(BlogPost._fields["title"].db_field, "_id") - self.assertEqual(BlogPost._meta["id_field"], "title") + assert BlogPost._fields["title"].db_field == "_id" + assert BlogPost._meta["id_field"] == "title" map_f = """ function() { @@ -2663,9 +2672,9 @@ class TestQueryset(unittest.TestCase): results = BlogPost.objects.map_reduce(map_f, reduce_f, "myresults") results = list(results) - self.assertEqual(results[0].object, post1) - self.assertEqual(results[1].object, post2) - self.assertEqual(results[2].object, post3) + assert results[0].object == post1 + assert results[1].object == post2 + assert results[2].object == post3 BlogPost.drop_collection() @@ -2770,50 +2779,41 @@ class TestQueryset(unittest.TestCase): results = list(results) collection = get_db("test2").family_map - self.assertEqual( - collection.find_one({"_id": 1}), - { - "_id": 1, - "value": { - "persons": [ - {"age": 21, "name": u"Wilson Jr"}, - {"age": 45, "name": u"Wilson Father"}, - {"age": 40, "name": u"Eliana Costa"}, - {"age": 17, "name": u"Tayza Mariana"}, - ], - "totalAge": 123, - }, + assert collection.find_one({"_id": 1}) == { + "_id": 1, + "value": { + "persons": [ + {"age": 21, "name": u"Wilson Jr"}, + {"age": 45, "name": u"Wilson Father"}, + {"age": 40, "name": u"Eliana Costa"}, + {"age": 17, "name": u"Tayza Mariana"}, + ], + "totalAge": 123, }, - ) + } - self.assertEqual( - collection.find_one({"_id": 2}), - { - "_id": 2, - "value": { - "persons": [ - {"age": 16, "name": u"Isabella Luanna"}, - {"age": 36, "name": u"Sandra Mara"}, - {"age": 10, "name": u"Igor Gabriel"}, - ], - "totalAge": 62, - }, + assert collection.find_one({"_id": 2}) == { + "_id": 2, + "value": { + "persons": [ + {"age": 16, "name": u"Isabella Luanna"}, + {"age": 36, "name": u"Sandra Mara"}, + {"age": 10, "name": u"Igor Gabriel"}, + ], + "totalAge": 62, }, - ) + } - self.assertEqual( - collection.find_one({"_id": 3}), - { - "_id": 3, - "value": { - "persons": [ - {"age": 30, "name": u"Arthur WA"}, - {"age": 25, "name": u"Paula Leonel"}, - ], - "totalAge": 55, - }, + 
assert collection.find_one({"_id": 3}) == { + "_id": 3, + "value": { + "persons": [ + {"age": 30, "name": u"Arthur WA"}, + {"age": 25, "name": u"Paula Leonel"}, + ], + "totalAge": 55, }, - ) + } def test_map_reduce_finalize(self): """Ensure that map, reduce, and finalize run and introduce "scope" @@ -2933,10 +2933,10 @@ class TestQueryset(unittest.TestCase): results = list(results) # assert troublesome Buzz article is ranked 1st - self.assertTrue(results[0].object.title.startswith("Google Buzz")) + assert results[0].object.title.startswith("Google Buzz") # assert laser vision is ranked last - self.assertTrue(results[-1].object.title.startswith("How to see")) + assert results[-1].object.title.startswith("How to see") Link.drop_collection() @@ -2956,11 +2956,11 @@ class TestQueryset(unittest.TestCase): def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(["music", "film", "actors", "watch"]), set(f.keys())) - self.assertEqual(f["music"], 3) - self.assertEqual(f["actors"], 2) - self.assertEqual(f["watch"], 2) - self.assertEqual(f["film"], 1) + assert set(["music", "film", "actors", "watch"]) == set(f.keys()) + assert f["music"] == 3 + assert f["actors"] == 2 + assert f["watch"] == 2 + assert f["film"] == 1 exec_js = BlogPost.objects.item_frequencies("tags") map_reduce = BlogPost.objects.item_frequencies("tags", map_reduce=True) @@ -2970,10 +2970,10 @@ class TestQueryset(unittest.TestCase): # Ensure query is taken into account def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(["music", "actors", "watch"]), set(f.keys())) - self.assertEqual(f["music"], 2) - self.assertEqual(f["actors"], 1) - self.assertEqual(f["watch"], 1) + assert set(["music", "actors", "watch"]) == set(f.keys()) + assert f["music"] == 2 + assert f["actors"] == 1 + assert f["watch"] == 1 exec_js = BlogPost.objects(hits__gt=1).item_frequencies("tags") map_reduce = BlogPost.objects(hits__gt=1).item_frequencies( @@ -2984,10 +2984,10 @@ class TestQueryset(unittest.TestCase): # Check that normalization works def test_assertions(f): - self.assertAlmostEqual(f["music"], 3.0 / 8.0) - self.assertAlmostEqual(f["actors"], 2.0 / 8.0) - self.assertAlmostEqual(f["watch"], 2.0 / 8.0) - self.assertAlmostEqual(f["film"], 1.0 / 8.0) + assert round(abs(f["music"] - 3.0 / 8.0), 7) == 0 + assert round(abs(f["actors"] - 2.0 / 8.0), 7) == 0 + assert round(abs(f["watch"] - 2.0 / 8.0), 7) == 0 + assert round(abs(f["film"] - 1.0 / 8.0), 7) == 0 exec_js = BlogPost.objects.item_frequencies("tags", normalize=True) map_reduce = BlogPost.objects.item_frequencies( @@ -2998,9 +2998,9 @@ class TestQueryset(unittest.TestCase): # Check item_frequencies works for non-list fields def test_assertions(f): - self.assertEqual(set([1, 2]), set(f.keys())) - self.assertEqual(f[1], 1) - self.assertEqual(f[2], 2) + assert set([1, 2]) == set(f.keys()) + assert f[1] == 1 + assert f[2] == 2 exec_js = BlogPost.objects.item_frequencies("hits") map_reduce = BlogPost.objects.item_frequencies("hits", map_reduce=True) @@ -3036,9 +3036,9 @@ class TestQueryset(unittest.TestCase): def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(["62-3331-1656", "62-3332-1656"]), set(f.keys())) - self.assertEqual(f["62-3331-1656"], 2) - self.assertEqual(f["62-3332-1656"], 1) + assert set(["62-3331-1656", "62-3332-1656"]) == set(f.keys()) + assert f["62-3331-1656"] == 2 + assert f["62-3332-1656"] == 1 exec_js = Person.objects.item_frequencies("phone.number") map_reduce = 
Person.objects.item_frequencies("phone.number", map_reduce=True) @@ -3048,8 +3048,8 @@ class TestQueryset(unittest.TestCase): # Ensure query is taken into account def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(["62-3331-1656"]), set(f.keys())) - self.assertEqual(f["62-3331-1656"], 2) + assert set(["62-3331-1656"]) == set(f.keys()) + assert f["62-3331-1656"] == 2 exec_js = Person.objects(phone__number="62-3331-1656").item_frequencies( "phone.number" @@ -3062,8 +3062,8 @@ class TestQueryset(unittest.TestCase): # Check that normalization works def test_assertions(f): - self.assertEqual(f["62-3331-1656"], 2.0 / 3.0) - self.assertEqual(f["62-3332-1656"], 1.0 / 3.0) + assert f["62-3331-1656"] == 2.0 / 3.0 + assert f["62-3332-1656"] == 1.0 / 3.0 exec_js = Person.objects.item_frequencies("phone.number", normalize=True) map_reduce = Person.objects.item_frequencies( @@ -3083,14 +3083,14 @@ class TestQueryset(unittest.TestCase): Person(name="Wilson Jr").save() freq = Person.objects.item_frequencies("city") - self.assertEqual(freq, {"CRB": 1.0, None: 1.0}) + assert freq == {"CRB": 1.0, None: 1.0} freq = Person.objects.item_frequencies("city", normalize=True) - self.assertEqual(freq, {"CRB": 0.5, None: 0.5}) + assert freq == {"CRB": 0.5, None: 0.5} freq = Person.objects.item_frequencies("city", map_reduce=True) - self.assertEqual(freq, {"CRB": 1.0, None: 1.0}) + assert freq == {"CRB": 1.0, None: 1.0} freq = Person.objects.item_frequencies("city", normalize=True, map_reduce=True) - self.assertEqual(freq, {"CRB": 0.5, None: 0.5}) + assert freq == {"CRB": 0.5, None: 0.5} def test_item_frequencies_with_null_embedded(self): class Data(EmbeddedDocument): @@ -3115,10 +3115,10 @@ class TestQueryset(unittest.TestCase): p.save() ot = Person.objects.item_frequencies("extra.tag", map_reduce=False) - self.assertEqual(ot, {None: 1.0, u"friend": 1.0}) + assert ot == {None: 1.0, u"friend": 1.0} ot = Person.objects.item_frequencies("extra.tag", map_reduce=True) - self.assertEqual(ot, {None: 1.0, u"friend": 1.0}) + assert ot == {None: 1.0, u"friend": 1.0} def test_item_frequencies_with_0_values(self): class Test(Document): @@ -3130,9 +3130,9 @@ class TestQueryset(unittest.TestCase): t.save() ot = Test.objects.item_frequencies("val", map_reduce=True) - self.assertEqual(ot, {0: 1}) + assert ot == {0: 1} ot = Test.objects.item_frequencies("val", map_reduce=False) - self.assertEqual(ot, {0: 1}) + assert ot == {0: 1} def test_item_frequencies_with_False_values(self): class Test(Document): @@ -3144,9 +3144,9 @@ class TestQueryset(unittest.TestCase): t.save() ot = Test.objects.item_frequencies("val", map_reduce=True) - self.assertEqual(ot, {False: 1}) + assert ot == {False: 1} ot = Test.objects.item_frequencies("val", map_reduce=False) - self.assertEqual(ot, {False: 1}) + assert ot == {False: 1} def test_item_frequencies_normalize(self): class Test(Document): @@ -3161,31 +3161,32 @@ class TestQueryset(unittest.TestCase): Test(val=2).save() freqs = Test.objects.item_frequencies("val", map_reduce=False, normalize=True) - self.assertEqual(freqs, {1: 50.0 / 70, 2: 20.0 / 70}) + assert freqs == {1: 50.0 / 70, 2: 20.0 / 70} freqs = Test.objects.item_frequencies("val", map_reduce=True, normalize=True) - self.assertEqual(freqs, {1: 50.0 / 70, 2: 20.0 / 70}) + assert freqs == {1: 50.0 / 70, 2: 20.0 / 70} def test_average(self): """Ensure that field can be averaged correctly. 
""" self.Person(name="person", age=0).save() - self.assertEqual(int(self.Person.objects.average("age")), 0) + assert int(self.Person.objects.average("age")) == 0 ages = [23, 54, 12, 94, 27] for i, age in enumerate(ages): self.Person(name="test%s" % i, age=age).save() avg = float(sum(ages)) / (len(ages) + 1) # take into account the 0 - self.assertAlmostEqual(int(self.Person.objects.average("age")), avg) + assert round(abs(int(self.Person.objects.average("age")) - avg), 7) == 0 self.Person(name="ageless person").save() - self.assertEqual(int(self.Person.objects.average("age")), avg) + assert int(self.Person.objects.average("age")) == avg # dot notation self.Person(name="person meta", person_meta=self.PersonMeta(weight=0)).save() - self.assertAlmostEqual( - int(self.Person.objects.average("person_meta.weight")), 0 + assert ( + round(abs(int(self.Person.objects.average("person_meta.weight")) - 0), 7) + == 0 ) for i, weight in enumerate(ages): @@ -3193,17 +3194,18 @@ class TestQueryset(unittest.TestCase): name="test meta%i", person_meta=self.PersonMeta(weight=weight) ).save() - self.assertAlmostEqual( - int(self.Person.objects.average("person_meta.weight")), avg + assert ( + round(abs(int(self.Person.objects.average("person_meta.weight")) - avg), 7) + == 0 ) self.Person(name="test meta none").save() - self.assertEqual(int(self.Person.objects.average("person_meta.weight")), avg) + assert int(self.Person.objects.average("person_meta.weight")) == avg # test summing over a filtered queryset over_50 = [a for a in ages if a >= 50] avg = float(sum(over_50)) / len(over_50) - self.assertEqual(self.Person.objects.filter(age__gte=50).average("age"), avg) + assert self.Person.objects.filter(age__gte=50).average("age") == avg def test_sum(self): """Ensure that field can be summed over correctly. 
@@ -3212,25 +3214,24 @@ class TestQueryset(unittest.TestCase): for i, age in enumerate(ages): self.Person(name="test%s" % i, age=age).save() - self.assertEqual(self.Person.objects.sum("age"), sum(ages)) + assert self.Person.objects.sum("age") == sum(ages) self.Person(name="ageless person").save() - self.assertEqual(self.Person.objects.sum("age"), sum(ages)) + assert self.Person.objects.sum("age") == sum(ages) for i, age in enumerate(ages): self.Person( name="test meta%s" % i, person_meta=self.PersonMeta(weight=age) ).save() - self.assertEqual(self.Person.objects.sum("person_meta.weight"), sum(ages)) + assert self.Person.objects.sum("person_meta.weight") == sum(ages) self.Person(name="weightless person").save() - self.assertEqual(self.Person.objects.sum("age"), sum(ages)) + assert self.Person.objects.sum("age") == sum(ages) # test summing over a filtered queryset - self.assertEqual( - self.Person.objects.filter(age__gte=50).sum("age"), - sum([a for a in ages if a >= 50]), + assert self.Person.objects.filter(age__gte=50).sum("age") == sum( + [a for a in ages if a >= 50] ) def test_sum_over_db_field(self): @@ -3246,7 +3247,7 @@ class TestQueryset(unittest.TestCase): UserVisit.objects.create(num_visits=10) UserVisit.objects.create(num_visits=5) - self.assertEqual(UserVisit.objects.sum("num_visits"), 15) + assert UserVisit.objects.sum("num_visits") == 15 def test_average_over_db_field(self): """Ensure that a field mapped to a db field with a different name @@ -3261,7 +3262,7 @@ class TestQueryset(unittest.TestCase): UserVisit.objects.create(num_visits=20) UserVisit.objects.create(num_visits=10) - self.assertEqual(UserVisit.objects.average("num_visits"), 15) + assert UserVisit.objects.average("num_visits") == 15 def test_embedded_average(self): class Pay(EmbeddedDocument): @@ -3278,7 +3279,7 @@ class TestQueryset(unittest.TestCase): Doc(name="Tayza mariana", pay=Pay(value=165)).save() Doc(name="Eliana Costa", pay=Pay(value=115)).save() - self.assertEqual(Doc.objects.average("pay.value"), 240) + assert Doc.objects.average("pay.value") == 240 def test_embedded_array_average(self): class Pay(EmbeddedDocument): @@ -3295,7 +3296,7 @@ class TestQueryset(unittest.TestCase): Doc(name="Tayza mariana", pay=Pay(values=[165, 100])).save() Doc(name="Eliana Costa", pay=Pay(values=[115, 100])).save() - self.assertEqual(Doc.objects.average("pay.values"), 170) + assert Doc.objects.average("pay.values") == 170 def test_array_average(self): class Doc(Document): @@ -3308,7 +3309,7 @@ class TestQueryset(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - self.assertEqual(Doc.objects.average("values"), 170) + assert Doc.objects.average("values") == 170 def test_embedded_sum(self): class Pay(EmbeddedDocument): @@ -3325,7 +3326,7 @@ class TestQueryset(unittest.TestCase): Doc(name="Tayza mariana", pay=Pay(value=165)).save() Doc(name="Eliana Costa", pay=Pay(value=115)).save() - self.assertEqual(Doc.objects.sum("pay.value"), 960) + assert Doc.objects.sum("pay.value") == 960 def test_embedded_array_sum(self): class Pay(EmbeddedDocument): @@ -3342,7 +3343,7 @@ class TestQueryset(unittest.TestCase): Doc(name="Tayza mariana", pay=Pay(values=[165, 100])).save() Doc(name="Eliana Costa", pay=Pay(values=[115, 100])).save() - self.assertEqual(Doc.objects.sum("pay.values"), 1360) + assert Doc.objects.sum("pay.values") == 1360 def test_array_sum(self): class Doc(Document): @@ -3355,7 +3356,7 @@ class TestQueryset(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - 
self.assertEqual(Doc.objects.sum("values"), 1360) + assert Doc.objects.sum("values") == 1360 def test_distinct(self): """Ensure that the QuerySet.distinct method works. @@ -3364,14 +3365,12 @@ class TestQueryset(unittest.TestCase): self.Person(name="Mr White", age=20).save() self.Person(name="Mr Orange", age=30).save() self.Person(name="Mr Pink", age=30).save() - self.assertEqual( - set(self.Person.objects.distinct("name")), - set(["Mr Orange", "Mr White", "Mr Pink"]), + assert set(self.Person.objects.distinct("name")) == set( + ["Mr Orange", "Mr White", "Mr Pink"] ) - self.assertEqual(set(self.Person.objects.distinct("age")), set([20, 30])) - self.assertEqual( - set(self.Person.objects(age=30).distinct("name")), - set(["Mr Orange", "Mr Pink"]), + assert set(self.Person.objects.distinct("age")) == set([20, 30]) + assert set(self.Person.objects(age=30).distinct("name")) == set( + ["Mr Orange", "Mr Pink"] ) def test_distinct_handles_references(self): @@ -3390,7 +3389,7 @@ class TestQueryset(unittest.TestCase): foo = Foo(bar=bar) foo.save() - self.assertEqual(Foo.objects.distinct("bar"), [bar]) + assert Foo.objects.distinct("bar") == [bar] def test_text_indexes(self): class News(Document): @@ -3410,8 +3409,8 @@ class TestQueryset(unittest.TestCase): News.drop_collection() info = News.objects._collection.index_information() - self.assertIn("title_text_content_text", info) - self.assertIn("textIndexVersion", info["title_text_content_text"]) + assert "title_text_content_text" in info + assert "textIndexVersion" in info["title_text_content_text"] News( title="Neymar quebrou a vertebra", @@ -3426,11 +3425,11 @@ class TestQueryset(unittest.TestCase): count = News.objects.search_text("neymar", language="portuguese").count() - self.assertEqual(count, 1) + assert count == 1 count = News.objects.search_text("brasil -neymar").count() - self.assertEqual(count, 1) + assert count == 1 News( title=u"As eleições no Brasil já estão em planejamento", @@ -3442,41 +3441,41 @@ class TestQueryset(unittest.TestCase): query = News.objects(is_active=False).search_text("dilma", language="pt")._query - self.assertEqual( - query, - {"$text": {"$search": "dilma", "$language": "pt"}, "is_active": False}, - ) + assert query == { + "$text": {"$search": "dilma", "$language": "pt"}, + "is_active": False, + } - self.assertFalse(new.is_active) - self.assertIn("dilma", new.content) - self.assertIn("planejamento", new.title) + assert not new.is_active + assert "dilma" in new.content + assert "planejamento" in new.title query = News.objects.search_text("candidata") - self.assertEqual(query._search_text, "candidata") + assert query._search_text == "candidata" new = query.first() - self.assertIsInstance(new.get_text_score(), float) + assert isinstance(new.get_text_score(), float) # count query = News.objects.search_text("brasil").order_by("$text_score") - self.assertEqual(query._search_text, "brasil") + assert query._search_text == "brasil" - self.assertEqual(query.count(), 3) - self.assertEqual(query._query, {"$text": {"$search": "brasil"}}) + assert query.count() == 3 + assert query._query == {"$text": {"$search": "brasil"}} cursor_args = query._cursor_args cursor_args_fields = cursor_args["projection"] - self.assertEqual(cursor_args_fields, {"_text_score": {"$meta": "textScore"}}) + assert cursor_args_fields == {"_text_score": {"$meta": "textScore"}} text_scores = [i.get_text_score() for i in query] - self.assertEqual(len(text_scores), 3) + assert len(text_scores) == 3 - self.assertTrue(text_scores[0] > text_scores[1]) - 
self.assertTrue(text_scores[1] > text_scores[2]) + assert text_scores[0] > text_scores[1] + assert text_scores[1] > text_scores[2] max_text_score = text_scores[0] # get item item = News.objects.search_text("brasil").order_by("$text_score").first() - self.assertEqual(item.get_text_score(), max_text_score) + assert item.get_text_score() == max_text_score def test_distinct_handles_references_to_alias(self): register_connection("testdb", "mongoenginetest2") @@ -3498,7 +3497,7 @@ class TestQueryset(unittest.TestCase): foo = Foo(bar=bar) foo.save() - self.assertEqual(Foo.objects.distinct("bar"), [bar]) + assert Foo.objects.distinct("bar") == [bar] def test_distinct_handles_db_field(self): """Ensure that distinct resolves field name to db_field as expected. @@ -3513,8 +3512,8 @@ class TestQueryset(unittest.TestCase): Product(product_id=2).save() Product(product_id=1).save() - self.assertEqual(set(Product.objects.distinct("product_id")), set([1, 2])) - self.assertEqual(set(Product.objects.distinct("pid")), set([1, 2])) + assert set(Product.objects.distinct("product_id")) == set([1, 2]) + assert set(Product.objects.distinct("pid")) == set([1, 2]) Product.drop_collection() @@ -3536,7 +3535,7 @@ class TestQueryset(unittest.TestCase): Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien]) authors = Book.objects.distinct("authors") - self.assertEqual(authors, [mark_twain, john_tolkien]) + assert authors == [mark_twain, john_tolkien] def test_distinct_ListField_EmbeddedDocumentField_EmbeddedDocumentField(self): class Continent(EmbeddedDocument): @@ -3570,10 +3569,10 @@ class TestQueryset(unittest.TestCase): Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien]) country_list = Book.objects.distinct("authors.country") - self.assertEqual(country_list, [scotland, tibet]) + assert country_list == [scotland, tibet] continent_list = Book.objects.distinct("authors.country.continent") - self.assertEqual(continent_list, [europe, asia]) + assert continent_list == [europe, asia] def test_distinct_ListField_ReferenceField(self): class Bar(Document): @@ -3595,7 +3594,7 @@ class TestQueryset(unittest.TestCase): foo = Foo(bar=bar_1, bar_lst=[bar_1, bar_2]) foo.save() - self.assertEqual(Foo.objects.distinct("bar_lst"), [bar_1, bar_2]) + assert Foo.objects.distinct("bar_lst") == [bar_1, bar_2] def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected. 
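[Editor's note: the search_text hunks above depend on a declared text index and on the $text_score projection. The sketch below shows that API shape in isolation; it assumes a local MongoDB and an illustrative News model, not the one defined in the test suite.]

    from mongoengine import Document, StringField, connect

    connect("example_db")


    class News(Document):
        title = StringField()
        content = StringField()
        # "$field" entries declare a MongoDB text index over those fields
        meta = {"indexes": [{"fields": ["$title", "$content"]}]}


    News(title="Python text search", content="MongoDB text indexes").save()
    qs = News.objects.search_text("python").order_by("$text_score")
    first = qs.first()
    print(first.get_text_score())  # relevance score exposed via {"$meta": "textScore"}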
@@ -3627,15 +3626,15 @@ class TestQueryset(unittest.TestCase): post3 = BlogPost(tags=["film", "actors"]).save() post4 = BlogPost(tags=["film", "actors", "music"], deleted=True).save() - self.assertEqual( - [p.id for p in BlogPost.objects()], [post1.id, post2.id, post3.id] - ) - self.assertEqual( - [p.id for p in BlogPost.objects_1_arg()], [post1.id, post2.id, post3.id] - ) - self.assertEqual([p.id for p in BlogPost.music_posts()], [post1.id, post2.id]) + assert [p.id for p in BlogPost.objects()] == [post1.id, post2.id, post3.id] + assert [p.id for p in BlogPost.objects_1_arg()] == [ + post1.id, + post2.id, + post3.id, + ] + assert [p.id for p in BlogPost.music_posts()] == [post1.id, post2.id] - self.assertEqual([p.id for p in BlogPost.music_posts(True)], [post4.id]) + assert [p.id for p in BlogPost.music_posts(True)] == [post4.id] BlogPost.drop_collection() @@ -3657,12 +3656,12 @@ class TestQueryset(unittest.TestCase): Foo(active=True).save() Foo(active=False).save() - self.assertEqual(1, Foo.objects.count()) - self.assertEqual(1, Foo.with_inactive.count()) + assert 1 == Foo.objects.count() + assert 1 == Foo.with_inactive.count() Foo.with_inactive.first().delete() - self.assertEqual(0, Foo.with_inactive.count()) - self.assertEqual(1, Foo.objects.count()) + assert 0 == Foo.with_inactive.count() + assert 1 == Foo.objects.count() def test_inherit_objects(self): class Foo(Document): @@ -3678,7 +3677,7 @@ class TestQueryset(unittest.TestCase): Bar.drop_collection() Bar.objects.create(active=False) - self.assertEqual(0, Bar.objects.count()) + assert 0 == Bar.objects.count() def test_inherit_objects_override(self): class Foo(Document): @@ -3696,8 +3695,8 @@ class TestQueryset(unittest.TestCase): Bar.drop_collection() Bar.objects.create(active=False) - self.assertEqual(0, Foo.objects.count()) - self.assertEqual(1, Bar.objects.count()) + assert 0 == Foo.objects.count() + assert 1 == Bar.objects.count() def test_query_value_conversion(self): """Ensure that query values are properly converted when necessary. 
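[Editor's note: the custom manager tests above build on the queryset_manager decorator. A minimal standalone sketch of the pattern follows, assuming a local MongoDB; the Article model and live_objects manager are illustrative names, not part of the patch.]

    from mongoengine import BooleanField, Document, connect, queryset_manager

    connect("example_db")


    class Article(Document):
        deleted = BooleanField(default=False)

        @queryset_manager
        def live_objects(cls, queryset):
            # The decorated method receives the class and a base queryset and
            # returns the (filtered) queryset that is exposed as a manager.
            return queryset.filter(deleted=False)


    Article(deleted=False).save()
    Article(deleted=True).save()
    assert Article.live_objects.count() == 1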
@@ -3718,11 +3717,11 @@ class TestQueryset(unittest.TestCase): # while using a ReferenceField's name - the document should be # converted to an DBRef, which is legal, unlike a Document object post_obj = BlogPost.objects(author=person).first() - self.assertEqual(post.id, post_obj.id) + assert post.id == post_obj.id # Test that lists of values work when using the 'in', 'nin' and 'all' post_obj = BlogPost.objects(author__in=[person]).first() - self.assertEqual(post.id, post_obj.id) + assert post.id == post_obj.id BlogPost.drop_collection() @@ -3746,9 +3745,9 @@ class TestQueryset(unittest.TestCase): Group.objects(id=group.id).update(set__members=[user1, user2]) group.reload() - self.assertEqual(len(group.members), 2) - self.assertEqual(group.members[0].name, user1.name) - self.assertEqual(group.members[1].name, user2.name) + assert len(group.members) == 2 + assert group.members[0].name == user1.name + assert group.members[1].name == user2.name Group.drop_collection() @@ -3776,15 +3775,15 @@ class TestQueryset(unittest.TestCase): ids = [post_1.id, post_2.id, post_5.id] objects = BlogPost.objects.in_bulk(ids) - self.assertEqual(len(objects), 3) + assert len(objects) == 3 - self.assertIn(post_1.id, objects) - self.assertIn(post_2.id, objects) - self.assertIn(post_5.id, objects) + assert post_1.id in objects + assert post_2.id in objects + assert post_5.id in objects - self.assertEqual(objects[post_1.id].title, post_1.title) - self.assertEqual(objects[post_2.id].title, post_2.title) - self.assertEqual(objects[post_5.id].title, post_5.title) + assert objects[post_1.id].title == post_1.title + assert objects[post_2.id].title == post_2.title + assert objects[post_5.id].title == post_5.title BlogPost.drop_collection() @@ -3804,11 +3803,11 @@ class TestQueryset(unittest.TestCase): Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert Post.objects.not_empty() Post.drop_collection() @@ -3828,11 +3827,11 @@ class TestQueryset(unittest.TestCase): Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert Post.objects.not_empty() Post.drop_collection() @@ -3853,8 +3852,8 @@ class TestQueryset(unittest.TestCase): Post().save() Post(is_published=True).save() - self.assertEqual(Post.objects.count(), 2) - self.assertEqual(Post.published.count(), 1) + assert Post.objects.count() == 2 + assert Post.published.count() == 1 Post.drop_collection() @@ -3873,11 +3872,11 @@ class TestQueryset(unittest.TestCase): pass Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert Post.objects.not_empty() Post.drop_collection() @@ -3900,11 +3899,11 @@ class TestQueryset(unittest.TestCase): pass Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert 
Post.objects.not_empty() Post.drop_collection() @@ -3917,13 +3916,9 @@ class TestQueryset(unittest.TestCase): for i in range(10): Post(title="Post %s" % i).save() - self.assertEqual( - 5, Post.objects.limit(5).skip(5).count(with_limit_and_skip=True) - ) + assert 5 == Post.objects.limit(5).skip(5).count(with_limit_and_skip=True) - self.assertEqual( - 10, Post.objects.limit(5).skip(5).count(with_limit_and_skip=False) - ) + assert 10 == Post.objects.limit(5).skip(5).count(with_limit_and_skip=False) def test_count_and_none(self): """Test count works with None()""" @@ -3935,8 +3930,8 @@ class TestQueryset(unittest.TestCase): for i in range(0, 10): MyDoc().save() - self.assertEqual(MyDoc.objects.count(), 10) - self.assertEqual(MyDoc.objects.none().count(), 0) + assert MyDoc.objects.count() == 10 + assert MyDoc.objects.none().count() == 0 def test_count_list_embedded(self): class B(EmbeddedDocument): @@ -3945,7 +3940,7 @@ class TestQueryset(unittest.TestCase): class A(Document): b = ListField(EmbeddedDocumentField(B)) - self.assertEqual(A.objects(b=[{"c": "c"}]).count(), 0) + assert A.objects(b=[{"c": "c"}]).count() == 0 def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works @@ -3960,7 +3955,7 @@ class TestQueryset(unittest.TestCase): Post(title="Post 2").save() posts = Post.objects.all()[0:1] - self.assertEqual(len(list(posts())), 1) + assert len(list(posts())) == 1 Post.drop_collection() @@ -3976,9 +3971,9 @@ class TestQueryset(unittest.TestCase): n2 = Number.objects.create(n=2) n1 = Number.objects.create(n=1) - self.assertEqual(list(Number.objects), [n2, n1]) - self.assertEqual(list(Number.objects.order_by("n")), [n1, n2]) - self.assertEqual(list(Number.objects.order_by("n").filter()), [n1, n2]) + assert list(Number.objects) == [n2, n1] + assert list(Number.objects.order_by("n")) == [n1, n2] + assert list(Number.objects.order_by("n").filter()) == [n1, n2] Number.drop_collection() @@ -3997,18 +3992,18 @@ class TestQueryset(unittest.TestCase): test = Number.objects test2 = test.clone() - self.assertNotEqual(test, test2) - self.assertEqual(test.count(), test2.count()) + assert test != test2 + assert test.count() == test2.count() test = test.filter(n__gt=11) test2 = test.clone() - self.assertNotEqual(test, test2) - self.assertEqual(test.count(), test2.count()) + assert test != test2 + assert test.count() == test2.count() test = test.limit(10) test2 = test.clone() - self.assertNotEqual(test, test2) - self.assertEqual(test.count(), test2.count()) + assert test != test2 + assert test.count() == test2.count() Number.drop_collection() @@ -4028,7 +4023,7 @@ class TestQueryset(unittest.TestCase): t.switch_db("test2") t.save() - self.assertEqual(len(Number2.objects.using("test2")), 9) + assert len(Number2.objects.using("test2")) == 9 def test_unset_reference(self): class Comment(Document): @@ -4043,10 +4038,10 @@ class TestQueryset(unittest.TestCase): comment = Comment.objects.create(text="test") post = Post.objects.create(comment=comment) - self.assertEqual(post.comment, comment) + assert post.comment == comment Post.objects.update(unset__comment=1) post.reload() - self.assertEqual(post.comment, None) + assert post.comment == None Comment.drop_collection() Post.drop_collection() @@ -4060,8 +4055,8 @@ class TestQueryset(unittest.TestCase): n2 = Number.objects.create(n=2) n1 = Number.objects.create(n=1) - self.assertEqual(list(Number.objects), [n2, n1]) - self.assertEqual(list(Number.objects.order_by("n")), [n1, n2]) + assert list(Number.objects) == [n2, n1] + assert 
list(Number.objects.order_by("n")) == [n1, n2] Number.drop_collection() @@ -4079,10 +4074,10 @@ class TestQueryset(unittest.TestCase): Number(n=3).save() numbers = [n.n for n in Number.objects.order_by("-n")] - self.assertEqual([3, 2, 1], numbers) + assert [3, 2, 1] == numbers numbers = [n.n for n in Number.objects.order_by("+n")] - self.assertEqual([1, 2, 3], numbers) + assert [1, 2, 3] == numbers Number.drop_collection() def test_ensure_index(self): @@ -4100,7 +4095,7 @@ class TestQueryset(unittest.TestCase): (value["key"], value.get("unique", False), value.get("sparse", False)) for key, value in iteritems(info) ] - self.assertIn(([("_cls", 1), ("message", 1)], False, False), info) + assert ([("_cls", 1), ("message", 1)], False, False) in info def test_where(self): """Ensure that where clauses work. @@ -4120,30 +4115,30 @@ class TestQueryset(unittest.TestCase): c.save() query = IntPair.objects.where("this[~fielda] >= this[~fieldb]") - self.assertEqual('this["fielda"] >= this["fieldb"]', query._where_clause) + assert 'this["fielda"] >= this["fieldb"]' == query._where_clause results = list(query) - self.assertEqual(2, len(results)) - self.assertIn(a, results) - self.assertIn(c, results) + assert 2 == len(results) + assert a in results + assert c in results query = IntPair.objects.where("this[~fielda] == this[~fieldb]") results = list(query) - self.assertEqual(1, len(results)) - self.assertIn(a, results) + assert 1 == len(results) + assert a in results query = IntPair.objects.where( "function() { return this[~fielda] >= this[~fieldb] }" ) - self.assertEqual( - 'function() { return this["fielda"] >= this["fieldb"] }', - query._where_clause, + assert ( + 'function() { return this["fielda"] >= this["fieldb"] }' + == query._where_clause ) results = list(query) - self.assertEqual(2, len(results)) - self.assertIn(a, results) - self.assertIn(c, results) + assert 2 == len(results) + assert a in results + assert c in results - with self.assertRaises(TypeError): + with pytest.raises(TypeError): list(IntPair.objects.where(fielda__gte=3)) def test_scalar(self): @@ -4165,13 +4160,13 @@ class TestQueryset(unittest.TestCase): # set of users (Pretend this has additional filtering.) user_orgs = set(User.objects.scalar("organization")) orgs = Organization.objects(id__in=user_orgs).scalar("name") - self.assertEqual(list(orgs), ["White House"]) + assert list(orgs) == ["White House"] # Efficient for generating listings, too. 
orgs = Organization.objects.scalar("name").in_bulk(list(user_orgs)) user_map = User.objects.scalar("name", "organization") user_listing = [(user, orgs[org]) for user, org in user_map] - self.assertEqual([("Bob Dole", "White House")], user_listing) + assert [("Bob Dole", "White House")] == user_listing def test_scalar_simple(self): class TestDoc(Document): @@ -4186,10 +4181,10 @@ class TestQueryset(unittest.TestCase): plist = list(TestDoc.objects.scalar("x", "y")) - self.assertEqual(len(plist), 3) - self.assertEqual(plist[0], (10, True)) - self.assertEqual(plist[1], (20, False)) - self.assertEqual(plist[2], (30, True)) + assert len(plist) == 3 + assert plist[0] == (10, True) + assert plist[1] == (20, False) + assert plist[2] == (30, True) class UserDoc(Document): name = StringField() @@ -4204,14 +4199,16 @@ class TestQueryset(unittest.TestCase): ulist = list(UserDoc.objects.scalar("name", "age")) - self.assertEqual( - ulist, - [(u"Wilson Jr", 19), (u"Wilson", 43), (u"Eliana", 37), (u"Tayza", 15)], - ) + assert ulist == [ + (u"Wilson Jr", 19), + (u"Wilson", 43), + (u"Eliana", 37), + (u"Tayza", 15), + ] ulist = list(UserDoc.objects.scalar("name").order_by("age")) - self.assertEqual(ulist, [(u"Tayza"), (u"Wilson Jr"), (u"Eliana"), (u"Wilson")]) + assert ulist == [(u"Tayza"), (u"Wilson Jr"), (u"Eliana"), (u"Wilson")] def test_scalar_embedded(self): class Profile(EmbeddedDocument): @@ -4248,25 +4245,21 @@ class TestQueryset(unittest.TestCase): locale=Locale(city="Brasilia", country="Brazil"), ).save() - self.assertEqual( - list(Person.objects.order_by("profile__age").scalar("profile__name")), - [u"Wilson Jr", u"Gabriel Falcao", u"Lincoln de souza", u"Walter cruz"], - ) + assert list( + Person.objects.order_by("profile__age").scalar("profile__name") + ) == [u"Wilson Jr", u"Gabriel Falcao", u"Lincoln de souza", u"Walter cruz"] ulist = list( Person.objects.order_by("locale.city").scalar( "profile__name", "profile__age", "locale__city" ) ) - self.assertEqual( - ulist, - [ - (u"Lincoln de souza", 28, u"Belo Horizonte"), - (u"Walter cruz", 30, u"Brasilia"), - (u"Wilson Jr", 19, u"Corumba-GO"), - (u"Gabriel Falcao", 23, u"New York"), - ], - ) + assert ulist == [ + (u"Lincoln de souza", 28, u"Belo Horizonte"), + (u"Walter cruz", 30, u"Brasilia"), + (u"Wilson Jr", 19, u"Corumba-GO"), + (u"Gabriel Falcao", 23, u"New York"), + ] def test_scalar_decimal(self): from decimal import Decimal @@ -4279,7 +4272,7 @@ class TestQueryset(unittest.TestCase): Person(name="Wilson Jr", rating=Decimal("1.0")).save() ulist = list(Person.objects.scalar("name", "rating")) - self.assertEqual(ulist, [(u"Wilson Jr", Decimal("1.0"))]) + assert ulist == [(u"Wilson Jr", Decimal("1.0"))] def test_scalar_reference_field(self): class State(Document): @@ -4298,7 +4291,7 @@ class TestQueryset(unittest.TestCase): Person(name="Wilson JR", state=s1).save() plist = list(Person.objects.scalar("name", "state")) - self.assertEqual(plist, [(u"Wilson JR", s1)]) + assert plist == [(u"Wilson JR", s1)] def test_scalar_generic_reference_field(self): class State(Document): @@ -4317,7 +4310,7 @@ class TestQueryset(unittest.TestCase): Person(name="Wilson JR", state=s1).save() plist = list(Person.objects.scalar("name", "state")) - self.assertEqual(plist, [(u"Wilson JR", s1)]) + assert plist == [(u"Wilson JR", s1)] def test_generic_reference_field_with_only_and_as_pymongo(self): class TestPerson(Document): @@ -4342,18 +4335,18 @@ class TestQueryset(unittest.TestCase): .no_dereference() .first() ) - self.assertEqual(activity[0], a1.pk) - 
self.assertEqual(activity[1]["_ref"], DBRef("test_person", person.pk)) + assert activity[0] == a1.pk + assert activity[1]["_ref"] == DBRef("test_person", person.pk) activity = TestActivity.objects(owner=person).only("id", "owner")[0] - self.assertEqual(activity.pk, a1.pk) - self.assertEqual(activity.owner, person) + assert activity.pk == a1.pk + assert activity.owner == person activity = ( TestActivity.objects(owner=person).only("id", "owner").as_pymongo().first() ) - self.assertEqual(activity["_id"], a1.pk) - self.assertTrue(activity["owner"]["_ref"], DBRef("test_person", person.pk)) + assert activity["_id"] == a1.pk + assert activity["owner"]["_ref"], DBRef("test_person", person.pk) def test_scalar_db_field(self): class TestDoc(Document): @@ -4367,10 +4360,10 @@ class TestQueryset(unittest.TestCase): TestDoc(x=30, y=True).save() plist = list(TestDoc.objects.scalar("x", "y")) - self.assertEqual(len(plist), 3) - self.assertEqual(plist[0], (10, True)) - self.assertEqual(plist[1], (20, False)) - self.assertEqual(plist[2], (30, True)) + assert len(plist) == 3 + assert plist[0] == (10, True) + assert plist[1] == (20, False) + assert plist[2] == (30, True) def test_scalar_primary_key(self): class SettingValue(Document): @@ -4382,7 +4375,7 @@ class TestQueryset(unittest.TestCase): s.save() val = SettingValue.objects.scalar("key", "value") - self.assertEqual(list(val), [("test", "test value")]) + assert list(val) == [("test", "test value")] def test_scalar_cursor_behaviour(self): """Ensure that a query returns a valid set of results. @@ -4394,90 +4387,86 @@ class TestQueryset(unittest.TestCase): # Find all people in the collection people = self.Person.objects.scalar("name") - self.assertEqual(people.count(), 2) + assert people.count() == 2 results = list(people) - self.assertEqual(results[0], "User A") - self.assertEqual(results[1], "User B") + assert results[0] == "User A" + assert results[1] == "User B" # Use a query to filter the people found to just person1 people = self.Person.objects(age=20).scalar("name") - self.assertEqual(people.count(), 1) + assert people.count() == 1 person = people.next() - self.assertEqual(person, "User A") + assert person == "User A" # Test limit people = list(self.Person.objects.limit(1).scalar("name")) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], "User A") + assert len(people) == 1 + assert people[0] == "User A" # Test skip people = list(self.Person.objects.skip(1).scalar("name")) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], "User B") + assert len(people) == 1 + assert people[0] == "User B" person3 = self.Person(name="User C", age=40) person3.save() # Test slice limit people = list(self.Person.objects[:2].scalar("name")) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], "User A") - self.assertEqual(people[1], "User B") + assert len(people) == 2 + assert people[0] == "User A" + assert people[1] == "User B" # Test slice skip people = list(self.Person.objects[1:].scalar("name")) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], "User B") - self.assertEqual(people[1], "User C") + assert len(people) == 2 + assert people[0] == "User B" + assert people[1] == "User C" # Test slice limit and skip people = list(self.Person.objects[1:2].scalar("name")) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], "User B") + assert len(people) == 1 + assert people[0] == "User B" people = list(self.Person.objects[1:1].scalar("name")) - self.assertEqual(len(people), 0) + assert len(people) == 0 # Test 
slice out of range people = list(self.Person.objects.scalar("name")[80000:80001]) - self.assertEqual(len(people), 0) + assert len(people) == 0 # Test larger slice __repr__ self.Person.objects.delete() for i in range(55): self.Person(name="A%s" % i, age=i).save() - self.assertEqual(self.Person.objects.scalar("name").count(), 55) - self.assertEqual( - "A0", "%s" % self.Person.objects.order_by("name").scalar("name").first() - ) - self.assertEqual( - "A0", "%s" % self.Person.objects.scalar("name").order_by("name")[0] + assert self.Person.objects.scalar("name").count() == 55 + assert ( + "A0" == "%s" % self.Person.objects.order_by("name").scalar("name").first() ) + assert "A0" == "%s" % self.Person.objects.scalar("name").order_by("name")[0] if six.PY3: - self.assertEqual( - "['A1', 'A2']", - "%s" % self.Person.objects.order_by("age").scalar("name")[1:3], + assert ( + "['A1', 'A2']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] ) - self.assertEqual( - "['A51', 'A52']", - "%s" % self.Person.objects.order_by("age").scalar("name")[51:53], + assert ( + "['A51', 'A52']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] ) else: - self.assertEqual( - "[u'A1', u'A2']", - "%s" % self.Person.objects.order_by("age").scalar("name")[1:3], + assert ( + "[u'A1', u'A2']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] ) - self.assertEqual( - "[u'A51', u'A52']", - "%s" % self.Person.objects.order_by("age").scalar("name")[51:53], + assert ( + "[u'A51', u'A52']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] ) # with_id and in_bulk person = self.Person.objects.order_by("name").first() - self.assertEqual( - "A0", "%s" % self.Person.objects.scalar("name").with_id(person.id) - ) + assert "A0" == "%s" % self.Person.objects.scalar("name").with_id(person.id) pks = self.Person.objects.order_by("age").scalar("pk")[1:3] names = self.Person.objects.scalar("name").in_bulk(list(pks)).values() @@ -4485,7 +4474,7 @@ class TestQueryset(unittest.TestCase): expected = "['A1', 'A2']" else: expected = "[u'A1', u'A2']" - self.assertEqual(expected, "%s" % sorted(names)) + assert expected == "%s" % sorted(names) def test_elem_match(self): class Foo(EmbeddedDocument): @@ -4525,29 +4514,29 @@ class TestQueryset(unittest.TestCase): b3.save() ak = list(Bar.objects(foo__match={"shape": "square", "color": "purple"})) - self.assertEqual([b1], ak) + assert [b1] == ak ak = list(Bar.objects(foo__elemMatch={"shape": "square", "color": "purple"})) - self.assertEqual([b1], ak) + assert [b1] == ak ak = list(Bar.objects(foo__match=Foo(shape="square", color="purple"))) - self.assertEqual([b1], ak) + assert [b1] == ak ak = list( Bar.objects(foo__elemMatch={"shape": "square", "color__exists": True}) ) - self.assertEqual([b1, b2], ak) + assert [b1, b2] == ak ak = list(Bar.objects(foo__match={"shape": "square", "color__exists": True})) - self.assertEqual([b1, b2], ak) + assert [b1, b2] == ak ak = list( Bar.objects(foo__elemMatch={"shape": "square", "color__exists": False}) ) - self.assertEqual([b3], ak) + assert [b3] == ak ak = list(Bar.objects(foo__match={"shape": "square", "color__exists": False})) - self.assertEqual([b3], ak) + assert [b3] == ak def test_upsert_includes_cls(self): """Upserts should include _cls information for inheritable classes @@ -4558,7 +4547,7 @@ class TestQueryset(unittest.TestCase): Test.drop_collection() Test.objects(test="foo").update_one(upsert=True, set__test="foo") - self.assertNotIn("_cls", Test._collection.find_one()) + assert "_cls" not 
in Test._collection.find_one() class Test(Document): meta = {"allow_inheritance": True} @@ -4567,15 +4556,15 @@ class TestQueryset(unittest.TestCase): Test.drop_collection() Test.objects(test="foo").update_one(upsert=True, set__test="foo") - self.assertIn("_cls", Test._collection.find_one()) + assert "_cls" in Test._collection.find_one() def test_update_upsert_looks_like_a_digit(self): class MyDoc(DynamicDocument): pass MyDoc.drop_collection() - self.assertEqual(1, MyDoc.objects.update_one(upsert=True, inc__47=1)) - self.assertEqual(MyDoc.objects.get()["47"], 1) + assert 1 == MyDoc.objects.update_one(upsert=True, inc__47=1) + assert MyDoc.objects.get()["47"] == 1 def test_dictfield_key_looks_like_a_digit(self): """Only should work with DictField even if they have numeric keys.""" @@ -4586,7 +4575,7 @@ class TestQueryset(unittest.TestCase): MyDoc.drop_collection() doc = MyDoc(test={"47": 1}) doc.save() - self.assertEqual(MyDoc.objects.only("test__47").get().test["47"], 1) + assert MyDoc.objects.only("test__47").get().test["47"] == 1 def test_clear_cls_query(self): class Parent(Document): @@ -4599,32 +4588,28 @@ class TestQueryset(unittest.TestCase): Parent.drop_collection() # Default query includes the "_cls" check. - self.assertEqual( - Parent.objects._query, {"_cls": {"$in": ("Parent", "Parent.Child")}} - ) + assert Parent.objects._query == {"_cls": {"$in": ("Parent", "Parent.Child")}} # Clearing the "_cls" query should work. - self.assertEqual(Parent.objects.clear_cls_query()._query, {}) + assert Parent.objects.clear_cls_query()._query == {} # Clearing the "_cls" query should not persist across queryset instances. - self.assertEqual( - Parent.objects._query, {"_cls": {"$in": ("Parent", "Parent.Child")}} - ) + assert Parent.objects._query == {"_cls": {"$in": ("Parent", "Parent.Child")}} # The rest of the query should not be cleared. - self.assertEqual( - Parent.objects.filter(name="xyz").clear_cls_query()._query, {"name": "xyz"} - ) + assert Parent.objects.filter(name="xyz").clear_cls_query()._query == { + "name": "xyz" + } Parent.objects.create(name="foo") Child.objects.create(name="bar", age=1) - self.assertEqual(Parent.objects.clear_cls_query().count(), 2) - self.assertEqual(Parent.objects.count(), 2) - self.assertEqual(Child.objects().count(), 1) + assert Parent.objects.clear_cls_query().count() == 2 + assert Parent.objects.count() == 2 + assert Child.objects().count() == 1 # XXX This isn't really how you'd want to use `clear_cls_query()`, but # it's a decent test to validate its behavior nonetheless. - self.assertEqual(Child.objects.clear_cls_query().count(), 2) + assert Child.objects.clear_cls_query().count() == 2 def test_read_preference(self): class Bar(Document): @@ -4636,20 +4621,21 @@ class TestQueryset(unittest.TestCase): bar = Bar.objects.create(txt="xyz") bars = list(Bar.objects.read_preference(ReadPreference.PRIMARY)) - self.assertEqual(bars, [bar]) + assert bars == [bar] bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, ReadPreference.SECONDARY_PREFERRED) - self.assertEqual( - bars._cursor.collection.read_preference, ReadPreference.SECONDARY_PREFERRED + assert bars._read_preference == ReadPreference.SECONDARY_PREFERRED + assert ( + bars._cursor.collection.read_preference == ReadPreference.SECONDARY_PREFERRED ) # Make sure that `.read_preference(...)` does accept string values. 
- self.assertRaises(TypeError, Bar.objects.read_preference, "Primary") + with pytest.raises(TypeError): + Bar.objects.read_preference("Primary") def assert_read_pref(qs, expected_read_pref): - self.assertEqual(qs._read_preference, expected_read_pref) - self.assertEqual(qs._cursor.collection.read_preference, expected_read_pref) + assert qs._read_preference == expected_read_pref + assert qs._cursor.collection.read_preference == expected_read_pref # Make sure read preference is respected after a `.skip(...)`. bars = Bar.objects.skip(1).read_preference(ReadPreference.SECONDARY_PREFERRED) @@ -4681,9 +4667,9 @@ class TestQueryset(unittest.TestCase): bars = Bar.objects.read_preference( ReadPreference.SECONDARY_PREFERRED ).aggregate() - self.assertEqual( - bars._CommandCursor__collection.read_preference, - ReadPreference.SECONDARY_PREFERRED, + assert ( + bars._CommandCursor__collection.read_preference + == ReadPreference.SECONDARY_PREFERRED ) def test_json_simple(self): @@ -4702,7 +4688,7 @@ class TestQueryset(unittest.TestCase): json_data = Doc.objects.to_json(sort_keys=True, separators=(",", ":")) doc_objects = list(Doc.objects) - self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) + assert doc_objects == Doc.objects.from_json(json_data) def test_json_complex(self): class EmbeddedDoc(EmbeddedDocument): @@ -4748,7 +4734,7 @@ class TestQueryset(unittest.TestCase): json_data = Doc.objects.to_json() doc_objects = list(Doc.objects) - self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) + assert doc_objects == Doc.objects.from_json(json_data) def test_as_pymongo(self): class LastLogin(EmbeddedDocument): @@ -4774,36 +4760,33 @@ class TestQueryset(unittest.TestCase): ) results = User.objects.as_pymongo() - self.assertEqual(set(results[0].keys()), set(["_id", "name", "age", "price"])) - self.assertEqual( - set(results[1].keys()), set(["_id", "name", "age", "price", "last_login"]) + assert set(results[0].keys()) == set(["_id", "name", "age", "price"]) + assert set(results[1].keys()) == set( + ["_id", "name", "age", "price", "last_login"] ) results = User.objects.only("id", "name").as_pymongo() - self.assertEqual(set(results[0].keys()), set(["_id", "name"])) + assert set(results[0].keys()) == set(["_id", "name"]) users = User.objects.only("name", "price").as_pymongo() results = list(users) - self.assertIsInstance(results[0], dict) - self.assertIsInstance(results[1], dict) - self.assertEqual(results[0]["name"], "Bob Dole") - self.assertEqual(results[0]["price"], 1.11) - self.assertEqual(results[1]["name"], "Barak Obama") - self.assertEqual(results[1]["price"], 2.22) + assert isinstance(results[0], dict) + assert isinstance(results[1], dict) + assert results[0]["name"] == "Bob Dole" + assert results[0]["price"] == 1.11 + assert results[1]["name"] == "Barak Obama" + assert results[1]["price"] == 2.22 users = User.objects.only("name", "last_login").as_pymongo() results = list(users) - self.assertIsInstance(results[0], dict) - self.assertIsInstance(results[1], dict) - self.assertEqual(results[0], {"_id": "Bob", "name": "Bob Dole"}) - self.assertEqual( - results[1], - { - "_id": "Barak", - "name": "Barak Obama", - "last_login": {"location": "White House", "ip": "104.107.108.116"}, - }, - ) + assert isinstance(results[0], dict) + assert isinstance(results[1], dict) + assert results[0] == {"_id": "Bob", "name": "Bob Dole"} + assert results[1] == { + "_id": "Barak", + "name": "Barak Obama", + "last_login": {"location": "White House", "ip": "104.107.108.116"}, + } def 
test_as_pymongo_returns_cls_attribute_when_using_inheritance(self): class User(Document): @@ -4814,7 +4797,7 @@ class TestQueryset(unittest.TestCase): user = User(name="Bob Dole").save() result = User.objects.as_pymongo().first() - self.assertEqual(result, {"_cls": "User", "_id": user.id, "name": "Bob Dole"}) + assert result == {"_cls": "User", "_id": user.id, "name": "Bob Dole"} def test_as_pymongo_json_limit_fields(self): class User(Document): @@ -4830,30 +4813,30 @@ class TestQueryset(unittest.TestCase): serialized_user = User.objects.exclude( "password_salt", "password_hash" ).as_pymongo()[0] - self.assertEqual({"_id", "email"}, set(serialized_user.keys())) + assert {"_id", "email"} == set(serialized_user.keys()) serialized_user = User.objects.exclude( "id", "password_salt", "password_hash" ).to_json() - self.assertEqual('[{"email": "ross@example.com"}]', serialized_user) + assert '[{"email": "ross@example.com"}]' == serialized_user serialized_user = User.objects.only("email").as_pymongo()[0] - self.assertEqual({"_id", "email"}, set(serialized_user.keys())) + assert {"_id", "email"} == set(serialized_user.keys()) serialized_user = ( User.objects.exclude("password_salt").only("email").as_pymongo()[0] ) - self.assertEqual({"_id", "email"}, set(serialized_user.keys())) + assert {"_id", "email"} == set(serialized_user.keys()) serialized_user = ( User.objects.exclude("password_salt", "id").only("email").as_pymongo()[0] ) - self.assertEqual({"email"}, set(serialized_user.keys())) + assert {"email"} == set(serialized_user.keys()) serialized_user = ( User.objects.exclude("password_salt", "id").only("email").to_json() ) - self.assertEqual('[{"email": "ross@example.com"}]', serialized_user) + assert '[{"email": "ross@example.com"}]' == serialized_user def test_only_after_count(self): """Test that only() works after count()""" @@ -4869,13 +4852,13 @@ class TestQueryset(unittest.TestCase): user_queryset = User.objects(age=50) result = user_queryset.only("name", "age").as_pymongo().first() - self.assertEqual(result, {"_id": user.id, "name": "User", "age": 50}) + assert result == {"_id": user.id, "name": "User", "age": 50} result = user_queryset.count() - self.assertEqual(result, 1) + assert result == 1 result = user_queryset.only("name", "age").as_pymongo().first() - self.assertEqual(result, {"_id": user.id, "name": "User", "age": 50}) + assert result == {"_id": user.id, "name": "User", "age": 50} def test_no_dereference(self): class Organization(Document): @@ -4894,12 +4877,12 @@ class TestQueryset(unittest.TestCase): qs = User.objects() qs_user = qs.first() - self.assertIsInstance(qs.first().organization, Organization) + assert isinstance(qs.first().organization, Organization) - self.assertIsInstance(qs.no_dereference().first().organization, DBRef) + assert isinstance(qs.no_dereference().first().organization, DBRef) - self.assertIsInstance(qs_user.organization, Organization) - self.assertIsInstance(qs.first().organization, Organization) + assert isinstance(qs_user.organization, Organization) + assert isinstance(qs.first().organization, Organization) def test_no_dereference_internals(self): # Test the internals on which queryset.no_dereference relies on @@ -4913,24 +4896,24 @@ class TestQueryset(unittest.TestCase): Organization.drop_collection() cls_organization_field = User.organization - self.assertTrue(cls_organization_field._auto_dereference, True) # default + assert cls_organization_field._auto_dereference, True # default org = Organization(name="whatever").save() 
User(organization=org).save() qs_no_deref = User.objects().no_dereference() user_no_deref = qs_no_deref.first() - self.assertFalse(qs_no_deref._auto_dereference) + assert not qs_no_deref._auto_dereference # Make sure the instance field is different from the class field instance_org_field = user_no_deref._fields["organization"] - self.assertIsNot(instance_org_field, cls_organization_field) - self.assertFalse(instance_org_field._auto_dereference) + assert instance_org_field is not cls_organization_field + assert not instance_org_field._auto_dereference - self.assertIsInstance(user_no_deref.organization, DBRef) - self.assertTrue( - cls_organization_field._auto_dereference, True - ) # Make sure the class Field wasn't altered + assert isinstance(user_no_deref.organization, DBRef) + assert ( + cls_organization_field._auto_dereference + ), True # Make sure the class Field wasn't altered def test_no_dereference_no_side_effect_on_existing_instance(self): # Relates to issue #1677 - ensures no regression of the bug @@ -4956,13 +4939,13 @@ class TestQueryset(unittest.TestCase): # ReferenceField no_derf_org = user_no_deref.organization # was triggering the bug - self.assertIsInstance(no_derf_org, DBRef) - self.assertIsInstance(user.organization, Organization) + assert isinstance(no_derf_org, DBRef) + assert isinstance(user.organization, Organization) # GenericReferenceField no_derf_org_gen = user_no_deref.organization_gen - self.assertIsInstance(no_derf_org_gen, dict) - self.assertIsInstance(user.organization_gen, Organization) + assert isinstance(no_derf_org_gen, dict) + assert isinstance(user.organization_gen, Organization) def test_no_dereference_embedded_doc(self): class User(Document): @@ -4994,13 +4977,13 @@ class TestQueryset(unittest.TestCase): org = Organization.objects().no_dereference().first() - self.assertNotEqual(id(org._fields["admins"]), id(Organization.admins)) - self.assertFalse(org._fields["admins"]._auto_dereference) + assert id(org._fields["admins"]) != id(Organization.admins) + assert not org._fields["admins"]._auto_dereference admin = org.admins[0] - self.assertIsInstance(admin, DBRef) - self.assertIsInstance(org.member.user, DBRef) - self.assertIsInstance(org.members[0].user, DBRef) + assert isinstance(admin, DBRef) + assert isinstance(org.member.user, DBRef) + assert isinstance(org.members[0].user, DBRef) def test_cached_queryset(self): class Person(Document): @@ -5011,11 +4994,11 @@ class TestQueryset(unittest.TestCase): Person(name="No: %s" % i).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 people = Person.objects [x for x in people] - self.assertEqual(100, len(people._result_cache)) + assert 100 == len(people._result_cache) import platform @@ -5023,15 +5006,15 @@ class TestQueryset(unittest.TestCase): # PyPy evaluates __len__ when iterating with list comprehensions while CPython does not. # This may be a bug in PyPy (PyPy/#1802) but it does not affect # the behavior of MongoEngine. 
- self.assertEqual(None, people._len) - self.assertEqual(q, 1) + assert None == people._len + assert q == 1 list(people) - self.assertEqual(100, people._len) # Caused by list calling len - self.assertEqual(q, 1) + assert 100 == people._len # Caused by list calling len + assert q == 1 people.count(with_limit_and_skip=True) # count is cached - self.assertEqual(q, 1) + assert q == 1 def test_no_cached_queryset(self): class Person(Document): @@ -5042,17 +5025,17 @@ class TestQueryset(unittest.TestCase): Person(name="No: %s" % i).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 people = Person.objects.no_cache() [x for x in people] - self.assertEqual(q, 1) + assert q == 1 list(people) - self.assertEqual(q, 2) + assert q == 2 people.count() - self.assertEqual(q, 3) + assert q == 3 def test_no_cached_queryset__repr__(self): class Person(Document): @@ -5060,7 +5043,7 @@ class TestQueryset(unittest.TestCase): Person.drop_collection() qs = Person.objects.no_cache() - self.assertEqual(repr(qs), "[]") + assert repr(qs) == "[]" def test_no_cached_on_a_cached_queryset_raise_error(self): class Person(Document): @@ -5070,9 +5053,9 @@ class TestQueryset(unittest.TestCase): Person(name="a").save() qs = Person.objects() _ = list(qs) - with self.assertRaises(OperationError) as ctx_err: + with pytest.raises(OperationError) as ctx_err: qs.no_cache() - self.assertEqual("QuerySet already cached", str(ctx_err.exception)) + assert "QuerySet already cached" == str(ctx_err.exception) def test_no_cached_queryset_no_cache_back_to_cache(self): class Person(Document): @@ -5080,11 +5063,11 @@ class TestQueryset(unittest.TestCase): Person.drop_collection() qs = Person.objects() - self.assertIsInstance(qs, QuerySet) + assert isinstance(qs, QuerySet) qs = qs.no_cache() - self.assertIsInstance(qs, QuerySetNoCache) + assert isinstance(qs, QuerySetNoCache) qs = qs.cache() - self.assertIsInstance(qs, QuerySet) + assert isinstance(qs, QuerySet) def test_cache_not_cloned(self): class User(Document): @@ -5099,12 +5082,12 @@ class TestQueryset(unittest.TestCase): User(name="Bob").save() users = User.objects.all().order_by("name") - self.assertEqual("%s" % users, "[, ]") - self.assertEqual(2, len(users._result_cache)) + assert "%s" % users == "[, ]" + assert 2 == len(users._result_cache) users = users.filter(name="Bob") - self.assertEqual("%s" % users, "[]") - self.assertEqual(1, len(users._result_cache)) + assert "%s" % users == "[]" + assert 1 == len(users._result_cache) def test_no_cache(self): """Ensure you can add meta data to file""" @@ -5122,23 +5105,23 @@ class TestQueryset(unittest.TestCase): docs = Noddy.objects.no_cache() counter = len([1 for i in docs]) - self.assertEqual(counter, 100) + assert counter == 100 - self.assertEqual(len(list(docs)), 100) + assert len(list(docs)) == 100 # Can't directly get a length of a no-cache queryset. - with self.assertRaises(TypeError): + with pytest.raises(TypeError): len(docs) # Another iteration over the queryset should result in another db op. with query_counter() as q: list(docs) - self.assertEqual(q, 1) + assert q == 1 # ... and another one to double-check. with query_counter() as q: list(docs) - self.assertEqual(q, 1) + assert q == 1 def test_nested_queryset_iterator(self): # Try iterating the same queryset twice, nested. 
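The hunks above (test_cached_queryset, test_no_cached_queryset, test_no_cache) assert how QuerySet result caching interacts with query_counter. A minimal sketch of the behaviour those tests assert, not part of the patch itself; the database name passed to connect() is illustrative, everything else mirrors the tests:

from mongoengine import Document, StringField, connect
from mongoengine.context_managers import query_counter

connect("mongoenginetest")  # assumes a local MongoDB instance is reachable

class Person(Document):
    name = StringField()

Person.drop_collection()
for i in range(100):
    Person(name="No: %s" % i).save()

with query_counter() as q:
    people = Person.objects             # the default QuerySet caches results
    list(people)                        # first iteration issues one query
    list(people)                        # second pass is served from _result_cache
    assert q == 1

with query_counter() as q:
    people = Person.objects.no_cache()  # QuerySetNoCache keeps no result cache
    list(people)
    list(people)                        # re-iterating issues another query
    assert q == 2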
@@ -5161,32 +5144,32 @@ class TestQueryset(unittest.TestCase): inner_total_count = 0 with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 - self.assertEqual(users.count(with_limit_and_skip=True), 7) + assert users.count(with_limit_and_skip=True) == 7 for i, outer_user in enumerate(users): - self.assertEqual(outer_user.name, names[i]) + assert outer_user.name == names[i] outer_count += 1 inner_count = 0 # Calling len might disrupt the inner loop if there are bugs - self.assertEqual(users.count(with_limit_and_skip=True), 7) + assert users.count(with_limit_and_skip=True) == 7 for j, inner_user in enumerate(users): - self.assertEqual(inner_user.name, names[j]) + assert inner_user.name == names[j] inner_count += 1 inner_total_count += 1 # inner loop should always be executed seven times - self.assertEqual(inner_count, 7) + assert inner_count == 7 # outer loop should be executed seven times total - self.assertEqual(outer_count, 7) + assert outer_count == 7 # inner loop should be executed fourtynine times total - self.assertEqual(inner_total_count, 7 * 7) + assert inner_total_count == 7 * 7 - self.assertEqual(q, 2) + assert q == 2 def test_no_sub_classes(self): class A(Document): @@ -5209,23 +5192,23 @@ class TestQueryset(unittest.TestCase): B(x=30, y=50).save() C(x=40, y=60).save() - self.assertEqual(A.objects.no_sub_classes().count(), 2) - self.assertEqual(A.objects.count(), 5) + assert A.objects.no_sub_classes().count() == 2 + assert A.objects.count() == 5 - self.assertEqual(B.objects.no_sub_classes().count(), 2) - self.assertEqual(B.objects.count(), 3) + assert B.objects.no_sub_classes().count() == 2 + assert B.objects.count() == 3 - self.assertEqual(C.objects.no_sub_classes().count(), 1) - self.assertEqual(C.objects.count(), 1) + assert C.objects.no_sub_classes().count() == 1 + assert C.objects.count() == 1 for obj in A.objects.no_sub_classes(): - self.assertEqual(obj.__class__, A) + assert obj.__class__ == A for obj in B.objects.no_sub_classes(): - self.assertEqual(obj.__class__, B) + assert obj.__class__ == B for obj in C.objects.no_sub_classes(): - self.assertEqual(obj.__class__, C) + assert obj.__class__ == C def test_query_generic_embedded_document(self): """Ensure that querying sub field on generic_embedded_field works @@ -5245,10 +5228,10 @@ class TestQueryset(unittest.TestCase): Doc(document=B(b_name="B doc")).save() # Using raw in filter working fine - self.assertEqual(Doc.objects(__raw__={"document.a_name": "A doc"}).count(), 1) - self.assertEqual(Doc.objects(__raw__={"document.b_name": "B doc"}).count(), 1) - self.assertEqual(Doc.objects(document__a_name="A doc").count(), 1) - self.assertEqual(Doc.objects(document__b_name="B doc").count(), 1) + assert Doc.objects(__raw__={"document.a_name": "A doc"}).count() == 1 + assert Doc.objects(__raw__={"document.b_name": "B doc"}).count() == 1 + assert Doc.objects(document__a_name="A doc").count() == 1 + assert Doc.objects(document__b_name="B doc").count() == 1 def test_query_reference_to_custom_pk_doc(self): class A(Document): @@ -5263,9 +5246,9 @@ class TestQueryset(unittest.TestCase): a = A.objects.create(id="custom_id") B.objects.create(a=a) - self.assertEqual(B.objects.count(), 1) - self.assertEqual(B.objects.get(a=a).a, a) - self.assertEqual(B.objects.get(a=a.id).a, a) + assert B.objects.count() == 1 + assert B.objects.get(a=a).a == a + assert B.objects.get(a=a.id).a == a def test_cls_query_in_subclassed_docs(self): class Animal(Document): @@ -5279,21 +5262,18 @@ class TestQueryset(unittest.TestCase): class Cat(Animal): 
pass - self.assertEqual( - Animal.objects(name="Charlie")._query, - { - "name": "Charlie", - "_cls": {"$in": ("Animal", "Animal.Dog", "Animal.Cat")}, - }, - ) - self.assertEqual( - Dog.objects(name="Charlie")._query, - {"name": "Charlie", "_cls": "Animal.Dog"}, - ) - self.assertEqual( - Cat.objects(name="Charlie")._query, - {"name": "Charlie", "_cls": "Animal.Cat"}, - ) + assert Animal.objects(name="Charlie")._query == { + "name": "Charlie", + "_cls": {"$in": ("Animal", "Animal.Dog", "Animal.Cat")}, + } + assert Dog.objects(name="Charlie")._query == { + "name": "Charlie", + "_cls": "Animal.Dog", + } + assert Cat.objects(name="Charlie")._query == { + "name": "Charlie", + "_cls": "Animal.Cat", + } def test_can_have_field_same_name_as_query_operator(self): class Size(Document): @@ -5308,8 +5288,8 @@ class TestQueryset(unittest.TestCase): instance_size = Size(name="Large").save() Example(size=instance_size).save() - self.assertEqual(Example.objects(size=instance_size).count(), 1) - self.assertEqual(Example.objects(size__in=[instance_size]).count(), 1) + assert Example.objects(size=instance_size).count() == 1 + assert Example.objects(size__in=[instance_size]).count() == 1 def test_cursor_in_an_if_stmt(self): class Test(Document): @@ -5347,12 +5327,12 @@ class TestQueryset(unittest.TestCase): if Person.objects: pass - self.assertEqual(q, 1) + assert q == 1 op = q.db.system.profile.find( {"ns": {"$ne": "%s.system.indexes" % q.db.name}} )[0] - self.assertEqual(op["nreturned"], 1) + assert op["nreturned"] == 1 def test_bool_with_ordering(self): ORDER_BY_KEY, CMD_QUERY_KEY = get_key_compat(self.mongodb_version) @@ -5375,7 +5355,7 @@ class TestQueryset(unittest.TestCase): {"ns": {"$ne": "%s.system.indexes" % q.db.name}} )[0] - self.assertNotIn(ORDER_BY_KEY, op[CMD_QUERY_KEY]) + assert ORDER_BY_KEY not in op[CMD_QUERY_KEY] # Check that normal query uses orderby qs2 = Person.objects.order_by("name") @@ -5388,7 +5368,7 @@ class TestQueryset(unittest.TestCase): {"ns": {"$ne": "%s.system.indexes" % q.db.name}} )[0] - self.assertIn(ORDER_BY_KEY, op[CMD_QUERY_KEY]) + assert ORDER_BY_KEY in op[CMD_QUERY_KEY] def test_bool_with_ordering_from_meta_dict(self): ORDER_BY_KEY, CMD_QUERY_KEY = get_key_compat(self.mongodb_version) @@ -5412,16 +5392,12 @@ class TestQueryset(unittest.TestCase): {"ns": {"$ne": "%s.system.indexes" % q.db.name}} )[0] - self.assertNotIn( - "$orderby", - op[CMD_QUERY_KEY], - "BaseQuerySet must remove orderby from meta in boolen test", - ) + assert ( + "$orderby" not in op[CMD_QUERY_KEY] + ), "BaseQuerySet must remove orderby from meta in boolen test" - self.assertEqual(Person.objects.first().name, "A") - self.assertTrue( - Person.objects._has_data(), "Cursor has data and returned False" - ) + assert Person.objects.first().name == "A" + assert Person.objects._has_data(), "Cursor has data and returned False" def test_queryset_aggregation_framework(self): class Person(Document): @@ -5439,13 +5415,10 @@ class TestQueryset(unittest.TestCase): {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual( - list(data), - [ - {"_id": p1.pk, "name": "ISABELLA LUANNA"}, - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - ], - ) + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ] data = ( Person.objects(age__lte=22) @@ -5453,13 +5426,10 @@ class TestQueryset(unittest.TestCase): .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual( - list(data), - [ - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - {"_id": p1.pk, 
"name": "ISABELLA LUANNA"}, - ], - ) + assert list(data) == [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + ] data = ( Person.objects(age__gte=17, age__lte=40) @@ -5468,12 +5438,10 @@ class TestQueryset(unittest.TestCase): {"$group": {"_id": None, "total": {"$sum": 1}, "avg": {"$avg": "$age"}}} ) ) - self.assertEqual(list(data), [{"_id": None, "avg": 29, "total": 2}]) + assert list(data) == [{"_id": None, "avg": 29, "total": 2}] data = Person.objects().aggregate({"$match": {"name": "Isabella Luanna"}}) - self.assertEqual( - list(data), [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}] - ) + assert list(data) == [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}] def test_queryset_aggregation_with_skip(self): class Person(Document): @@ -5491,13 +5459,10 @@ class TestQueryset(unittest.TestCase): {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual( - list(data), - [ - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - {"_id": p3.pk, "name": "SANDRA MARA"}, - ], - ) + assert list(data) == [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] def test_queryset_aggregation_with_limit(self): class Person(Document): @@ -5515,7 +5480,7 @@ class TestQueryset(unittest.TestCase): {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual(list(data), [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]) + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] def test_queryset_aggregation_with_sort(self): class Person(Document): @@ -5533,14 +5498,11 @@ class TestQueryset(unittest.TestCase): {"$project": {"name": {"$toUpper": "$name"}}} ) - self.assertEqual( - list(data), - [ - {"_id": p1.pk, "name": "ISABELLA LUANNA"}, - {"_id": p3.pk, "name": "SANDRA MARA"}, - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - ], - ) + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ] def test_queryset_aggregation_with_skip_with_limit(self): class Person(Document): @@ -5560,7 +5522,7 @@ class TestQueryset(unittest.TestCase): .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(list(data), [{"_id": p2.pk, "name": "WILSON JUNIOR"}]) + assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] # Make sure limit/skip chaining order has no impact data2 = ( @@ -5569,7 +5531,7 @@ class TestQueryset(unittest.TestCase): .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(data, list(data2)) + assert data == list(data2) def test_queryset_aggregation_with_sort_with_limit(self): class Person(Document): @@ -5589,13 +5551,10 @@ class TestQueryset(unittest.TestCase): .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual( - list(data), - [ - {"_id": p1.pk, "name": "ISABELLA LUANNA"}, - {"_id": p3.pk, "name": "SANDRA MARA"}, - ], - ) + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] # Verify adding limit/skip steps works as expected data = ( @@ -5604,7 +5563,7 @@ class TestQueryset(unittest.TestCase): .aggregate({"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}) ) - self.assertEqual(list(data), [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]) + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] data = ( Person.objects.order_by("name") @@ -5616,7 +5575,7 @@ class TestQueryset(unittest.TestCase): ) ) - self.assertEqual(list(data), [{"_id": p3.pk, "name": "SANDRA 
MARA"}]) + assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}] def test_queryset_aggregation_with_sort_with_skip(self): class Person(Document): @@ -5636,7 +5595,7 @@ class TestQueryset(unittest.TestCase): .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(list(data), [{"_id": p2.pk, "name": "WILSON JUNIOR"}]) + assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] def test_queryset_aggregation_with_sort_with_skip_with_limit(self): class Person(Document): @@ -5657,30 +5616,29 @@ class TestQueryset(unittest.TestCase): .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) ) - self.assertEqual(list(data), [{"_id": p3.pk, "name": "SANDRA MARA"}]) + assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}] def test_delete_count(self): [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] - self.assertEqual( - self.Person.objects().delete(), 3 + assert ( + self.Person.objects().delete() == 3 ) # test ordinary QuerySey delete count [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] - self.assertEqual( - self.Person.objects().skip(1).delete(), 2 + assert ( + self.Person.objects().skip(1).delete() == 2 ) # test Document delete with existing documents self.Person.objects().delete() - self.assertEqual( - self.Person.objects().skip(1).delete(), 0 + assert ( + self.Person.objects().skip(1).delete() == 0 ) # test Document delete without existing documents def test_max_time_ms(self): # 778: max_time_ms can get only int or None as input - self.assertRaises( - TypeError, self.Person.objects(name="name").max_time_ms, "not a number" - ) + with pytest.raises(TypeError): + self.Person.objects(name="name").max_time_ms("not a number") def test_subclass_field_query(self): class Animal(Document): @@ -5698,8 +5656,8 @@ class TestQueryset(unittest.TestCase): Animal(is_mamal=False).save() Cat(is_mamal=True, whiskers_length=5.1).save() ScottishCat(is_mamal=True, folded_ears=True).save() - self.assertEqual(Animal.objects(folded_ears=True).count(), 1) - self.assertEqual(Animal.objects(whiskers_length=5.1).count(), 1) + assert Animal.objects(folded_ears=True).count() == 1 + assert Animal.objects(whiskers_length=5.1).count() == 1 def test_loop_over_invalid_id_does_not_crash(self): class Person(Document): @@ -5709,7 +5667,7 @@ class TestQueryset(unittest.TestCase): Person._get_collection().insert_one({"name": "a", "id": ""}) for p in Person.objects(): - self.assertEqual(p.name, "a") + assert p.name == "a" def test_len_during_iteration(self): """Tests that calling len on a queyset during iteration doesn't @@ -5733,7 +5691,7 @@ class TestQueryset(unittest.TestCase): for i, r in enumerate(records): if i == 58: len(records) - self.assertEqual(i, 249) + assert i == 249 # Assert the same behavior is true even if we didn't pre-populate the # result cache. 
@@ -5741,7 +5699,7 @@ class TestQueryset(unittest.TestCase): for i, r in enumerate(records): if i == 58: len(records) - self.assertEqual(i, 249) + assert i == 249 def test_iteration_within_iteration(self): """You should be able to reliably iterate over all the documents @@ -5760,8 +5718,8 @@ class TestQueryset(unittest.TestCase): for j, doc2 in enumerate(qs): pass - self.assertEqual(i, 249) - self.assertEqual(j, 249) + assert i == 249 + assert j == 249 def test_in_operator_on_non_iterable(self): """Ensure that using the `__in` operator on a non-iterable raises an @@ -5785,24 +5743,26 @@ class TestQueryset(unittest.TestCase): # Make sure using `__in` with a list works blog_posts = BlogPost.objects(authors__in=[author]) - self.assertEqual(list(blog_posts), [post]) + assert list(blog_posts) == [post] # Using `__in` with a non-iterable should raise a TypeError - self.assertRaises(TypeError, BlogPost.objects(authors__in=author.pk).count) + with pytest.raises(TypeError): + BlogPost.objects(authors__in=author.pk).count() # Using `__in` with a `Document` (which is seemingly iterable but not # in a way we'd expect) should raise a TypeError, too - self.assertRaises(TypeError, BlogPost.objects(authors__in=author).count) + with pytest.raises(TypeError): + BlogPost.objects(authors__in=author).count() def test_create_count(self): self.Person.drop_collection() self.Person.objects.create(name="Foo") self.Person.objects.create(name="Bar") self.Person.objects.create(name="Baz") - self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 3) + assert self.Person.objects.count(with_limit_and_skip=True) == 3 - self.Person.objects.create(name="Foo_1") - self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 4) + newPerson = self.Person.objects.create(name="Foo_1") + assert self.Person.objects.count(with_limit_and_skip=True) == 4 def test_no_cursor_timeout(self): qs = self.Person.objects() diff --git a/tests/queryset/test_transform.py b/tests/queryset/test_transform.py index 8207351d..be28c3b8 100644 --- a/tests/queryset/test_transform.py +++ b/tests/queryset/test_transform.py @@ -4,6 +4,7 @@ from bson.son import SON from mongoengine import * from mongoengine.queryset import Q, transform +import pytest class TestTransform(unittest.TestCase): @@ -13,23 +14,16 @@ class TestTransform(unittest.TestCase): def test_transform_query(self): """Ensure that the _transform_query function operates correctly. 
""" - self.assertEqual( - transform.query(name="test", age=30), {"name": "test", "age": 30} - ) - self.assertEqual(transform.query(age__lt=30), {"age": {"$lt": 30}}) - self.assertEqual( - transform.query(age__gt=20, age__lt=50), {"age": {"$gt": 20, "$lt": 50}} - ) - self.assertEqual( - transform.query(age=20, age__gt=50), - {"$and": [{"age": {"$gt": 50}}, {"age": 20}]}, - ) - self.assertEqual( - transform.query(friend__age__gte=30), {"friend.age": {"$gte": 30}} - ) - self.assertEqual( - transform.query(name__exists=True), {"name": {"$exists": True}} - ) + assert transform.query(name="test", age=30) == {"name": "test", "age": 30} + assert transform.query(age__lt=30) == {"age": {"$lt": 30}} + assert transform.query(age__gt=20, age__lt=50) == { + "age": {"$gt": 20, "$lt": 50} + } + assert transform.query(age=20, age__gt=50) == { + "$and": [{"age": {"$gt": 50}}, {"age": 20}] + } + assert transform.query(friend__age__gte=30) == {"friend.age": {"$gte": 30}} + assert transform.query(name__exists=True) == {"name": {"$exists": True}} def test_transform_update(self): class LisDoc(Document): @@ -54,17 +48,17 @@ class TestTransform(unittest.TestCase): ("push", "$push"), ): update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) - self.assertIsInstance(update[v]["dictField.test"], dict) + assert isinstance(update[v]["dictField.test"], dict) # Update special cases update = transform.update(DicDoc, unset__dictField__test=doc) - self.assertEqual(update["$unset"]["dictField.test"], 1) + assert update["$unset"]["dictField.test"] == 1 update = transform.update(DicDoc, pull__dictField__test=doc) - self.assertIsInstance(update["$pull"]["dictField"]["test"], dict) + assert isinstance(update["$pull"]["dictField"]["test"], dict) update = transform.update(LisDoc, pull__foo__in=["a"]) - self.assertEqual(update, {"$pull": {"foo": {"$in": ["a"]}}}) + assert update == {"$pull": {"foo": {"$in": ["a"]}}} def test_transform_update_push(self): """Ensure the differences in behvaior between 'push' and 'push_all'""" @@ -73,10 +67,10 @@ class TestTransform(unittest.TestCase): tags = ListField(StringField()) update = transform.update(BlogPost, push__tags=["mongo", "db"]) - self.assertEqual(update, {"$push": {"tags": ["mongo", "db"]}}) + assert update == {"$push": {"tags": ["mongo", "db"]}} update = transform.update(BlogPost, push_all__tags=["mongo", "db"]) - self.assertEqual(update, {"$push": {"tags": {"$each": ["mongo", "db"]}}}) + assert update == {"$push": {"tags": {"$each": ["mongo", "db"]}}} def test_transform_update_no_operator_default_to_set(self): """Ensure the differences in behvaior between 'push' and 'push_all'""" @@ -85,7 +79,7 @@ class TestTransform(unittest.TestCase): tags = ListField(StringField()) update = transform.update(BlogPost, tags=["mongo", "db"]) - self.assertEqual(update, {"$set": {"tags": ["mongo", "db"]}}) + assert update == {"$set": {"tags": ["mongo", "db"]}} def test_query_field_name(self): """Ensure that the correct field name is used when querying. 
@@ -106,18 +100,18 @@ class TestTransform(unittest.TestCase): post = BlogPost(**data) post.save() - self.assertIn("postTitle", BlogPost.objects(title=data["title"])._query) - self.assertFalse("title" in BlogPost.objects(title=data["title"])._query) - self.assertEqual(BlogPost.objects(title=data["title"]).count(), 1) + assert "postTitle" in BlogPost.objects(title=data["title"])._query + assert not ("title" in BlogPost.objects(title=data["title"])._query) + assert BlogPost.objects(title=data["title"]).count() == 1 - self.assertIn("_id", BlogPost.objects(pk=post.id)._query) - self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) + assert "_id" in BlogPost.objects(pk=post.id)._query + assert BlogPost.objects(pk=post.id).count() == 1 - self.assertIn( - "postComments.commentContent", - BlogPost.objects(comments__content="test")._query, + assert ( + "postComments.commentContent" + in BlogPost.objects(comments__content="test")._query ) - self.assertEqual(BlogPost.objects(comments__content="test").count(), 1) + assert BlogPost.objects(comments__content="test").count() == 1 BlogPost.drop_collection() @@ -135,9 +129,9 @@ class TestTransform(unittest.TestCase): post = BlogPost(**data) post.save() - self.assertIn("_id", BlogPost.objects(pk=data["title"])._query) - self.assertIn("_id", BlogPost.objects(title=data["title"])._query) - self.assertEqual(BlogPost.objects(pk=data["title"]).count(), 1) + assert "_id" in BlogPost.objects(pk=data["title"])._query + assert "_id" in BlogPost.objects(title=data["title"])._query + assert BlogPost.objects(pk=data["title"]).count() == 1 BlogPost.drop_collection() @@ -163,7 +157,7 @@ class TestTransform(unittest.TestCase): q2 = B.objects.filter(a__in=[a1, a2]) q2 = q2.filter(a=a1)._query - self.assertEqual(q1, q2) + assert q1 == q2 def test_raw_query_and_Q_objects(self): """ @@ -179,11 +173,11 @@ class TestTransform(unittest.TestCase): meta = {"allow_inheritance": False} query = Foo.objects(__raw__={"$nor": [{"name": "bar"}]})._query - self.assertEqual(query, {"$nor": [{"name": "bar"}]}) + assert query == {"$nor": [{"name": "bar"}]} q1 = {"$or": [{"a": 1}, {"b": 1}]} query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query - self.assertEqual(query, {"$or": [{"a": 1}, {"b": 1}], "c": 1}) + assert query == {"$or": [{"a": 1}, {"b": 1}], "c": 1} def test_raw_and_merging(self): class Doc(Document): @@ -200,51 +194,39 @@ class TestTransform(unittest.TestCase): } )._query - self.assertEqual( - raw_query, - { - "deleted": False, - "scraped": "yes", - "$nor": [ - {"views.extracted": "no"}, - {"attachments.views.extracted": "no"}, - ], - }, - ) + assert raw_query == { + "deleted": False, + "scraped": "yes", + "$nor": [{"views.extracted": "no"}, {"attachments.views.extracted": "no"}], + } def test_geojson_PointField(self): class Location(Document): loc = PointField() update = transform.update(Location, set__loc=[1, 2]) - self.assertEqual( - update, {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} - ) + assert update == {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} update = transform.update( Location, set__loc={"type": "Point", "coordinates": [1, 2]} ) - self.assertEqual( - update, {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} - ) + assert update == {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} def test_geojson_LineStringField(self): class Location(Document): line = LineStringField() update = transform.update(Location, set__line=[[1, 2], [2, 2]]) - self.assertEqual( - update, - {"$set": {"line": {"type": "LineString", 
"coordinates": [[1, 2], [2, 2]]}}}, - ) + assert update == { + "$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}} + } update = transform.update( Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]} ) - self.assertEqual( - update, - {"$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}, - ) + assert update == { + "$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}} + } def test_geojson_PolygonField(self): class Location(Document): @@ -253,17 +235,14 @@ class TestTransform(unittest.TestCase): update = transform.update( Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]] ) - self.assertEqual( - update, - { - "$set": { - "poly": { - "type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], - } + assert update == { + "$set": { + "poly": { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], } - }, - ) + } + } update = transform.update( Location, @@ -272,17 +251,14 @@ class TestTransform(unittest.TestCase): "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], }, ) - self.assertEqual( - update, - { - "$set": { - "poly": { - "type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], - } + assert update == { + "$set": { + "poly": { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], } - }, - ) + } + } def test_type(self): class Doc(Document): @@ -291,10 +267,10 @@ class TestTransform(unittest.TestCase): Doc(df=True).save() Doc(df=7).save() Doc(df="df").save() - self.assertEqual(Doc.objects(df__type=1).count(), 0) # double - self.assertEqual(Doc.objects(df__type=8).count(), 1) # bool - self.assertEqual(Doc.objects(df__type=2).count(), 1) # str - self.assertEqual(Doc.objects(df__type=16).count(), 1) # int + assert Doc.objects(df__type=1).count() == 0 # double + assert Doc.objects(df__type=8).count() == 1 # bool + assert Doc.objects(df__type=2).count() == 1 # str + assert Doc.objects(df__type=16).count() == 1 # int def test_last_field_name_like_operator(self): class EmbeddedItem(EmbeddedDocument): @@ -309,12 +285,12 @@ class TestTransform(unittest.TestCase): doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe")) doc.save() - self.assertEqual(1, Doc.objects(item__type__="axe").count()) - self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count()) + assert 1 == Doc.objects(item__type__="axe").count() + assert 1 == Doc.objects(item__name__="Heroic axe").count() Doc.objects(id=doc.id).update(set__item__type__="sword") - self.assertEqual(1, Doc.objects(item__type__="sword").count()) - self.assertEqual(0, Doc.objects(item__type__="axe").count()) + assert 1 == Doc.objects(item__type__="sword").count() + assert 0 == Doc.objects(item__type__="axe").count() def test_understandable_error_raised(self): class Event(Document): @@ -324,7 +300,7 @@ class TestTransform(unittest.TestCase): box = [(35.0, -125.0), (40.0, -100.0)] # I *meant* to execute location__within_box=box events = Event.objects(location__within=box) - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): events.count() def test_update_pull_for_list_fields(self): @@ -347,24 +323,20 @@ class TestTransform(unittest.TestCase): word = Word(word="abc", index=1) update = transform.update(MainDoc, pull__content__text=word) - self.assertEqual( - update, {"$pull": {"content.text": SON([("word", u"abc"), ("index", 1)])}} - ) + assert update == { + "$pull": {"content.text": SON([("word", u"abc"), ("index", 1)])} + } update = 
transform.update(MainDoc, pull__content__heading="xyz") - self.assertEqual(update, {"$pull": {"content.heading": "xyz"}}) + assert update == {"$pull": {"content.heading": "xyz"}} update = transform.update(MainDoc, pull__content__text__word__in=["foo", "bar"]) - self.assertEqual( - update, {"$pull": {"content.text": {"word": {"$in": ["foo", "bar"]}}}} - ) + assert update == {"$pull": {"content.text": {"word": {"$in": ["foo", "bar"]}}}} update = transform.update( MainDoc, pull__content__text__word__nin=["foo", "bar"] ) - self.assertEqual( - update, {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}} - ) + assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}} if __name__ == "__main__": diff --git a/tests/queryset/test_visitor.py b/tests/queryset/test_visitor.py index acadabd4..a41f9278 100644 --- a/tests/queryset/test_visitor.py +++ b/tests/queryset/test_visitor.py @@ -7,6 +7,7 @@ from bson import ObjectId from mongoengine import * from mongoengine.errors import InvalidQueryError from mongoengine.queryset import Q +import pytest class TestQ(unittest.TestCase): @@ -35,10 +36,10 @@ class TestQ(unittest.TestCase): age = IntField() query = {"$or": [{"age": {"$gte": 18}}, {"name": "test"}]} - self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query) + assert (q1 | q2 | q3 | q4 | q5).to_query(Person) == query query = {"age": {"$gte": 18}, "name": "test"} - self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query) + assert (q1 & q2 & q3 & q4 & q5).to_query(Person) == query def test_q_with_dbref(self): """Ensure Q objects handle DBRefs correctly""" @@ -53,8 +54,8 @@ class TestQ(unittest.TestCase): user = User.objects.create() Post.objects.create(created_user=user) - self.assertEqual(Post.objects.filter(created_user=user).count(), 1) - self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1) + assert Post.objects.filter(created_user=user).count() == 1 + assert Post.objects.filter(Q(created_user=user)).count() == 1 def test_and_combination(self): """Ensure that Q-objects correctly AND together. @@ -65,12 +66,10 @@ class TestQ(unittest.TestCase): y = StringField() query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) - self.assertEqual(query, {"$and": [{"x": {"$lt": 7}}, {"x": {"$lt": 3}}]}) + assert query == {"$and": [{"x": {"$lt": 7}}, {"x": {"$lt": 3}}]} query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) - self.assertEqual( - query, {"$and": [{"y": "a"}, {"x": {"$lt": 7}}, {"x": {"$lt": 3}}]} - ) + assert query == {"$and": [{"y": "a"}, {"x": {"$lt": 7}}, {"x": {"$lt": 3}}]} # Check normal cases work without an error query = Q(x__lt=7) & Q(x__gt=3) @@ -78,7 +77,7 @@ class TestQ(unittest.TestCase): q1 = Q(x__lt=7) q2 = Q(x__gt=3) query = (q1 & q2).to_query(TestDoc) - self.assertEqual(query, {"x": {"$lt": 7, "$gt": 3}}) + assert query == {"x": {"$lt": 7, "$gt": 3}} # More complex nested example query = Q(x__lt=100) & Q(y__ne="NotMyString") @@ -87,7 +86,7 @@ class TestQ(unittest.TestCase): "x": {"$lt": 100, "$gt": -100}, "y": {"$ne": "NotMyString", "$in": ["a", "b", "c"]}, } - self.assertEqual(query.to_query(TestDoc), mongo_query) + assert query.to_query(TestDoc) == mongo_query def test_or_combination(self): """Ensure that Q-objects correctly OR together. 
@@ -99,7 +98,7 @@ class TestQ(unittest.TestCase): q1 = Q(x__lt=3) q2 = Q(x__gt=7) query = (q1 | q2).to_query(TestDoc) - self.assertEqual(query, {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]}) + assert query == {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]} def test_and_or_combination(self): """Ensure that Q-objects handle ANDing ORed components. @@ -113,15 +112,12 @@ class TestQ(unittest.TestCase): query = Q(x__gt=0) | Q(x__exists=False) query &= Q(x__lt=100) - self.assertEqual( - query.to_query(TestDoc), - { - "$and": [ - {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, - {"x": {"$lt": 100}}, - ] - }, - ) + assert query.to_query(TestDoc) == { + "$and": [ + {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, + {"x": {"$lt": 100}}, + ] + } q1 = Q(x__gt=0) | Q(x__exists=False) q2 = Q(x__lt=100) | Q(y=True) @@ -131,16 +127,13 @@ class TestQ(unittest.TestCase): TestDoc(x=10).save() TestDoc(y=True).save() - self.assertEqual( - query, - { - "$and": [ - {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, - {"$or": [{"x": {"$lt": 100}}, {"y": True}]}, - ] - }, - ) - self.assertEqual(2, TestDoc.objects(q1 & q2).count()) + assert query == { + "$and": [ + {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, + {"$or": [{"x": {"$lt": 100}}, {"y": True}]}, + ] + } + assert 2 == TestDoc.objects(q1 & q2).count() def test_or_and_or_combination(self): """Ensure that Q-objects handle ORing ANDed ORed components. :) @@ -160,26 +153,23 @@ class TestQ(unittest.TestCase): q2 = Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)) query = (q1 | q2).to_query(TestDoc) - self.assertEqual( - query, - { - "$or": [ - { - "$and": [ - {"x": {"$gt": 0}}, - {"$or": [{"y": True}, {"y": {"$exists": False}}]}, - ] - }, - { - "$and": [ - {"x": {"$lt": 100}}, - {"$or": [{"y": False}, {"y": {"$exists": False}}]}, - ] - }, - ] - }, - ) - self.assertEqual(2, TestDoc.objects(q1 | q2).count()) + assert query == { + "$or": [ + { + "$and": [ + {"x": {"$gt": 0}}, + {"$or": [{"y": True}, {"y": {"$exists": False}}]}, + ] + }, + { + "$and": [ + {"x": {"$lt": 100}}, + {"$or": [{"y": False}, {"y": {"$exists": False}}]}, + ] + }, + ] + } + assert 2 == TestDoc.objects(q1 | q2).count() def test_multiple_occurence_in_field(self): class Test(Document): @@ -192,8 +182,8 @@ class TestQ(unittest.TestCase): q3 = q1 & q2 query = q3.to_query(Test) - self.assertEqual(query["$and"][0], q1.to_query(Test)) - self.assertEqual(query["$and"][1], q2.to_query(Test)) + assert query["$and"][0] == q1.to_query(Test) + assert query["$and"][1] == q2.to_query(Test) def test_q_clone(self): class TestDoc(Document): @@ -207,15 +197,15 @@ class TestQ(unittest.TestCase): # Check normal cases work without an error test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3)) - self.assertEqual(test.count(), 3) + assert test.count() == 3 test2 = test.clone() - self.assertEqual(test2.count(), 3) - self.assertNotEqual(test2, test) + assert test2.count() == 3 + assert test2 != test test3 = test2.filter(x=6) - self.assertEqual(test3.count(), 1) - self.assertEqual(test.count(), 3) + assert test3.count() == 1 + assert test.count() == 3 def test_q(self): """Ensure that Q objects may be used to query for documents. 
@@ -252,19 +242,19 @@ class TestQ(unittest.TestCase): # Check ObjectId lookup works obj = BlogPost.objects(id=post1.id).first() - self.assertEqual(obj, post1) + assert obj == post1 # Check Q object combination with one does not exist q = BlogPost.objects(Q(title="Test 5") | Q(published=True)) posts = [post.id for post in q] published_posts = (post2, post3) - self.assertTrue(all(obj.id in posts for obj in published_posts)) + assert all(obj.id in posts for obj in published_posts) q = BlogPost.objects(Q(title="Test 1") | Q(published=True)) posts = [post.id for post in q] published_posts = (post1, post2, post3, post5, post6) - self.assertTrue(all(obj.id in posts for obj in published_posts)) + assert all(obj.id in posts for obj in published_posts) # Check Q object combination date = datetime.datetime(2010, 1, 10) @@ -272,9 +262,9 @@ class TestQ(unittest.TestCase): posts = [post.id for post in q] published_posts = (post1, post2, post3, post4) - self.assertTrue(all(obj.id in posts for obj in published_posts)) + assert all(obj.id in posts for obj in published_posts) - self.assertFalse(any(obj.id in posts for obj in [post5, post6])) + assert not any(obj.id in posts for obj in [post5, post6]) BlogPost.drop_collection() @@ -284,15 +274,15 @@ class TestQ(unittest.TestCase): self.Person(name="user3", age=30).save() self.Person(name="user4", age=40).save() - self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2) - self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3) + assert self.Person.objects(Q(age__in=[20])).count() == 2 + assert self.Person.objects(Q(age__in=[20, 30])).count() == 3 # Test invalid query objs - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): self.Person.objects("user1") # filter should fail, too - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): self.Person.objects.filter("user1") def test_q_regex(self): @@ -302,31 +292,31 @@ class TestQ(unittest.TestCase): person.save() obj = self.Person.objects(Q(name=re.compile("^Gui"))).first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(Q(name=re.compile("^gui"))).first() - self.assertEqual(obj, None) + assert obj == None obj = self.Person.objects(Q(name=re.compile("^gui", re.I))).first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(Q(name__not=re.compile("^bob"))).first() - self.assertEqual(obj, person) + assert obj == person obj = self.Person.objects(Q(name__not=re.compile("^Gui"))).first() - self.assertEqual(obj, None) + assert obj == None def test_q_repr(self): - self.assertEqual(repr(Q()), "Q(**{})") - self.assertEqual(repr(Q(name="test")), "Q(**{'name': 'test'})") + assert repr(Q()) == "Q(**{})" + assert repr(Q(name="test")) == "Q(**{'name': 'test'})" - self.assertEqual( - repr(Q(name="test") & Q(age__gte=18)), - "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))", + assert ( + repr(Q(name="test") & Q(age__gte=18)) + == "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))" ) - self.assertEqual( - repr(Q(name="test") | Q(age__gte=18)), - "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))", + assert ( + repr(Q(name="test") | Q(age__gte=18)) + == "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))" ) def test_q_lists(self): @@ -341,8 +331,8 @@ class TestQ(unittest.TestCase): BlogPost(tags=["python", "mongo"]).save() BlogPost(tags=["python"]).save() - self.assertEqual(BlogPost.objects(Q(tags="mongo")).count(), 1) - self.assertEqual(BlogPost.objects(Q(tags="python")).count(), 2) + assert 
BlogPost.objects(Q(tags="mongo")).count() == 1 + assert BlogPost.objects(Q(tags="python")).count() == 2 BlogPost.drop_collection() @@ -355,12 +345,12 @@ class TestQ(unittest.TestCase): pk = ObjectId() User(email="example@example.com", pk=pk).save() - self.assertEqual( - 1, - User.objects.filter(Q(email="example@example.com") | Q(name="John Doe")) + assert ( + 1 + == User.objects.filter(Q(email="example@example.com") | Q(name="John Doe")) .limit(2) .filter(pk=pk) - .count(), + .count() ) def test_chained_q_or_filtering(self): @@ -376,14 +366,12 @@ class TestQ(unittest.TestCase): Item(postables=[Post(name="a"), Post(name="c")]).save() Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save() - self.assertEqual( - Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2 + assert ( + Item.objects(Q(postables__name="a") & Q(postables__name="b")).count() == 2 ) - self.assertEqual( - Item.objects.filter(postables__name="a") - .filter(postables__name="b") - .count(), - 2, + assert ( + Item.objects.filter(postables__name="a").filter(postables__name="b").count() + == 2 ) diff --git a/tests/test_common.py b/tests/test_common.py index 28f0b992..6b6f18de 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,5 +1,7 @@ import unittest +import pytest + from mongoengine import Document from mongoengine.common import _import_class @@ -7,8 +9,8 @@ from mongoengine.common import _import_class class TestCommon(unittest.TestCase): def test__import_class(self): doc_cls = _import_class("Document") - self.assertIs(doc_cls, Document) + assert doc_cls is Document def test__import_class_raise_if_not_known(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _import_class("UnknownClass") diff --git a/tests/test_connection.py b/tests/test_connection.py index 1519a835..c73b67d1 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -29,6 +29,7 @@ from mongoengine.connection import ( get_connection, get_db, ) +import pytest def get_tz_awareness(connection): @@ -54,15 +55,15 @@ class ConnectionTest(unittest.TestCase): connect("mongoenginetest") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "mongoenginetest") + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" connect("mongoenginetest2", alias="testdb") conn = get_connection("testdb") - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) def test_connect_disconnect_works_properly(self): class History1(Document): @@ -82,31 +83,27 @@ class ConnectionTest(unittest.TestCase): h = History1(name="default").save() h1 = History2(name="db1").save() - self.assertEqual( - list(History1.objects().as_pymongo()), [{"_id": h.id, "name": "default"}] - ) - self.assertEqual( - list(History2.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}] - ) + assert list(History1.objects().as_pymongo()) == [ + {"_id": h.id, "name": "default"} + ] + assert list(History2.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}] disconnect("db1") disconnect("db2") - with self.assertRaises(ConnectionFailure): + with pytest.raises(ConnectionFailure): list(History1.objects().as_pymongo()) - with self.assertRaises(ConnectionFailure): + with pytest.raises(ConnectionFailure): list(History2.objects().as_pymongo()) 
connect("db1", alias="db1") connect("db2", alias="db2") - self.assertEqual( - list(History1.objects().as_pymongo()), [{"_id": h.id, "name": "default"}] - ) - self.assertEqual( - list(History2.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}] - ) + assert list(History1.objects().as_pymongo()) == [ + {"_id": h.id, "name": "default"} + ] + assert list(History2.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}] def test_connect_different_documents_to_different_database(self): class History(Document): @@ -132,39 +129,35 @@ class ConnectionTest(unittest.TestCase): h1 = History1(name="db1").save() h2 = History2(name="db2").save() - self.assertEqual(History._collection.database.name, DEFAULT_DATABASE_NAME) - self.assertEqual(History1._collection.database.name, "db1") - self.assertEqual(History2._collection.database.name, "db2") + assert History._collection.database.name == DEFAULT_DATABASE_NAME + assert History1._collection.database.name == "db1" + assert History2._collection.database.name == "db2" - self.assertEqual( - list(History.objects().as_pymongo()), [{"_id": h.id, "name": "default"}] - ) - self.assertEqual( - list(History1.objects().as_pymongo()), [{"_id": h1.id, "name": "db1"}] - ) - self.assertEqual( - list(History2.objects().as_pymongo()), [{"_id": h2.id, "name": "db2"}] - ) + assert list(History.objects().as_pymongo()) == [ + {"_id": h.id, "name": "default"} + ] + assert list(History1.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}] + assert list(History2.objects().as_pymongo()) == [{"_id": h2.id, "name": "db2"}] def test_connect_fails_if_connect_2_times_with_default_alias(self): connect("mongoenginetest") - with self.assertRaises(ConnectionFailure) as ctx_err: + with pytest.raises(ConnectionFailure) as ctx_err: connect("mongoenginetest2") - self.assertEqual( - "A different connection with alias `default` was already registered. Use disconnect() first", - str(ctx_err.exception), + assert ( + "A different connection with alias `default` was already registered. Use disconnect() first" + == str(ctx_err.exception) ) def test_connect_fails_if_connect_2_times_with_custom_alias(self): connect("mongoenginetest", alias="alias1") - with self.assertRaises(ConnectionFailure) as ctx_err: + with pytest.raises(ConnectionFailure) as ctx_err: connect("mongoenginetest2", alias="alias1") - self.assertEqual( - "A different connection with alias `alias1` was already registered. Use disconnect() first", - str(ctx_err.exception), + assert ( + "A different connection with alias `alias1` was already registered. 
Use disconnect() first" + == str(ctx_err.exception) ) def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way( @@ -175,25 +168,25 @@ class ConnectionTest(unittest.TestCase): db_alias = "alias1" connect(db=db_name, alias=db_alias, host="localhost", port=27017) - with self.assertRaises(ConnectionFailure): + with pytest.raises(ConnectionFailure): connect(host="mongodb://localhost:27017/%s" % db_name, alias=db_alias) def test_connect_passes_silently_connect_multiple_times_with_same_config(self): # test default connection to `test` connect() connect() - self.assertEqual(len(mongoengine.connection._connections), 1) + assert len(mongoengine.connection._connections) == 1 connect("test01", alias="test01") connect("test01", alias="test01") - self.assertEqual(len(mongoengine.connection._connections), 2) + assert len(mongoengine.connection._connections) == 2 connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02") connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02") - self.assertEqual(len(mongoengine.connection._connections), 3) + assert len(mongoengine.connection._connections) == 3 def test_connect_with_invalid_db_name(self): """Ensure that connect() method fails fast if db name is invalid """ - with self.assertRaises(InvalidName): + with pytest.raises(InvalidName): connect("mongomock://localhost") def test_connect_with_db_name_external(self): @@ -203,20 +196,20 @@ class ConnectionTest(unittest.TestCase): connect("$external") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "$external") + assert isinstance(db, pymongo.database.Database) + assert db.name == "$external" connect("$external", alias="testdb") conn = get_connection("testdb") - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) def test_connect_with_invalid_db_name_type(self): """Ensure that connect() method fails fast if db name has invalid type """ - with self.assertRaises(TypeError): + with pytest.raises(TypeError): non_string_db_name = ["e. g. 
list instead of a string"] connect(non_string_db_name) @@ -230,11 +223,11 @@ class ConnectionTest(unittest.TestCase): connect("mongoenginetest", host="mongomock://localhost") conn = get_connection() - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect("mongoenginetest2", host="mongomock://localhost", alias="testdb2") conn = get_connection("testdb2") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect( "mongoenginetest3", @@ -243,11 +236,11 @@ class ConnectionTest(unittest.TestCase): alias="testdb3", ) conn = get_connection("testdb3") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect("mongoenginetest4", is_mock=True, alias="testdb4") conn = get_connection("testdb4") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect( host="mongodb://localhost:27017/mongoenginetest5", @@ -255,11 +248,11 @@ class ConnectionTest(unittest.TestCase): alias="testdb5", ) conn = get_connection("testdb5") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect(host="mongomock://localhost:27017/mongoenginetest6", alias="testdb6") conn = get_connection("testdb6") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect( host="mongomock://localhost:27017/mongoenginetest7", @@ -267,7 +260,7 @@ class ConnectionTest(unittest.TestCase): alias="testdb7", ) conn = get_connection("testdb7") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) def test_default_database_with_mocking(self): """Ensure that the default database is correctly set when using mongomock. 
@@ -286,8 +279,8 @@ class ConnectionTest(unittest.TestCase): some_document = SomeDocument() # database won't exist until we save a document some_document.save() - self.assertEqual(conn.get_default_database().name, "mongoenginetest") - self.assertEqual(conn.list_database_names()[0], "mongoenginetest") + assert conn.get_default_database().name == "mongoenginetest" + assert conn.database_names()[0] == "mongoenginetest" def test_connect_with_host_list(self): """Ensure that the connect() method works when host is a list @@ -301,22 +294,22 @@ class ConnectionTest(unittest.TestCase): connect(host=["mongomock://localhost"]) conn = get_connection() - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect(host=["mongodb://localhost"], is_mock=True, alias="testdb2") conn = get_connection("testdb2") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect(host=["localhost"], is_mock=True, alias="testdb3") conn = get_connection("testdb3") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect( host=["mongomock://localhost:27017", "mongomock://localhost:27018"], alias="testdb4", ) conn = get_connection("testdb4") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect( host=["mongodb://localhost:27017", "mongodb://localhost:27018"], @@ -324,13 +317,13 @@ class ConnectionTest(unittest.TestCase): alias="testdb5", ) conn = get_connection("testdb5") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) connect( host=["localhost:27017", "localhost:27018"], is_mock=True, alias="testdb6" ) conn = get_connection("testdb6") - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) def test_disconnect_cleans_globals(self): """Ensure that the disconnect() method cleans the globals objects""" @@ -340,20 +333,20 @@ class ConnectionTest(unittest.TestCase): connect("mongoenginetest") - self.assertEqual(len(connections), 1) - self.assertEqual(len(dbs), 0) - self.assertEqual(len(connection_settings), 1) + assert len(connections) == 1 + assert len(dbs) == 0 + assert len(connection_settings) == 1 class TestDoc(Document): pass TestDoc.drop_collection() # triggers the db - self.assertEqual(len(dbs), 1) + assert len(dbs) == 1 disconnect() - self.assertEqual(len(connections), 0) - self.assertEqual(len(dbs), 0) - self.assertEqual(len(connection_settings), 0) + assert len(connections) == 0 + assert len(dbs) == 0 + assert len(connection_settings) == 0 def test_disconnect_cleans_cached_collection_attribute_in_document(self): """Ensure that the disconnect() method works properly""" @@ -362,22 +355,20 @@ class ConnectionTest(unittest.TestCase): class History(Document): pass - self.assertIsNone(History._collection) + assert History._collection is None History.drop_collection() History.objects.first() # will trigger the caching of _collection attribute - self.assertIsNotNone(History._collection) + assert History._collection is not None disconnect() - self.assertIsNone(History._collection) + assert History._collection is None - with self.assertRaises(ConnectionFailure) as ctx_err: + with pytest.raises(ConnectionFailure) as ctx_err: History.objects.first() - self.assertEqual( - "You have not defined a default connection", str(ctx_err.exception) - ) + assert "You have not defined a default connection" == str(ctx_err.exception) def 
test_connect_disconnect_works_on_same_document(self): """Ensure that the connect/disconnect works properly with a single Document""" @@ -399,7 +390,7 @@ class ConnectionTest(unittest.TestCase): disconnect() # Make sure save doesnt work at this stage - with self.assertRaises(ConnectionFailure): + with pytest.raises(ConnectionFailure): User(name="Wont work").save() # Save in db2 @@ -408,13 +399,13 @@ class ConnectionTest(unittest.TestCase): disconnect() db1_users = list(client[db1].user.find()) - self.assertEqual(db1_users, [{"_id": user1.id, "name": "John is in db1"}]) + assert db1_users == [{"_id": user1.id, "name": "John is in db1"}] db2_users = list(client[db2].user.find()) - self.assertEqual(db2_users, [{"_id": user2.id, "name": "Bob is in db2"}]) + assert db2_users == [{"_id": user2.id, "name": "Bob is in db2"}] def test_disconnect_silently_pass_if_alias_does_not_exist(self): connections = mongoengine.connection._connections - self.assertEqual(len(connections), 0) + assert len(connections) == 0 disconnect(alias="not_exist") def test_disconnect_all(self): @@ -437,26 +428,26 @@ class ConnectionTest(unittest.TestCase): History1.drop_collection() History1.objects.first() - self.assertIsNotNone(History._collection) - self.assertIsNotNone(History1._collection) + assert History._collection is not None + assert History1._collection is not None - self.assertEqual(len(connections), 2) - self.assertEqual(len(dbs), 2) - self.assertEqual(len(connection_settings), 2) + assert len(connections) == 2 + assert len(dbs) == 2 + assert len(connection_settings) == 2 disconnect_all() - self.assertIsNone(History._collection) - self.assertIsNone(History1._collection) + assert History._collection is None + assert History1._collection is None - self.assertEqual(len(connections), 0) - self.assertEqual(len(dbs), 0) - self.assertEqual(len(connection_settings), 0) + assert len(connections) == 0 + assert len(dbs) == 0 + assert len(connection_settings) == 0 - with self.assertRaises(ConnectionFailure): + with pytest.raises(ConnectionFailure): History.objects.first() - with self.assertRaises(ConnectionFailure): + with pytest.raises(ConnectionFailure): History1.objects.first() def test_disconnect_all_silently_pass_if_no_connection_exist(self): @@ -473,7 +464,7 @@ class ConnectionTest(unittest.TestCase): expected_connection.server_info() - self.assertEqual(expected_connection, actual_connection) + assert expected_connection == actual_connection def test_connect_uri(self): """Ensure that the connect() method works properly with URIs.""" @@ -490,11 +481,11 @@ class ConnectionTest(unittest.TestCase): ) conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "mongoenginetest") + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" c.admin.system.users.delete_many({}) c.mongoenginetest.system.users.delete_many({}) @@ -506,11 +497,11 @@ class ConnectionTest(unittest.TestCase): connect("mongoenginetest", host="mongodb://localhost/") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "mongoenginetest") + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" def 
test_connect_uri_default_db(self): """Ensure connect() defaults to the right database name if @@ -519,11 +510,11 @@ class ConnectionTest(unittest.TestCase): connect(host="mongodb://localhost/") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "test") + assert isinstance(db, pymongo.database.Database) + assert db.name == "test" def test_uri_without_credentials_doesnt_override_conn_settings(self): """Ensure connect() uses the username & password params if the URI @@ -536,7 +527,8 @@ class ConnectionTest(unittest.TestCase): # OperationFailure means that mongoengine attempted authentication # w/ the provided username/password and failed - that's the desired # behavior. If the MongoDB URI would override the credentials - self.assertRaises(OperationFailure, get_db) + with pytest.raises(OperationFailure): + get_db() def test_connect_uri_with_authsource(self): """Ensure that the connect() method works well with `authSource` @@ -554,7 +546,8 @@ class ConnectionTest(unittest.TestCase): alias="test1", host="mongodb://username2:password@localhost/mongoenginetest", ) - self.assertRaises(OperationFailure, test_conn.server_info) + with pytest.raises(OperationFailure): + test_conn.server_info() # Authentication succeeds with "authSource" authd_conn = connect( @@ -566,8 +559,8 @@ class ConnectionTest(unittest.TestCase): ), ) db = get_db("test2") - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "mongoenginetest") + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" # Clear all users authd_conn.admin.system.users.delete_many({}) @@ -577,13 +570,14 @@ class ConnectionTest(unittest.TestCase): """ register_connection("testdb", "mongoenginetest2") - self.assertRaises(ConnectionFailure, get_connection) + with pytest.raises(ConnectionFailure): + get_connection() conn = get_connection("testdb") - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db("testdb") - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "mongoenginetest2") + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest2" def test_register_connection_defaults(self): """Ensure that defaults are used when the host and port are None. 
@@ -591,18 +585,18 @@ class ConnectionTest(unittest.TestCase): register_connection("testdb", "mongoenginetest", host=None, port=None) conn = get_connection("testdb") - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) def test_connection_kwargs(self): """Ensure that connection kwargs get passed to pymongo.""" connect("mongoenginetest", alias="t1", tz_aware=True) conn = get_connection("t1") - self.assertTrue(get_tz_awareness(conn)) + assert get_tz_awareness(conn) connect("mongoenginetest2", alias="t2") conn = get_connection("t2") - self.assertFalse(get_tz_awareness(conn)) + assert not get_tz_awareness(conn) def test_connection_pool_via_kwarg(self): """Ensure we can specify a max connection pool size using @@ -613,7 +607,7 @@ class ConnectionTest(unittest.TestCase): conn = connect( "mongoenginetest", alias="max_pool_size_via_kwarg", **pool_size_kwargs ) - self.assertEqual(conn.max_pool_size, 100) + assert conn.max_pool_size == 100 def test_connection_pool_via_uri(self): """Ensure we can specify a max connection pool size using @@ -623,7 +617,7 @@ class ConnectionTest(unittest.TestCase): host="mongodb://localhost/test?maxpoolsize=100", alias="max_pool_size_via_uri", ) - self.assertEqual(conn.max_pool_size, 100) + assert conn.max_pool_size == 100 def test_write_concern(self): """Ensure write concern can be specified in connect() via @@ -642,18 +636,18 @@ class ConnectionTest(unittest.TestCase): """ c = connect(host="mongodb://localhost/test?replicaSet=local-rs") db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "test") + assert isinstance(db, pymongo.database.Database) + assert db.name == "test" def test_connect_with_replicaset_via_kwargs(self): """Ensure connect() works when specifying a replicaSet via the connection kwargs """ c = connect(replicaset="local-rs") - self.assertEqual(c._MongoClient__options.replica_set_name, "local-rs") + assert c._MongoClient__options.replica_set_name == "local-rs" db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, "test") + assert isinstance(db, pymongo.database.Database) + assert db.name == "test" def test_connect_tz_aware(self): connect("mongoenginetest", tz_aware=True) @@ -666,13 +660,13 @@ class ConnectionTest(unittest.TestCase): DateDoc(the_date=d).save() date_doc = DateDoc.objects.first() - self.assertEqual(d, date_doc.the_date) + assert d == date_doc.the_date def test_read_preference_from_parse(self): conn = connect( host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred" ) - self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED) + assert conn.read_preference == ReadPreference.SECONDARY_PREFERRED def test_multiple_connection_settings(self): connect("mongoenginetest", alias="t1", host="localhost") @@ -680,27 +674,27 @@ class ConnectionTest(unittest.TestCase): connect("mongoenginetest2", alias="t2", host="127.0.0.1") mongo_connections = mongoengine.connection._connections - self.assertEqual(len(mongo_connections.items()), 2) - self.assertIn("t1", mongo_connections.keys()) - self.assertIn("t2", mongo_connections.keys()) + assert len(mongo_connections.items()) == 2 + assert "t1" in mongo_connections.keys() + assert "t2" in mongo_connections.keys() # Handle PyMongo 3+ Async Connection # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. # Purposely not catching exception to fail test if thrown. 
mongo_connections["t1"].server_info() mongo_connections["t2"].server_info() - self.assertEqual(mongo_connections["t1"].address[0], "localhost") - self.assertEqual(mongo_connections["t2"].address[0], "127.0.0.1") + assert mongo_connections["t1"].address[0] == "localhost" + assert mongo_connections["t2"].address[0] == "127.0.0.1" def test_connect_2_databases_uses_same_client_if_only_dbname_differs(self): c1 = connect(alias="testdb1", db="testdb1") c2 = connect(alias="testdb2", db="testdb2") - self.assertIs(c1, c2) + assert c1 is c2 def test_connect_2_databases_uses_different_client_if_different_parameters(self): c1 = connect(alias="testdb1", db="testdb1", username="u1") c2 = connect(alias="testdb2", db="testdb2", username="u2") - self.assertIsNot(c1, c2) + assert c1 is not c2 if __name__ == "__main__": diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index 32e48a70..cf4dd100 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -10,6 +10,7 @@ from mongoengine.context_managers import ( switch_db, ) from mongoengine.pymongo_support import count_documents +import pytest class ContextManagersTest(unittest.TestCase): @@ -23,20 +24,20 @@ class ContextManagersTest(unittest.TestCase): Group.drop_collection() Group(name="hello - default").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() with switch_db(Group, "testdb-1") as Group: - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() Group(name="hello").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() Group.drop_collection() - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() def test_switch_collection_context_manager(self): connect("mongoenginetest") @@ -51,20 +52,20 @@ class ContextManagersTest(unittest.TestCase): Group.drop_collection() # drops in group1 Group(name="hello - group").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() with switch_collection(Group, "group1") as Group: - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() Group(name="hello - group1").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() Group.drop_collection() - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() def test_no_dereference_context_manager_object_id(self): """Ensure that DBRef items in ListFields aren't dereferenced. 
@@ -89,20 +90,20 @@ class ContextManagersTest(unittest.TestCase): Group(ref=user, members=User.objects, generic=user).save() with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields["members"]._auto_dereference) - self.assertFalse(NoDeRefGroup._fields["members"]._auto_dereference) + assert Group._fields["members"]._auto_dereference + assert not NoDeRefGroup._fields["members"]._auto_dereference with no_dereference(Group) as Group: group = Group.objects.first() for m in group.members: - self.assertNotIsInstance(m, User) - self.assertNotIsInstance(group.ref, User) - self.assertNotIsInstance(group.generic, User) + assert not isinstance(m, User) + assert not isinstance(group.ref, User) + assert not isinstance(group.generic, User) for m in group.members: - self.assertIsInstance(m, User) - self.assertIsInstance(group.ref, User) - self.assertIsInstance(group.generic, User) + assert isinstance(m, User) + assert isinstance(group.ref, User) + assert isinstance(group.generic, User) def test_no_dereference_context_manager_dbref(self): """Ensure that DBRef items in ListFields aren't dereferenced. @@ -127,18 +128,18 @@ class ContextManagersTest(unittest.TestCase): Group(ref=user, members=User.objects, generic=user).save() with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields["members"]._auto_dereference) - self.assertFalse(NoDeRefGroup._fields["members"]._auto_dereference) + assert Group._fields["members"]._auto_dereference + assert not NoDeRefGroup._fields["members"]._auto_dereference with no_dereference(Group) as Group: group = Group.objects.first() - self.assertTrue(all([not isinstance(m, User) for m in group.members])) - self.assertNotIsInstance(group.ref, User) - self.assertNotIsInstance(group.generic, User) + assert all([not isinstance(m, User) for m in group.members]) + assert not isinstance(group.ref, User) + assert not isinstance(group.generic, User) - self.assertTrue(all([isinstance(m, User) for m in group.members])) - self.assertIsInstance(group.ref, User) - self.assertIsInstance(group.generic, User) + assert all([isinstance(m, User) for m in group.members]) + assert isinstance(group.ref, User) + assert isinstance(group.generic, User) def test_no_sub_classes(self): class A(Document): @@ -159,32 +160,32 @@ class ContextManagersTest(unittest.TestCase): B(x=30).save() C(x=40).save() - self.assertEqual(A.objects.count(), 5) - self.assertEqual(B.objects.count(), 3) - self.assertEqual(C.objects.count(), 1) + assert A.objects.count() == 5 + assert B.objects.count() == 3 + assert C.objects.count() == 1 with no_sub_classes(A): - self.assertEqual(A.objects.count(), 2) + assert A.objects.count() == 2 for obj in A.objects: - self.assertEqual(obj.__class__, A) + assert obj.__class__ == A with no_sub_classes(B): - self.assertEqual(B.objects.count(), 2) + assert B.objects.count() == 2 for obj in B.objects: - self.assertEqual(obj.__class__, B) + assert obj.__class__ == B with no_sub_classes(C): - self.assertEqual(C.objects.count(), 1) + assert C.objects.count() == 1 for obj in C.objects: - self.assertEqual(obj.__class__, C) + assert obj.__class__ == C # Confirm context manager exit correctly - self.assertEqual(A.objects.count(), 5) - self.assertEqual(B.objects.count(), 3) - self.assertEqual(C.objects.count(), 1) + assert A.objects.count() == 5 + assert B.objects.count() == 3 + assert C.objects.count() == 1 def test_no_sub_classes_modification_to_document_class_are_temporary(self): class A(Document): @@ -194,27 +195,27 @@ class ContextManagersTest(unittest.TestCase): 
class B(A): z = IntField() - self.assertEqual(A._subclasses, ("A", "A.B")) + assert A._subclasses == ("A", "A.B") with no_sub_classes(A): - self.assertEqual(A._subclasses, ("A",)) - self.assertEqual(A._subclasses, ("A", "A.B")) + assert A._subclasses == ("A",) + assert A._subclasses == ("A", "A.B") - self.assertEqual(B._subclasses, ("A.B",)) + assert B._subclasses == ("A.B",) with no_sub_classes(B): - self.assertEqual(B._subclasses, ("A.B",)) - self.assertEqual(B._subclasses, ("A.B",)) + assert B._subclasses == ("A.B",) + assert B._subclasses == ("A.B",) def test_no_subclass_context_manager_does_not_swallow_exception(self): class User(Document): name = StringField() - with self.assertRaises(TypeError): + with pytest.raises(TypeError): with no_sub_classes(User): raise TypeError() def test_query_counter_does_not_swallow_exception(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): with query_counter() as q: raise TypeError() @@ -227,10 +228,10 @@ class ContextManagersTest(unittest.TestCase): try: NEW_LEVEL = 1 db.set_profiling_level(NEW_LEVEL) - self.assertEqual(db.profiling_level(), NEW_LEVEL) + assert db.profiling_level() == NEW_LEVEL with query_counter() as q: - self.assertEqual(db.profiling_level(), 2) - self.assertEqual(db.profiling_level(), NEW_LEVEL) + assert db.profiling_level() == 2 + assert db.profiling_level() == NEW_LEVEL except Exception: db.set_profiling_level( initial_profiling_level @@ -255,33 +256,31 @@ class ContextManagersTest(unittest.TestCase): counter = 0 with query_counter() as q: - self.assertEqual(q, counter) - self.assertEqual( - q, counter - ) # Ensures previous count query did not get counted + assert q == counter + assert q == counter # Ensures previous count query did not get counted for _ in range(10): issue_1_insert_query() counter += 1 - self.assertEqual(q, counter) + assert q == counter for _ in range(4): issue_1_find_query() counter += 1 - self.assertEqual(q, counter) + assert q == counter for _ in range(3): issue_1_count_query() counter += 1 - self.assertEqual(q, counter) + assert q == counter - self.assertEqual(int(q), counter) # test __int__ - self.assertEqual(repr(q), str(int(q))) # test __repr__ - self.assertGreater(q, -1) # test __gt__ - self.assertGreaterEqual(q, int(q)) # test __gte__ - self.assertNotEqual(q, -1) - self.assertLess(q, 1000) - self.assertLessEqual(q, int(q)) + assert int(q) == counter # test __int__ + assert repr(q) == str(int(q)) # test __repr__ + assert q > -1 # test __gt__ + assert q >= int(q) # test __gte__ + assert q != -1 + assert q < 1000 + assert q <= int(q) def test_query_counter_counts_getmore_queries(self): connect("mongoenginetest") @@ -296,9 +295,9 @@ class ContextManagersTest(unittest.TestCase): ) # first batch of documents contains 101 documents with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 list(collection.find()) - self.assertEqual(q, 2) # 1st select + 1 getmore + assert q == 2 # 1st select + 1 getmore def test_query_counter_ignores_particular_queries(self): connect("mongoenginetest") @@ -308,18 +307,18 @@ class ContextManagersTest(unittest.TestCase): collection.insert_many([{"test": "garbage %s" % i} for i in range(10)]) with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 cursor = collection.find() - self.assertEqual(q, 0) # cursor wasn't opened yet + assert q == 0 # cursor wasn't opened yet _ = next(cursor) # opens the cursor and fires the find query - self.assertEqual(q, 1) + assert q == 1 cursor.close() # issues a `killcursors` query that is ignored by 
the context - self.assertEqual(q, 1) + assert q == 1 _ = ( db.system.indexes.find_one() ) # queries on db.system.indexes are ignored as well - self.assertEqual(q, 1) + assert q == 1 if __name__ == "__main__": diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index ff7598be..3a6029c1 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,6 +1,8 @@ import unittest from six import iterkeys +import pytest + from mongoengine import Document from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict @@ -31,48 +33,48 @@ class TestBaseDict(unittest.TestCase): dict_items = {"k": "v"} doc = MyDoc() base_dict = BaseDict(dict_items, instance=doc, name="my_name") - self.assertIsInstance(base_dict._instance, Document) - self.assertEqual(base_dict._name, "my_name") - self.assertEqual(base_dict, dict_items) + assert isinstance(base_dict._instance, Document) + assert base_dict._name == "my_name" + assert base_dict == dict_items def test_setdefault_calls_mark_as_changed(self): base_dict = self._get_basedict({}) base_dict.setdefault("k", "v") - self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) + assert base_dict._instance._changed_fields == [base_dict._name] def test_popitems_calls_mark_as_changed(self): base_dict = self._get_basedict({"k": "v"}) - self.assertEqual(base_dict.popitem(), ("k", "v")) - self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) - self.assertFalse(base_dict) + assert base_dict.popitem() == ("k", "v") + assert base_dict._instance._changed_fields == [base_dict._name] + assert not base_dict def test_pop_calls_mark_as_changed(self): base_dict = self._get_basedict({"k": "v"}) - self.assertEqual(base_dict.pop("k"), "v") - self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) - self.assertFalse(base_dict) + assert base_dict.pop("k") == "v" + assert base_dict._instance._changed_fields == [base_dict._name] + assert not base_dict def test_pop_calls_does_not_mark_as_changed_when_it_fails(self): base_dict = self._get_basedict({"k": "v"}) - with self.assertRaises(KeyError): + with pytest.raises(KeyError): base_dict.pop("X") - self.assertFalse(base_dict._instance._changed_fields) + assert not base_dict._instance._changed_fields def test_clear_calls_mark_as_changed(self): base_dict = self._get_basedict({"k": "v"}) base_dict.clear() - self.assertEqual(base_dict._instance._changed_fields, ["my_name"]) - self.assertEqual(base_dict, {}) + assert base_dict._instance._changed_fields == ["my_name"] + assert base_dict == {} def test___delitem___calls_mark_as_changed(self): base_dict = self._get_basedict({"k": "v"}) del base_dict["k"] - self.assertEqual(base_dict._instance._changed_fields, ["my_name.k"]) - self.assertEqual(base_dict, {}) + assert base_dict._instance._changed_fields == ["my_name.k"] + assert base_dict == {} def test___getitem____KeyError(self): base_dict = self._get_basedict({}) - with self.assertRaises(KeyError): + with pytest.raises(KeyError): base_dict["new"] def test___getitem____simple_value(self): @@ -82,62 +84,62 @@ class TestBaseDict(unittest.TestCase): def test___getitem____sublist_gets_converted_to_BaseList(self): base_dict = self._get_basedict({"k": [0, 1, 2]}) sub_list = base_dict["k"] - self.assertEqual(sub_list, [0, 1, 2]) - self.assertIsInstance(sub_list, BaseList) - self.assertIs(sub_list._instance, base_dict._instance) - self.assertEqual(sub_list._name, "my_name.k") - self.assertEqual(base_dict._instance._changed_fields, []) + assert sub_list == [0, 
1, 2] + assert isinstance(sub_list, BaseList) + assert sub_list._instance is base_dict._instance + assert sub_list._name == "my_name.k" + assert base_dict._instance._changed_fields == [] # Challenge mark_as_changed from sublist sub_list[1] = None - self.assertEqual(base_dict._instance._changed_fields, ["my_name.k.1"]) + assert base_dict._instance._changed_fields == ["my_name.k.1"] def test___getitem____subdict_gets_converted_to_BaseDict(self): base_dict = self._get_basedict({"k": {"subk": "subv"}}) sub_dict = base_dict["k"] - self.assertEqual(sub_dict, {"subk": "subv"}) - self.assertIsInstance(sub_dict, BaseDict) - self.assertIs(sub_dict._instance, base_dict._instance) - self.assertEqual(sub_dict._name, "my_name.k") - self.assertEqual(base_dict._instance._changed_fields, []) + assert sub_dict == {"subk": "subv"} + assert isinstance(sub_dict, BaseDict) + assert sub_dict._instance is base_dict._instance + assert sub_dict._name == "my_name.k" + assert base_dict._instance._changed_fields == [] # Challenge mark_as_changed from subdict sub_dict["subk"] = None - self.assertEqual(base_dict._instance._changed_fields, ["my_name.k.subk"]) + assert base_dict._instance._changed_fields == ["my_name.k.subk"] def test_get_sublist_gets_converted_to_BaseList_just_like__getitem__(self): base_dict = self._get_basedict({"k": [0, 1, 2]}) sub_list = base_dict.get("k") - self.assertEqual(sub_list, [0, 1, 2]) - self.assertIsInstance(sub_list, BaseList) + assert sub_list == [0, 1, 2] + assert isinstance(sub_list, BaseList) def test_get_returns_the_same_as___getitem__(self): base_dict = self._get_basedict({"k": [0, 1, 2]}) get_ = base_dict.get("k") getitem_ = base_dict["k"] - self.assertEqual(get_, getitem_) + assert get_ == getitem_ def test_get_default(self): base_dict = self._get_basedict({}) sentinel = object() - self.assertEqual(base_dict.get("new"), None) - self.assertIs(base_dict.get("new", sentinel), sentinel) + assert base_dict.get("new") == None + assert base_dict.get("new", sentinel) is sentinel def test___setitem___calls_mark_as_changed(self): base_dict = self._get_basedict({}) base_dict["k"] = "v" - self.assertEqual(base_dict._instance._changed_fields, ["my_name.k"]) - self.assertEqual(base_dict, {"k": "v"}) + assert base_dict._instance._changed_fields == ["my_name.k"] + assert base_dict == {"k": "v"} def test_update_calls_mark_as_changed(self): base_dict = self._get_basedict({}) base_dict.update({"k": "v"}) - self.assertEqual(base_dict._instance._changed_fields, ["my_name"]) + assert base_dict._instance._changed_fields == ["my_name"] def test___setattr____not_tracked_by_changes(self): base_dict = self._get_basedict({}) base_dict.a_new_attr = "test" - self.assertEqual(base_dict._instance._changed_fields, []) + assert base_dict._instance._changed_fields == [] def test___delattr____tracked_by_changes(self): # This is probably a bug as __setattr__ is not tracked @@ -146,7 +148,7 @@ class TestBaseDict(unittest.TestCase): base_dict = self._get_basedict({}) base_dict.a_new_attr = "test" del base_dict.a_new_attr - self.assertEqual(base_dict._instance._changed_fields, ["my_name.a_new_attr"]) + assert base_dict._instance._changed_fields == ["my_name.a_new_attr"] class TestBaseList(unittest.TestCase): @@ -167,14 +169,14 @@ class TestBaseList(unittest.TestCase): list_items = [True] doc = MyDoc() base_list = BaseList(list_items, instance=doc, name="my_name") - self.assertIsInstance(base_list._instance, Document) - self.assertEqual(base_list._name, "my_name") - self.assertEqual(base_list, list_items) + assert 
isinstance(base_list._instance, Document) + assert base_list._name == "my_name" + assert base_list == list_items def test___iter__(self): values = [True, False, True, False] base_list = BaseList(values, instance=None, name="my_name") - self.assertEqual(values, list(base_list)) + assert values == list(base_list) def test___iter___allow_modification_while_iterating_withou_error(self): # regular list allows for this, thus this subclass must comply to that @@ -185,9 +187,9 @@ class TestBaseList(unittest.TestCase): def test_append_calls_mark_as_changed(self): base_list = self._get_baselist([]) - self.assertFalse(base_list._instance._changed_fields) + assert not base_list._instance._changed_fields base_list.append(True) - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test_subclass_append(self): # Due to the way mark_as_changed_wrapper is implemented @@ -200,85 +202,85 @@ class TestBaseList(unittest.TestCase): def test___getitem__using_simple_index(self): base_list = self._get_baselist([0, 1, 2]) - self.assertEqual(base_list[0], 0) - self.assertEqual(base_list[1], 1) - self.assertEqual(base_list[-1], 2) + assert base_list[0] == 0 + assert base_list[1] == 1 + assert base_list[-1] == 2 def test___getitem__using_slice(self): base_list = self._get_baselist([0, 1, 2]) - self.assertEqual(base_list[1:3], [1, 2]) - self.assertEqual(base_list[0:3:2], [0, 2]) + assert base_list[1:3] == [1, 2] + assert base_list[0:3:2] == [0, 2] def test___getitem___using_slice_returns_list(self): # Bug: using slice does not properly handles the instance # and mark_as_changed behaviour. base_list = self._get_baselist([0, 1, 2]) sliced = base_list[1:3] - self.assertEqual(sliced, [1, 2]) - self.assertIsInstance(sliced, list) - self.assertEqual(base_list._instance._changed_fields, []) + assert sliced == [1, 2] + assert isinstance(sliced, list) + assert base_list._instance._changed_fields == [] def test___getitem__sublist_returns_BaseList_bound_to_instance(self): base_list = self._get_baselist([[1, 2], [3, 4]]) sub_list = base_list[0] - self.assertEqual(sub_list, [1, 2]) - self.assertIsInstance(sub_list, BaseList) - self.assertIs(sub_list._instance, base_list._instance) - self.assertEqual(sub_list._name, "my_name.0") - self.assertEqual(base_list._instance._changed_fields, []) + assert sub_list == [1, 2] + assert isinstance(sub_list, BaseList) + assert sub_list._instance is base_list._instance + assert sub_list._name == "my_name.0" + assert base_list._instance._changed_fields == [] # Challenge mark_as_changed from sublist sub_list[1] = None - self.assertEqual(base_list._instance._changed_fields, ["my_name.0.1"]) + assert base_list._instance._changed_fields == ["my_name.0.1"] def test___getitem__subdict_returns_BaseList_bound_to_instance(self): base_list = self._get_baselist([{"subk": "subv"}]) sub_dict = base_list[0] - self.assertEqual(sub_dict, {"subk": "subv"}) - self.assertIsInstance(sub_dict, BaseDict) - self.assertIs(sub_dict._instance, base_list._instance) - self.assertEqual(sub_dict._name, "my_name.0") - self.assertEqual(base_list._instance._changed_fields, []) + assert sub_dict == {"subk": "subv"} + assert isinstance(sub_dict, BaseDict) + assert sub_dict._instance is base_list._instance + assert sub_dict._name == "my_name.0" + assert base_list._instance._changed_fields == [] # Challenge mark_as_changed from subdict sub_dict["subk"] = None - self.assertEqual(base_list._instance._changed_fields, ["my_name.0.subk"]) + assert 
base_list._instance._changed_fields == ["my_name.0.subk"] def test_extend_calls_mark_as_changed(self): base_list = self._get_baselist([]) base_list.extend([True]) - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test_insert_calls_mark_as_changed(self): base_list = self._get_baselist([]) base_list.insert(0, True) - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test_remove_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list.remove(True) - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test_remove_not_mark_as_changed_when_it_fails(self): base_list = self._get_baselist([True]) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): base_list.remove(False) - self.assertFalse(base_list._instance._changed_fields) + assert not base_list._instance._changed_fields def test_pop_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list.pop() - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test_reverse_calls_mark_as_changed(self): base_list = self._get_baselist([True, False]) base_list.reverse() - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test___delitem___calls_mark_as_changed(self): base_list = self._get_baselist([True]) del base_list[0] - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test___setitem___calls_with_full_slice_mark_as_changed(self): base_list = self._get_baselist([]) @@ -286,8 +288,8 @@ class TestBaseList(unittest.TestCase): 0, 1, ] # Will use __setslice__ under py2 and __setitem__ under py3 - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) - self.assertEqual(base_list, [0, 1]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [0, 1] def test___setitem___calls_with_partial_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) @@ -295,66 +297,66 @@ class TestBaseList(unittest.TestCase): 1, 0, ] # Will use __setslice__ under py2 and __setitem__ under py3 - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) - self.assertEqual(base_list, [1, 0, 2]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [1, 0, 2] def test___setitem___calls_with_step_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3 - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) - self.assertEqual(base_list, [-1, 1, -2]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [-1, 1, -2] def test___setitem___with_slice(self): base_list = self._get_baselist([0, 1, 2, 3, 4, 5]) base_list[0:6:2] = [None, None, None] - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) - self.assertEqual(base_list, [None, 1, None, 3, None, 5]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [None, 1, None, 3, None, 5] def test___setitem___item_0_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list[0] = False - self.assertEqual(base_list._instance._changed_fields, 
["my_name"]) - self.assertEqual(base_list, [False]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [False] def test___setitem___item_1_calls_mark_as_changed(self): base_list = self._get_baselist([True, True]) base_list[1] = False - self.assertEqual(base_list._instance._changed_fields, ["my_name.1"]) - self.assertEqual(base_list, [True, False]) + assert base_list._instance._changed_fields == ["my_name.1"] + assert base_list == [True, False] def test___delslice___calls_mark_as_changed(self): base_list = self._get_baselist([0, 1]) del base_list[0:1] - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) - self.assertEqual(base_list, [1]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [1] def test___iadd___calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list += [False] - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test___imul___calls_mark_as_changed(self): base_list = self._get_baselist([True]) - self.assertEqual(base_list._instance._changed_fields, []) + assert base_list._instance._changed_fields == [] base_list *= 2 - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test_sort_calls_not_marked_as_changed_when_it_fails(self): base_list = self._get_baselist([True]) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): base_list.sort(key=1) - self.assertEqual(base_list._instance._changed_fields, []) + assert base_list._instance._changed_fields == [] def test_sort_calls_mark_as_changed(self): base_list = self._get_baselist([True, False]) base_list.sort() - self.assertEqual(base_list._instance._changed_fields, ["my_name"]) + assert base_list._instance._changed_fields == ["my_name"] def test_sort_calls_with_key(self): base_list = self._get_baselist([1, 2, 11]) base_list.sort(key=lambda i: str(i)) - self.assertEqual(base_list, [1, 11, 2]) + assert base_list == [1, 11, 2] class TestStrictDict(unittest.TestCase): @@ -366,32 +368,32 @@ class TestStrictDict(unittest.TestCase): def test_init(self): d = self.dtype(a=1, b=1, c=1) - self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) + assert (d.a, d.b, d.c) == (1, 1, 1) def test_iterkeys(self): d = self.dtype(a=1) - self.assertEqual(list(iterkeys(d)), ["a"]) + assert list(iterkeys(d)) == ["a"] def test_len(self): d = self.dtype(a=1) - self.assertEqual(len(d), 1) + assert len(d) == 1 def test_pop(self): d = self.dtype(a=1) - self.assertIn("a", d) + assert "a" in d d.pop("a") - self.assertNotIn("a", d) + assert "a" not in d def test_repr(self): d = self.dtype(a=1, b=2, c=3) - self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}') + assert repr(d) == '{"a": 1, "b": 2, "c": 3}' # make sure quotes are escaped properly d = self.dtype(a='"', b="'", c="") - self.assertEqual(repr(d), '{"a": \'"\', "b": "\'", "c": \'\'}') + assert repr(d) == '{"a": \'"\', "b": "\'", "c": \'\'}' def test_init_fails_on_nonexisting_attrs(self): - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.dtype(a=1, b=2, d=3) def test_eq(self): @@ -403,45 +405,46 @@ class TestStrictDict(unittest.TestCase): h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1) i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2) - self.assertEqual(d, dd) - self.assertNotEqual(d, e) - self.assertNotEqual(d, f) - self.assertNotEqual(d, g) - self.assertNotEqual(f, d) - self.assertEqual(d, h) 
- self.assertNotEqual(d, i) + assert d == dd + assert d != e + assert d != f + assert d != g + assert f != d + assert d == h + assert d != i def test_setattr_getattr(self): d = self.dtype() d.a = 1 - self.assertEqual(d.a, 1) - self.assertRaises(AttributeError, getattr, d, "b") + assert d.a == 1 + with pytest.raises(AttributeError): + getattr(d, "b") def test_setattr_raises_on_nonexisting_attr(self): d = self.dtype() - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): d.x = 1 def test_setattr_getattr_special(self): d = self.strict_dict_class(["items"]) d.items = 1 - self.assertEqual(d.items, 1) + assert d.items == 1 def test_get(self): d = self.dtype(a=1) - self.assertEqual(d.get("a"), 1) - self.assertEqual(d.get("b", "bla"), "bla") + assert d.get("a") == 1 + assert d.get("b", "bla") == "bla" def test_items(self): d = self.dtype(a=1) - self.assertEqual(d.items(), [("a", 1)]) + assert d.items() == [("a", 1)] d = self.dtype(a=1, b=2) - self.assertEqual(d.items(), [("a", 1), ("b", 2)]) + assert d.items() == [("a", 1), ("b", 2)] def test_mappings_protocol(self): d = self.dtype(a=1, b=2) - self.assertEqual(dict(d), {"a": 1, "b": 2}) - self.assertEqual(dict(**d), {"a": 1, "b": 2}) + assert dict(d) == {"a": 1, "b": 2} + assert dict(**d) == {"a": 1, "b": 2} if __name__ == "__main__": diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 4730e2e3..b9d92883 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -42,37 +42,37 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 len(group_obj._data["members"]) - self.assertEqual(q, 1) + assert q == 1 len(group_obj.members) - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 User.drop_collection() Group.drop_collection() @@ -99,40 +99,40 @@ class FieldTest(unittest.TestCase): group.reload() # Confirm reload works with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data["members"]._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced # verifies that no additional queries gets executed # if we re-iterate over the ListField once it is # dereferenced [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data["members"]._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert 
q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 def test_list_item_dereference_orphan_dbref(self): """Ensure that orphan DBRef items in ListFields are dereferenced. @@ -159,21 +159,21 @@ class FieldTest(unittest.TestCase): # Group.members list is an orphan DBRef User.objects[0].delete() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data["members"]._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced # verifies that no additional queries gets executed # if we re-iterate over the ListField once it is # dereferenced [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data["members"]._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced User.drop_collection() Group.drop_collection() @@ -197,8 +197,8 @@ class FieldTest(unittest.TestCase): Group(members=User.objects).save() group = Group.objects.first() - self.assertEqual(Group._get_collection().find_one()["members"], [1]) - self.assertEqual(group.members, [user]) + assert Group._get_collection().find_one()["members"] == [1] + assert group.members == [user] def test_handle_old_style_references(self): """Ensure that DBRef items in ListFields are dereferenced. @@ -231,8 +231,8 @@ class FieldTest(unittest.TestCase): group.save() group = Group.objects.first() - self.assertEqual(group.members[0].name, "user 1") - self.assertEqual(group.members[-1].name, "String!") + assert group.members[0].name == "user 1" + assert group.members[-1].name == "String!" def test_migrate_references(self): """Example of migrating ReferenceField storage @@ -253,12 +253,12 @@ class FieldTest(unittest.TestCase): group = Group(author=user, members=[user]).save() raw_data = Group._get_collection().find_one() - self.assertIsInstance(raw_data["author"], DBRef) - self.assertIsInstance(raw_data["members"][0], DBRef) + assert isinstance(raw_data["author"], DBRef) + assert isinstance(raw_data["members"][0], DBRef) group = Group.objects.first() - self.assertEqual(group.author, user) - self.assertEqual(group.members, [user]) + assert group.author == user + assert group.members == [user] # Migrate the model definition class Group(Document): @@ -273,12 +273,12 @@ class FieldTest(unittest.TestCase): g.save() group = Group.objects.first() - self.assertEqual(group.author, user) - self.assertEqual(group.members, [user]) + assert group.author == user + assert group.members == [user] raw_data = Group._get_collection().find_one() - self.assertIsInstance(raw_data["author"], ObjectId) - self.assertIsInstance(raw_data["members"][0], ObjectId) + assert isinstance(raw_data["author"], ObjectId) + assert isinstance(raw_data["members"][0], ObjectId) def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. 
@@ -309,43 +309,43 @@ class FieldTest(unittest.TestCase): Employee(name="Funky Gibbon", boss=bill, friends=friends).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 peter = Employee.objects.with_id(peter.id) - self.assertEqual(q, 1) + assert q == 1 peter.boss - self.assertEqual(q, 2) + assert q == 2 peter.friends - self.assertEqual(q, 3) + assert q == 3 # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 peter = Employee.objects.with_id(peter.id).select_related() - self.assertEqual(q, 2) + assert q == 2 - self.assertEqual(peter.boss, bill) - self.assertEqual(q, 2) + assert peter.boss == bill + assert q == 2 - self.assertEqual(peter.friends, friends) - self.assertEqual(q, 2) + assert peter.friends == friends + assert q == 2 # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 employees = Employee.objects(boss=bill).select_related() - self.assertEqual(q, 2) + assert q == 2 for employee in employees: - self.assertEqual(employee.boss, bill) - self.assertEqual(q, 2) + assert employee.boss == bill + assert q == 2 - self.assertEqual(employee.friends, friends) - self.assertEqual(q, 2) + assert employee.friends == friends + assert q == 2 def test_list_of_lists_of_references(self): class User(Document): @@ -366,10 +366,10 @@ class FieldTest(unittest.TestCase): u3 = User.objects.create(name="u3") SimpleList.objects.create(users=[u1, u2, u3]) - self.assertEqual(SimpleList.objects.all()[0].users, [u1, u2, u3]) + assert SimpleList.objects.all()[0].users == [u1, u2, u3] Post.objects.create(user_lists=[[u1, u2], [u3]]) - self.assertEqual(Post.objects.all()[0].user_lists, [[u1, u2], [u3]]) + assert Post.objects.all()[0].user_lists == [[u1, u2], [u3]] def test_circular_reference(self): """Ensure you can handle circular references @@ -403,9 +403,7 @@ class FieldTest(unittest.TestCase): daughter.relations.append(self_rel) daughter.save() - self.assertEqual( - "[, ]", "%s" % Person.objects() - ) + assert "[, ]" == "%s" % Person.objects() def test_circular_reference_on_self(self): """Ensure you can handle circular references @@ -432,9 +430,7 @@ class FieldTest(unittest.TestCase): daughter.relations.append(daughter) daughter.save() - self.assertEqual( - "[, ]", "%s" % Person.objects() - ) + assert "[, ]" == "%s" % Person.objects() def test_circular_tree_reference(self): """Ensure you can handle circular references with more than one level @@ -473,9 +469,9 @@ class FieldTest(unittest.TestCase): anna.other.name = "Anna's friends" anna.save() - self.assertEqual( - "[, , , ]", - "%s" % Person.objects(), + assert ( + "[, , , ]" + == "%s" % Person.objects() ) def test_generic_reference(self): @@ -516,52 +512,52 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - 
self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ def test_generic_reference_orphan_dbref(self): """Ensure that generic orphan DBRef items in ListFields are dereferenced. @@ -604,18 +600,18 @@ class FieldTest(unittest.TestCase): # an orphan DBRef in the GenericReference ListField UserA.objects[0].delete() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) - self.assertTrue(group_obj._data["members"]._dereferenced) + assert q == 4 + assert group_obj._data["members"]._dereferenced [m for m in group_obj.members] - self.assertEqual(q, 4) - self.assertTrue(group_obj._data["members"]._dereferenced) + assert q == 4 + assert group_obj._data["members"]._dereferenced UserA.drop_collection() UserB.drop_collection() @@ -660,52 +656,52 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ UserA.drop_collection() UserB.drop_collection() @@ -735,43 +731,43 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, User) + assert isinstance(m, User) # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, User) + assert isinstance(m, User) # Queryset 
select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, User) + assert isinstance(m, User) User.drop_collection() Group.drop_collection() @@ -813,65 +809,65 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for k, m in iteritems(group_obj.members): - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for k, m in iteritems(group_obj.members): - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for k, m in iteritems(group_obj.members): - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ Group.objects.delete() Group().save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 1) - self.assertEqual(group_obj.members, {}) + assert q == 1 + assert group_obj.members == {} UserA.drop_collection() UserB.drop_collection() @@ -903,52 +899,52 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, UserA) + assert isinstance(m, UserA) # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, UserA) + assert isinstance(m, UserA) # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, UserA) + 
assert isinstance(m, UserA) UserA.drop_collection() Group.drop_collection() @@ -990,64 +986,64 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for k, m in iteritems(group_obj.members): - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for k, m in iteritems(group_obj.members): - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for k, m in iteritems(group_obj.members): - self.assertIn("User", m.__class__.__name__) + assert "User" in m.__class__.__name__ Group.objects.delete() Group().save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 1) + assert q == 1 UserA.drop_collection() UserB.drop_collection() @@ -1075,8 +1071,8 @@ class FieldTest(unittest.TestCase): root.save() root = root.reload() - self.assertEqual(root.children, [company]) - self.assertEqual(company.parents, [root]) + assert root.children == [company] + assert company.parents == [root] def test_dict_in_dbref_instance(self): class Person(Document): @@ -1102,8 +1098,8 @@ class FieldTest(unittest.TestCase): room_101.save() room = Room.objects.first().select_related() - self.assertEqual(room.staffs_with_position[0]["staff"], sarah) - self.assertEqual(room.staffs_with_position[1]["staff"], bob) + assert room.staffs_with_position[0]["staff"] == sarah + assert room.staffs_with_position[1]["staff"] == bob def test_document_reload_no_inheritance(self): class Foo(Document): @@ -1133,8 +1129,8 @@ class FieldTest(unittest.TestCase): foo.save() foo.reload() - self.assertEqual(type(foo.bar), Bar) - self.assertEqual(type(foo.baz), Baz) + assert type(foo.bar) == Bar + assert type(foo.baz) == Baz def test_document_reload_reference_integrity(self): """ @@ -1166,13 +1162,13 @@ class FieldTest(unittest.TestCase): concurrent_change_user = User.objects.get(id=1) concurrent_change_user.name = "new-name" concurrent_change_user.save() - self.assertNotEqual(user.name, "new-name") + assert user.name != "new-name" msg = Message.objects.get(id=1) msg.reload() - self.assertEqual(msg.topic, topic) - self.assertEqual(msg.author, user) - self.assertEqual(msg.author.name, "new-name") + assert msg.topic == topic + assert msg.author == user + assert msg.author.name == "new-name" def test_list_lookup_not_checked_in_map(self): """Ensure we dereference list data correctly @@ -1194,8 +1190,8 @@ class FieldTest(unittest.TestCase): Message(id=1, comments=[c1, c2]).save() msg = 
Message.objects.get(id=1) - self.assertEqual(0, msg.comments[0].id) - self.assertEqual(1, msg.comments[1].id) + assert 0 == msg.comments[0].id + assert 1 == msg.comments[1].id def test_list_item_dereference_dref_false_save_doesnt_cause_extra_queries(self): """Ensure that DBRef items in ListFields are dereferenced. @@ -1217,15 +1213,15 @@ class FieldTest(unittest.TestCase): Group(name="Test", members=User.objects).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 group_obj.name = "new test" group_obj.save() - self.assertEqual(q, 2) + assert q == 2 def test_list_item_dereference_dref_true_save_doesnt_cause_extra_queries(self): """Ensure that DBRef items in ListFields are dereferenced. @@ -1247,15 +1243,15 @@ class FieldTest(unittest.TestCase): Group(name="Test", members=User.objects).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 group_obj.name = "new test" group_obj.save() - self.assertEqual(q, 2) + assert q == 2 def test_generic_reference_save_doesnt_cause_extra_queries(self): class UserA(Document): @@ -1287,15 +1283,15 @@ class FieldTest(unittest.TestCase): Group(name="test", members=members).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 group_obj.name = "new test" group_obj.save() - self.assertEqual(q, 2) + assert q == 2 def test_objectid_reference_across_databases(self): # mongoenginetest - Is default connection alias from setUp() @@ -1319,10 +1315,10 @@ class FieldTest(unittest.TestCase): # Can't use query_counter across databases - so test the _data object book = Book.objects.first() - self.assertNotIsInstance(book._data["author"], User) + assert not isinstance(book._data["author"], User) book.select_related() - self.assertIsInstance(book._data["author"], User) + assert isinstance(book._data["author"], User) def test_non_ascii_pk(self): """ @@ -1346,7 +1342,7 @@ class FieldTest(unittest.TestCase): BrandGroup(title="top_brands", brands=[brand1, brand2]).save() brand_groups = BrandGroup.objects().all() - self.assertEqual(2, len([brand for bg in brand_groups for brand in bg.brands])) + assert 2 == len([brand for bg in brand_groups for brand in bg.brands]) def test_dereferencing_embedded_listfield_referencefield(self): class Tag(Document): @@ -1370,7 +1366,7 @@ class FieldTest(unittest.TestCase): Page(tags=[tag], posts=[post]).save() page = Page.objects.first() - self.assertEqual(page.tags[0], page.posts[0].tags[0]) + assert page.tags[0] == page.posts[0].tags[0] def test_select_related_follows_embedded_referencefields(self): class Song(Document): @@ -1390,12 +1386,12 @@ class FieldTest(unittest.TestCase): playlist = Playlist.objects.create(items=items) with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 playlist = Playlist.objects.first().select_related() songs = [item.song for item in playlist.items] - self.assertEqual(q, 2) + assert q == 2 if __name__ == "__main__": diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py index e92f3d09..c1ea407c 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -39,7 +39,7 @@ class ConnectionTest(unittest.TestCase): # really??? 
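# Illustrative sketch (editor's addition, not part of the surrounding patch):
# the rewrite applied throughout these tests replaces unittest's assert*
# helpers with bare `assert` statements, relying on pytest's assertion
# rewriting for readable failure output.  `q` and `m` below are stand-ins,
# not the real query_counter value or dereferenced member documents.


class _UserStub(object):
    pass


def _illustrate_assert_rewrite():
    q = 4
    m = _UserStub()

    # Before:  self.assertEqual(q, 4)
    assert q == 4
    # Before:  self.assertIn("User", m.__class__.__name__)
    assert "User" in m.__class__.__name__
    # Before:  self.assertIsInstance(m, _UserStub)
    assert isinstance(m, _UserStub)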
return - self.assertEqual(conn.read_preference, READ_PREF) + assert conn.read_preference == READ_PREF if __name__ == "__main__": diff --git a/tests/test_signals.py b/tests/test_signals.py index 1d0607d7..b217712b 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -245,7 +245,7 @@ class SignalTests(unittest.TestCase): # Note that there is a chance that the following assert fails in case # some receivers (eventually created in other tests) # gets garbage collected (https://pythonhosted.org/blinker/#blinker.base.Signal.connect) - self.assertEqual(self.pre_signals, post_signals) + assert self.pre_signals == post_signals def test_model_signals(self): """ Model saves should throw some signals. """ @@ -267,97 +267,76 @@ class SignalTests(unittest.TestCase): self.get_signal_output(lambda: None) # eliminate signal output a1 = self.Author.objects(name="Bill Shakespeare")[0] - self.assertEqual( - self.get_signal_output(create_author), - [ - "pre_init signal, Author", - {"name": "Bill Shakespeare"}, - "post_init signal, Bill Shakespeare, document._created = True", - ], - ) + assert self.get_signal_output(create_author) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + ] a1 = self.Author(name="Bill Shakespeare") - self.assertEqual( - self.get_signal_output(a1.save), - [ - "pre_save signal, Bill Shakespeare", - {}, - "pre_save_post_validation signal, Bill Shakespeare", - "Is created", - {}, - "post_save signal, Bill Shakespeare", - "post_save dirty keys, ['name']", - "Is created", - {}, - ], - ) + assert self.get_signal_output(a1.save) == [ + "pre_save signal, Bill Shakespeare", + {}, + "pre_save_post_validation signal, Bill Shakespeare", + "Is created", + {}, + "post_save signal, Bill Shakespeare", + "post_save dirty keys, ['name']", + "Is created", + {}, + ] a1.reload() a1.name = "William Shakespeare" - self.assertEqual( - self.get_signal_output(a1.save), - [ - "pre_save signal, William Shakespeare", - {}, - "pre_save_post_validation signal, William Shakespeare", - "Is updated", - {}, - "post_save signal, William Shakespeare", - "post_save dirty keys, ['name']", - "Is updated", - {}, - ], - ) + assert self.get_signal_output(a1.save) == [ + "pre_save signal, William Shakespeare", + {}, + "pre_save_post_validation signal, William Shakespeare", + "Is updated", + {}, + "post_save signal, William Shakespeare", + "post_save dirty keys, ['name']", + "Is updated", + {}, + ] - self.assertEqual( - self.get_signal_output(a1.delete), - [ - "pre_delete signal, William Shakespeare", - {}, - "post_delete signal, William Shakespeare", - {}, - ], - ) + assert self.get_signal_output(a1.delete) == [ + "pre_delete signal, William Shakespeare", + {}, + "post_delete signal, William Shakespeare", + {}, + ] - self.assertEqual( - self.get_signal_output(load_existing_author), - [ - "pre_init signal, Author", - {"id": 2, "name": "Bill Shakespeare"}, - "post_init signal, Bill Shakespeare, document._created = False", - ], - ) + assert self.get_signal_output(load_existing_author) == [ + "pre_init signal, Author", + {"id": 2, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + ] - self.assertEqual( - self.get_signal_output(bulk_create_author_with_load), - [ - "pre_init signal, Author", - {"name": "Bill Shakespeare"}, - "post_init signal, Bill Shakespeare, document._created = True", - "pre_bulk_insert signal, []", - {}, - "pre_init signal, Author", - {"id": 3, "name": "Bill Shakespeare"}, - 
"post_init signal, Bill Shakespeare, document._created = False", - "post_bulk_insert signal, []", - "Is loaded", - {}, - ], - ) + assert self.get_signal_output(bulk_create_author_with_load) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {}, + "pre_init signal, Author", + {"id": 3, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + "post_bulk_insert signal, []", + "Is loaded", + {}, + ] - self.assertEqual( - self.get_signal_output(bulk_create_author_without_load), - [ - "pre_init signal, Author", - {"name": "Bill Shakespeare"}, - "post_init signal, Bill Shakespeare, document._created = True", - "pre_bulk_insert signal, []", - {}, - "post_bulk_insert signal, []", - "Not loaded", - {}, - ], - ) + assert self.get_signal_output(bulk_create_author_without_load) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {}, + "post_bulk_insert signal, []", + "Not loaded", + {}, + ] def test_signal_kwargs(self): """ Make sure signal_kwargs is passed to signals calls. """ @@ -367,83 +346,74 @@ class SignalTests(unittest.TestCase): a.save(signal_kwargs={"live": True, "die": False}) a.delete(signal_kwargs={"live": False, "die": True}) - self.assertEqual( - self.get_signal_output(live_and_let_die), - [ - "pre_init signal, Author", - {"name": "Bill Shakespeare"}, - "post_init signal, Bill Shakespeare, document._created = True", - "pre_save signal, Bill Shakespeare", - {"die": False, "live": True}, - "pre_save_post_validation signal, Bill Shakespeare", - "Is created", - {"die": False, "live": True}, - "post_save signal, Bill Shakespeare", - "post_save dirty keys, ['name']", - "Is created", - {"die": False, "live": True}, - "pre_delete signal, Bill Shakespeare", - {"die": True, "live": False}, - "post_delete signal, Bill Shakespeare", - {"die": True, "live": False}, - ], - ) + assert self.get_signal_output(live_and_let_die) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_save signal, Bill Shakespeare", + {"die": False, "live": True}, + "pre_save_post_validation signal, Bill Shakespeare", + "Is created", + {"die": False, "live": True}, + "post_save signal, Bill Shakespeare", + "post_save dirty keys, ['name']", + "Is created", + {"die": False, "live": True}, + "pre_delete signal, Bill Shakespeare", + {"die": True, "live": False}, + "post_delete signal, Bill Shakespeare", + {"die": True, "live": False}, + ] def bulk_create_author(): a1 = self.Author(name="Bill Shakespeare") self.Author.objects.insert([a1], signal_kwargs={"key": True}) - self.assertEqual( - self.get_signal_output(bulk_create_author), - [ - "pre_init signal, Author", - {"name": "Bill Shakespeare"}, - "post_init signal, Bill Shakespeare, document._created = True", - "pre_bulk_insert signal, []", - {"key": True}, - "pre_init signal, Author", - {"id": 2, "name": "Bill Shakespeare"}, - "post_init signal, Bill Shakespeare, document._created = False", - "post_bulk_insert signal, []", - "Is loaded", - {"key": True}, - ], - ) + assert self.get_signal_output(bulk_create_author) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {"key": True}, + "pre_init signal, Author", + {"id": 
2, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + "post_bulk_insert signal, []", + "Is loaded", + {"key": True}, + ] def test_queryset_delete_signals(self): """ Queryset delete should throw some signals. """ self.Another(name="Bill Shakespeare").save() - self.assertEqual( - self.get_signal_output(self.Another.objects.delete), - [ - "pre_delete signal, Bill Shakespeare", - {}, - "post_delete signal, Bill Shakespeare", - {}, - ], - ) + assert self.get_signal_output(self.Another.objects.delete) == [ + "pre_delete signal, Bill Shakespeare", + {}, + "post_delete signal, Bill Shakespeare", + {}, + ] def test_signals_with_explicit_doc_ids(self): """ Model saves must have a created flag the first time.""" ei = self.ExplicitId(id=123) # post save must received the created flag, even if there's already # an object id present - self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) + assert self.get_signal_output(ei.save) == ["Is created"] # second time, it must be an update - self.assertEqual(self.get_signal_output(ei.save), ["Is updated"]) + assert self.get_signal_output(ei.save) == ["Is updated"] def test_signals_with_switch_collection(self): ei = self.ExplicitId(id=123) ei.switch_collection("explicit__1") - self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) + assert self.get_signal_output(ei.save) == ["Is created"] ei.switch_collection("explicit__1") - self.assertEqual(self.get_signal_output(ei.save), ["Is updated"]) + assert self.get_signal_output(ei.save) == ["Is updated"] ei.switch_collection("explicit__1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) + assert self.get_signal_output(ei.save) == ["Is created"] ei.switch_collection("explicit__1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) + assert self.get_signal_output(ei.save) == ["Is created"] def test_signals_with_switch_db(self): connect("mongoenginetest") @@ -451,14 +421,14 @@ class SignalTests(unittest.TestCase): ei = self.ExplicitId(id=123) ei.switch_db("testdb-1") - self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) + assert self.get_signal_output(ei.save) == ["Is created"] ei.switch_db("testdb-1") - self.assertEqual(self.get_signal_output(ei.save), ["Is updated"]) + assert self.get_signal_output(ei.save) == ["Is updated"] ei.switch_db("testdb-1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) + assert self.get_signal_output(ei.save) == ["Is created"] ei.switch_db("testdb-1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ["Is created"]) + assert self.get_signal_output(ei.save) == ["Is created"] def test_signals_bulk_insert(self): def bulk_set_active_post(): @@ -470,16 +440,13 @@ class SignalTests(unittest.TestCase): self.Post.objects.insert(posts) results = self.get_signal_output(bulk_set_active_post) - self.assertEqual( - results, - [ - "pre_bulk_insert signal, [(, {'active': False}), (, {'active': False}), (, {'active': False})]", - {}, - "post_bulk_insert signal, [(, {'active': True}), (, {'active': True}), (, {'active': True})]", - "Is loaded", - {}, - ], - ) + assert results == [ + "pre_bulk_insert signal, [(, {'active': False}), (, {'active': False}), (, {'active': False})]", + {}, + "post_bulk_insert signal, [(, {'active': True}), (, {'active': True}), (, {'active': True})]", + "Is loaded", + {}, + ] if __name__ == "__main__": diff --git a/tests/test_utils.py b/tests/test_utils.py index 
897c19b2..ccb44aac 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -2,6 +2,7 @@ import re import unittest from mongoengine.base.utils import LazyRegexCompiler +import pytest signal_output = [] @@ -12,21 +13,21 @@ class LazyRegexCompilerTest(unittest.TestCase): EMAIL_REGEX = LazyRegexCompiler("@", flags=32) descriptor = UserEmail.__dict__["EMAIL_REGEX"] - self.assertIsNone(descriptor._compiled_regex) + assert descriptor._compiled_regex is None regex = UserEmail.EMAIL_REGEX - self.assertEqual(regex, re.compile("@", flags=32)) - self.assertEqual(regex.search("user@domain.com").group(), "@") + assert regex == re.compile("@", flags=32) + assert regex.search("user@domain.com").group() == "@" user_email = UserEmail() - self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX) + assert user_email.EMAIL_REGEX is UserEmail.EMAIL_REGEX def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self): class UserEmail(object): EMAIL_REGEX = LazyRegexCompiler("@") user_email = UserEmail() - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): user_email.EMAIL_REGEX = re.compile("@") def test_lazy_regex_compiler_verify_can_override_class_attr(self): @@ -34,6 +35,4 @@ class LazyRegexCompilerTest(unittest.TestCase): EMAIL_REGEX = LazyRegexCompiler("@") UserEmail.EMAIL_REGEX = re.compile("cookies") - self.assertEqual( - UserEmail.EMAIL_REGEX.search("Cake & cookies").group(), "cookies" - ) + assert UserEmail.EMAIL_REGEX.search("Cake & cookies").group() == "cookies" From 3e764d068c2b09c500b6226505e662389e6427b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 31 Aug 2019 22:40:54 +0300 Subject: [PATCH 078/216] fix remaining assertRaises --- tests/document/test_indexes.py | 4 +- tests/document/test_inheritance.py | 10 ++--- tests/document/test_instance.py | 36 +++++++-------- tests/fields/test_dict_field.py | 8 ++-- tests/fields/test_email_field.py | 5 ++- tests/fields/test_embedded_document_field.py | 20 ++++----- tests/fields/test_fields.py | 46 +++++++++----------- tests/fields/test_url_field.py | 4 +- tests/queryset/test_queryset.py | 11 +++-- tests/test_connection.py | 12 ++--- 10 files changed, 76 insertions(+), 80 deletions(-) diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index cc1aae52..6c31054a 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -595,12 +595,12 @@ class TestIndexes(unittest.TestCase): Blog.drop_collection() - with pytest.raises(OperationFailure) as ctx_err: + with pytest.raises(OperationFailure) as exc_info: Blog(id="garbage").save() # One of the errors below should happen. Which one depends on the # PyMongo version and dict order. 
- err_msg = str(ctx_err.exception) + err_msg = str(exc_info.value) assert any( [ "The field 'unique' is not valid for an _id index specification" diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index 6a913b3e..3e515653 100644 --- a/tests/document/test_inheritance.py +++ b/tests/document/test_inheritance.py @@ -335,13 +335,13 @@ class TestInheritance(MongoDBTestCase): name = StringField() # can't inherit because Animal didn't explicitly allow inheritance - with pytest.raises(ValueError) as cm: + with pytest.raises( + ValueError, match="Document Animal may not be subclassed" + ) as exc_info: class Dog(Animal): pass - assert "Document Animal may not be subclassed" in str(cm.exception) - # Check that _cls etc aren't present on simple documents dog = Animal(name="dog").save() assert dog.to_mongo().keys() == ["_id", "name"] @@ -358,13 +358,13 @@ class TestInheritance(MongoDBTestCase): name = StringField() meta = {"allow_inheritance": True} - with pytest.raises(ValueError) as cm: + with pytest.raises(ValueError) as exc_info: class Mammal(Animal): meta = {"allow_inheritance": False} assert ( - str(cm.exception) + str(exc_info.value) == 'Only direct subclasses of Document may set "allow_inheritance" to False' ) diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 01dc492b..c7bc113e 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -350,14 +350,11 @@ class TestInstance(MongoDBTestCase): name = StringField() meta = {"allow_inheritance": True} - with pytest.raises(ValueError) as e: + with pytest.raises(ValueError, match="Cannot override primary key field") as e: class EmailUser(User): email = StringField(primary_key=True) - exc = e.exception - assert str(exc) == "Cannot override primary key field" - def test_custom_id_field_is_required(self): """Ensure the custom primary key field is required.""" @@ -365,10 +362,9 @@ class TestInstance(MongoDBTestCase): username = StringField(primary_key=True) name = StringField() - with pytest.raises(ValidationError) as e: + with pytest.raises(ValidationError) as exc_info: User(name="test").save() - exc = e.exception - assert "Field is required: ['username']" in str(exc) + assert "Field is required: ['username']" in str(exc_info.value) def test_document_not_registered(self): class Place(Document): @@ -870,12 +866,12 @@ class TestInstance(MongoDBTestCase): t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) - with pytest.raises(ValidationError) as cm: + with pytest.raises(ValidationError) as exc_info: t.save() expected_msg = "Value of z != x + y" - assert expected_msg in cm.exception.message - assert cm.exception.to_dict() == {"doc": {"__all__": expected_msg}} + assert expected_msg in str(exc_info.value) + assert exc_info.value.to_dict() == {"doc": {"__all__": expected_msg}} t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() assert t.doc.z == 35 @@ -3208,43 +3204,47 @@ class TestInstance(MongoDBTestCase): def test_positional_creation(self): """Document cannot be instantiated using positional arguments.""" - with pytest.raises(TypeError) as e: + with pytest.raises(TypeError) as exc_info: person = self.Person("Test User", 42) + expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." 
) - assert str(e.exception) == expected_msg + assert str(exc_info.value) == expected_msg def test_mixed_creation(self): """Document cannot be instantiated using mixed arguments.""" - with pytest.raises(TypeError) as e: + with pytest.raises(TypeError) as exc_info: person = self.Person("Test User", age=42) + expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." ) - assert str(e.exception) == expected_msg + assert str(exc_info.value) == expected_msg def test_positional_creation_embedded(self): """Embedded document cannot be created using positional arguments.""" - with pytest.raises(TypeError) as e: + with pytest.raises(TypeError) as exc_info: job = self.Job("Test Job", 4) + expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." ) - assert str(e.exception) == expected_msg + assert str(exc_info.value) == expected_msg def test_mixed_creation_embedded(self): """Embedded document cannot be created using mixed arguments.""" - with pytest.raises(TypeError) as e: + with pytest.raises(TypeError) as exc_info: job = self.Job("Test Job", years=4) + expected_msg = ( "Instantiating a document with positional arguments is not " "supported. Please use `field_name=value` keyword arguments." ) - assert str(e.exception) == expected_msg + assert str(exc_info.value) == expected_msg def test_data_contains_id_field(self): """Ensure that asking for _data returns 'id'.""" diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index 56df682f..7dda2a9c 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -270,10 +270,12 @@ class TestDictField(MongoDBTestCase): embed = Embedded(name="garbage") doc = DictFieldTest(dictionary=embed) - with pytest.raises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: doc.validate() - assert "'dictionary'" in str(ctx_err.exception) - assert "Only dictionaries may be used in a DictField" in str(ctx_err.exception) + + error_msg = str(exc_info.value) + assert "'dictionary'" in error_msg + assert "Only dictionaries may be used in a DictField" in error_msg def test_atomic_update_dict_field(self): """Ensure that the entire DictField can be atomically updated.""" diff --git a/tests/fields/test_email_field.py b/tests/fields/test_email_field.py index b8d3d169..902a7c42 100644 --- a/tests/fields/test_email_field.py +++ b/tests/fields/test_email_field.py @@ -88,9 +88,10 @@ class TestEmailField(MongoDBTestCase): invalid_idn = ".google.com" user = User(email="me@%s" % invalid_idn) - with pytest.raises(ValidationError) as ctx_err: + + with pytest.raises(ValidationError) as exc_info: user.validate() - assert "domain failed IDN encoding" in str(ctx_err.exception) + assert "domain failed IDN encoding" in str(exc_info.value) def test_email_field_ip_domain(self): class User(Document): diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index 4fcf6bf1..9e6871cc 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -36,11 +36,11 @@ class TestEmbeddedDocumentField(MongoDBTestCase): name = StringField() emb = EmbeddedDocumentField("MyDoc") - with pytest.raises(ValidationError) as ctx: + with pytest.raises(ValidationError) as exc_info: emb.document_type assert ( "Invalid embedded document class provided to an EmbeddedDocumentField" - in 
str(ctx.exception) + in str(exc_info.value) ) def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self): @@ -72,9 +72,9 @@ class TestEmbeddedDocumentField(MongoDBTestCase): p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save() # Test non exiting attribute - with pytest.raises(InvalidQueryError) as ctx_err: + with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' + assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -108,9 +108,9 @@ class TestEmbeddedDocumentField(MongoDBTestCase): p.save() # Test non exiting attribute - with pytest.raises(InvalidQueryError) as ctx_err: + with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' + assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id @@ -316,9 +316,9 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): p2 = Person(settings=NonAdminSettings(foo2="bar2")).save() # Test non exiting attribute - with pytest.raises(InvalidQueryError) as ctx_err: + with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' + assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -344,9 +344,9 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): p.save() # Test non exiting attribute - with pytest.raises(InvalidQueryError) as ctx_err: + with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(ctx_err.exception) == u'Cannot resolve field "notexist"' + assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index b27d95d2..0ce65087 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -96,13 +96,13 @@ class TestField(MongoDBTestCase): "it should raise a ValidationError if validation fails" ) - with pytest.raises(DeprecatedError) as ctx_err: + with pytest.raises(DeprecatedError) as exc_info: Person(name="").validate() - assert str(ctx_err.exception) == error + assert str(exc_info.value) == error - with pytest.raises(DeprecatedError) as ctx_err: + with pytest.raises(DeprecatedError) as exc_info: Person(name="").save() - assert str(ctx_err.exception) == error + assert str(exc_info.value) == error def test_custom_field_validation_raise_validation_error(self): def _not_empty(z): @@ -114,16 +114,10 @@ class TestField(MongoDBTestCase): Person.drop_collection() - with pytest.raises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: Person(name="").validate() assert "ValidationError (Person:None) (cantbeempty: ['name'])" == str( - ctx_err.exception - ) - - with pytest.raises(ValidationError): - Person(name="").save() - assert "ValidationError (Person:None) (cantbeempty: ['name'])" == str( - ctx_err.exception + exc_info.value ) 
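# Illustrative sketch (editor's addition, not part of the surrounding patch):
# the assertRaises conversion these commits apply.  unittest exposes the
# raised exception as `ctx.exception`; pytest.raises yields an ExceptionInfo
# whose exception is `exc_info.value`, and `match=` (a regex) can replace a
# separate message assertion.  The ValueError below is only a stand-in.
import pytest


def _illustrate_pytest_raises():
    # Before:
    #     with self.assertRaises(ValueError) as ctx:
    #         int("garbage")
    #     self.assertIn("garbage", str(ctx.exception))
    with pytest.raises(ValueError) as exc_info:
        int("garbage")
    assert "garbage" in str(exc_info.value)

    # Equivalent shortcut when only the message matters:
    with pytest.raises(ValueError, match="garbage"):
        int("garbage")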
Person(name="garbage").validate() @@ -1029,9 +1023,9 @@ class TestField(MongoDBTestCase): if i < 6: foo.save() else: - with pytest.raises(ValidationError) as cm: + with pytest.raises(ValidationError) as exc_info: foo.save() - assert "List is too long" in str(cm.exception) + assert "List is too long" in str(exc_info.value) def test_list_field_max_length_set_operator(self): """Ensure ListField's max_length is respected for a "set" operator.""" @@ -1040,9 +1034,9 @@ class TestField(MongoDBTestCase): items = ListField(IntField(), max_length=3) foo = Foo.objects.create(items=[1, 2, 3]) - with pytest.raises(ValidationError) as cm: + with pytest.raises(ValidationError) as exc_info: foo.modify(set__items=[1, 2, 3, 4]) - assert "List is too long" in str(cm.exception) + assert "List is too long" in str(exc_info.value) def test_list_field_rejects_strings(self): """Strings aren't valid list field data types.""" @@ -2325,21 +2319,21 @@ class TestEmbeddedDocumentListField(MongoDBTestCase): # Test with an embeddedDocument instead of a list(embeddedDocument) # It's an edge case but it used to fail with a vague error, making it difficult to troubleshoot it post = self.BlogPost(comments=comment) - with pytest.raises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: post.validate() - assert "'comments'" in str(ctx_err.exception) - assert "Only lists and tuples may be used in a list field" in str( - ctx_err.exception - ) + + error_msg = str(exc_info.value) + assert "'comments'" in error_msg + assert "Only lists and tuples may be used in a list field" in error_msg # Test with a Document post = self.BlogPost(comments=Title(content="garbage")) - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as exc_info: post.validate() - assert "'comments'" in str(ctx_err.exception) - assert "Only lists and tuples may be used in a list field" in str( - ctx_err.exception - ) + + error_msg = str(exc_info.value) + assert "'comments'" in error_msg + assert "Only lists and tuples may be used in a list field" in error_msg def test_no_keyword_filter(self): """ diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index e7df0e08..e125f56a 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -31,10 +31,10 @@ class TestURLField(MongoDBTestCase): # TODO fix URL validation - this *IS* a valid URL # For now we just want to make sure that the error message is correct - with pytest.raises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: link.validate() assert ( - unicode(ctx_err.exception) + unicode(exc_info.value) == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" ) diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index d154de8d..31abb42f 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -908,20 +908,20 @@ class TestQueryset(unittest.TestCase): assert Blog.objects.count() == 2 # test inserting an existing document (shouldn't be allowed) - with pytest.raises(OperationError) as cm: + with pytest.raises(OperationError) as exc_info: blog = Blog.objects.first() Blog.objects.insert(blog) assert ( - str(cm.exception) + str(exc_info.value) == "Some documents have ObjectIds, use doc.update() instead" ) # test inserting a query set - with pytest.raises(OperationError) as cm: + with pytest.raises(OperationError) as exc_info: blogs_qs = Blog.objects Blog.objects.insert(blogs_qs) assert ( - 
str(cm.exception) + str(exc_info.value) == "Some documents have ObjectIds, use doc.update() instead" ) @@ -5053,9 +5053,8 @@ class TestQueryset(unittest.TestCase): Person(name="a").save() qs = Person.objects() _ = list(qs) - with pytest.raises(OperationError) as ctx_err: + with pytest.raises(OperationError, match="QuerySet already cached") as ctx_err: qs.no_cache() - assert "QuerySet already cached" == str(ctx_err.exception) def test_no_cached_queryset_no_cache_back_to_cache(self): class Person(Document): diff --git a/tests/test_connection.py b/tests/test_connection.py index c73b67d1..8db69b0c 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -142,22 +142,22 @@ class ConnectionTest(unittest.TestCase): def test_connect_fails_if_connect_2_times_with_default_alias(self): connect("mongoenginetest") - with pytest.raises(ConnectionFailure) as ctx_err: + with pytest.raises(ConnectionFailure) as exc_info: connect("mongoenginetest2") assert ( "A different connection with alias `default` was already registered. Use disconnect() first" - == str(ctx_err.exception) + == str(exc_info.value) ) def test_connect_fails_if_connect_2_times_with_custom_alias(self): connect("mongoenginetest", alias="alias1") - with pytest.raises(ConnectionFailure) as ctx_err: + with pytest.raises(ConnectionFailure) as exc_info: connect("mongoenginetest2", alias="alias1") assert ( "A different connection with alias `alias1` was already registered. Use disconnect() first" - == str(ctx_err.exception) + == str(exc_info.value) ) def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way( @@ -366,9 +366,9 @@ class ConnectionTest(unittest.TestCase): assert History._collection is None - with pytest.raises(ConnectionFailure) as ctx_err: + with pytest.raises(ConnectionFailure) as exc_info: History.objects.first() - assert "You have not defined a default connection" == str(ctx_err.exception) + assert "You have not defined a default connection" == str(exc_info.value) def test_connect_disconnect_works_on_same_document(self): """Ensure that the connect/disconnect works properly with a single Document""" From c61c6a85253e76fe5ef8d7da48af94d248e3786f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 31 Aug 2019 22:51:13 +0300 Subject: [PATCH 079/216] fix == None assertions --- tests/document/test_class_methods.py | 2 +- tests/document/test_inheritance.py | 4 +-- tests/document/test_instance.py | 32 +++++++++--------- tests/document/test_validation.py | 2 +- tests/fields/test_fields.py | 10 +++--- tests/queryset/test_field_list.py | 50 ++++++++++++++-------------- tests/queryset/test_modify.py | 6 ++-- tests/queryset/test_queryset.py | 28 ++++++++-------- tests/queryset/test_visitor.py | 4 +-- tests/test_datastructures.py | 2 +- 10 files changed, 70 insertions(+), 70 deletions(-) diff --git a/tests/document/test_class_methods.py b/tests/document/test_class_methods.py index 98909d2f..be883b2a 100644 --- a/tests/document/test_class_methods.py +++ b/tests/document/test_class_methods.py @@ -72,7 +72,7 @@ class TestClassMethods(unittest.TestCase): class Job(Document): employee = ReferenceField(self.Person) - assert self.Person._meta.get("delete_rules") == None + assert self.Person._meta.get("delete_rules") is None self.Person.register_delete_rule(Job, "employee", NULLIFY) assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY} diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index 3e515653..b6b6088a 100644 --- 
a/tests/document/test_inheritance.py +++ b/tests/document/test_inheritance.py @@ -559,8 +559,8 @@ class TestInheritance(MongoDBTestCase): assert "collection" not in Animal._meta assert "collection" not in Mammal._meta - assert Animal._get_collection_name() == None - assert Mammal._get_collection_name() == None + assert Animal._get_collection_name() is None + assert Mammal._get_collection_name() is None assert Fish._get_collection_name() == "fish" assert Guppy._get_collection_name() == "fish" diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index c7bc113e..57815355 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -1391,7 +1391,7 @@ class TestInstance(MongoDBTestCase): person.reload() assert person.name == "User" - assert person.age == None + assert person.age is None person = self.Person.objects.get() person.name = None @@ -1399,8 +1399,8 @@ class TestInstance(MongoDBTestCase): person.save() person.reload() - assert person.name == None - assert person.age == None + assert person.name is None + assert person.age is None def test_update_rename_operator(self): """Test the $rename operator.""" @@ -2018,7 +2018,7 @@ class TestInstance(MongoDBTestCase): promoted_employee.save() promoted_employee.reload() - assert promoted_employee.details == None + assert promoted_employee.details is None def test_object_mixins(self): class NameMixin(object): @@ -2154,7 +2154,7 @@ class TestInstance(MongoDBTestCase): reviewer.delete() # No effect on the BlogPost assert BlogPost.objects.count() == 1 - assert BlogPost.objects.get().reviewer == None + assert BlogPost.objects.get().reviewer is None # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() @@ -2200,7 +2200,7 @@ class TestInstance(MongoDBTestCase): reviewer.delete() assert Book.objects.count() == 1 - assert Book.objects.get().reviewer == None + assert Book.objects.get().reviewer is None user.delete() assert Book.objects.count() == 0 @@ -2267,7 +2267,7 @@ class TestInstance(MongoDBTestCase): reviewer.delete() assert BlogPost.objects.count() == 1 - assert BlogPost.objects.get().reviewer == None + assert BlogPost.objects.get().reviewer is None # Delete the Writer should lead to deletion of the BlogPost author.delete() @@ -2378,7 +2378,7 @@ class TestInstance(MongoDBTestCase): f.delete() assert Bar.objects.count() == 1 # No effect on the BlogPost - assert Bar.objects.get().foo == None + assert Bar.objects.get().foo is None def test_invalid_reverse_delete_rule_raise_errors(self): with pytest.raises(InvalidDocumentError): @@ -3464,7 +3464,7 @@ class TestInstance(MongoDBTestCase): p = Person.from_json('{"name": "name"}', created=False) assert p._created == False - assert p.id == None + assert p.id is None # Make sure the document is subsequently persisted correctly. 
p.save() @@ -3540,13 +3540,13 @@ class TestInstance(MongoDBTestCase): u_from_db = User.objects.get(name="user") u_from_db.height = None u_from_db.save() - assert u_from_db.height == None + assert u_from_db.height is None # 864 - assert u_from_db.str_fld == None - assert u_from_db.int_fld == None - assert u_from_db.flt_fld == None - assert u_from_db.dt_fld == None - assert u_from_db.cdt_fld == None + assert u_from_db.str_fld is None + assert u_from_db.int_fld is None + assert u_from_db.flt_fld is None + assert u_from_db.dt_fld is None + assert u_from_db.cdt_fld is None # 735 User.objects.delete() @@ -3554,7 +3554,7 @@ class TestInstance(MongoDBTestCase): u.save() User.objects(name="user").update_one(set__height=None, upsert=True) u_from_db = User.objects.get(name="user") - assert u_from_db.height == None + assert u_from_db.height is None def test_not_saved_eq(self): """Ensure we can compare documents not saved. diff --git a/tests/document/test_validation.py b/tests/document/test_validation.py index 80601994..dfae5bae 100644 --- a/tests/document/test_validation.py +++ b/tests/document/test_validation.py @@ -110,7 +110,7 @@ class TestValidatorError(MongoDBTestCase): comment.date = datetime.now() comment.validate() - assert comment._instance == None + assert comment._instance is None def test_embedded_db_field_validate(self): class SubDoc(EmbeddedDocument): diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index 0ce65087..21cc78be 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -180,7 +180,7 @@ class TestField(MongoDBTestCase): assert person.validate() is None - assert person.name == None + assert person.name is None assert person.age == 30 assert person.userid == "test" assert isinstance(person.created, datetime.datetime) @@ -250,7 +250,7 @@ class TestField(MongoDBTestCase): assert person.validate() is None - assert person.name == None + assert person.name is None assert person.age == 30 assert person.userid == "test" assert isinstance(person.created, datetime.datetime) @@ -363,7 +363,7 @@ class TestField(MongoDBTestCase): name = StringField() person = Person(name="Test User") - assert person.id == None + assert person.id is None person.id = 47 with pytest.raises(ValidationError): @@ -1970,7 +1970,7 @@ class TestField(MongoDBTestCase): shirt2 = Shirt() # Make sure get__display returns the default value (or None) - assert shirt1.get_size_display() == None + assert shirt1.get_size_display() is None assert shirt1.get_style_display() == "Wide" shirt1.size = "XXL" @@ -2024,7 +2024,7 @@ class TestField(MongoDBTestCase): shirt = Shirt() - assert shirt.get_size_display() == None + assert shirt.get_size_display() is None assert shirt.get_style_display() == "Small" shirt.size = "XXL" diff --git a/tests/queryset/test_field_list.py b/tests/queryset/test_field_list.py index d33c4c86..a2bf6f1f 100644 --- a/tests/queryset/test_field_list.py +++ b/tests/queryset/test_field_list.py @@ -154,10 +154,10 @@ class TestOnlyExcludeAll(unittest.TestCase): obj = self.Person.objects.only("name").get() assert obj.name == person.name - assert obj.age == None + assert obj.age is None obj = self.Person.objects.only("age").get() - assert obj.name == None + assert obj.name is None assert obj.age == person.age obj = self.Person.objects.only("name", "age").get() @@ -166,7 +166,7 @@ class TestOnlyExcludeAll(unittest.TestCase): obj = self.Person.objects.only(*("id", "name")).get() assert obj.name == person.name - assert obj.age == None + assert obj.age is None # Check 
polymorphism still works class Employee(self.Person): @@ -181,7 +181,7 @@ class TestOnlyExcludeAll(unittest.TestCase): # Check field names are looked up properly obj = Employee.objects(id=employee.id).only("salary").get() assert obj.salary == employee.salary - assert obj.name == None + assert obj.name is None def test_only_with_subfields(self): class User(EmbeddedDocument): @@ -215,8 +215,8 @@ class TestOnlyExcludeAll(unittest.TestCase): post.save() obj = BlogPost.objects.only("author.name").get() - assert obj.content == None - assert obj.author.email == None + assert obj.content is None + assert obj.author.email is None assert obj.author.name == "Test User" assert obj.comments == [] @@ -225,15 +225,15 @@ class TestOnlyExcludeAll(unittest.TestCase): obj = BlogPost.objects.only("content", "comments.title").get() assert obj.content == "Had a good coffee today..." - assert obj.author == None + assert obj.author is None assert obj.comments[0].title == "I aggree" assert obj.comments[1].title == "Coffee" - assert obj.comments[0].text == None - assert obj.comments[1].text == None + assert obj.comments[0].text is None + assert obj.comments[1].text is None obj = BlogPost.objects.only("comments").get() - assert obj.content == None - assert obj.author == None + assert obj.content is None + assert obj.author is None assert obj.comments[0].title == "I aggree" assert obj.comments[1].title == "Coffee" assert obj.comments[0].text == "Great post!" @@ -266,10 +266,10 @@ class TestOnlyExcludeAll(unittest.TestCase): post.save() obj = BlogPost.objects.exclude("author", "comments.text").get() - assert obj.author == None + assert obj.author is None assert obj.content == "Had a good coffee today..." assert obj.comments[0].title == "I aggree" - assert obj.comments[0].text == None + assert obj.comments[0].text is None BlogPost.drop_collection() @@ -304,15 +304,15 @@ class TestOnlyExcludeAll(unittest.TestCase): assert obj.sender == "me" assert obj.to == "you" assert obj.subject == "From Russia with Love" - assert obj.body == None - assert obj.content_type == None + assert obj.body is None + assert obj.content_type is None obj = Email.objects.only("sender", "to").exclude("body", "sender").get() - assert obj.sender == None + assert obj.sender is None assert obj.to == "you" - assert obj.subject == None - assert obj.body == None - assert obj.content_type == None + assert obj.subject is None + assert obj.body is None + assert obj.content_type is None obj = ( Email.objects.exclude("attachments.content") @@ -321,12 +321,12 @@ class TestOnlyExcludeAll(unittest.TestCase): .get() ) assert obj.attachments[0].name == "file1.doc" - assert obj.attachments[0].content == None - assert obj.sender == None + assert obj.attachments[0].content is None + assert obj.sender is None assert obj.to == "you" - assert obj.subject == None - assert obj.body == None - assert obj.content_type == None + assert obj.subject is None + assert obj.body is None + assert obj.content_type is None Email.drop_collection() @@ -456,7 +456,7 @@ class TestOnlyExcludeAll(unittest.TestCase): User(username="mongodb", password="secret").save() user = Base.objects().exclude("password", "wibble").first() - assert user.password == None + assert user.password is None with pytest.raises(LookUpError): Base.objects.exclude("made_up") diff --git a/tests/queryset/test_modify.py b/tests/queryset/test_modify.py index 293a463e..556e6d9e 100644 --- a/tests/queryset/test_modify.py +++ b/tests/queryset/test_modify.py @@ -35,13 +35,13 @@ class TestFindAndModify(unittest.TestCase): 
def test_modify_not_existing(self): Doc(id=0, value=0).save() - assert Doc.objects(id=1).modify(set__value=-1) == None + assert Doc.objects(id=1).modify(set__value=-1) is None self.assertDbEqual([{"_id": 0, "value": 0}]) def test_modify_with_upsert(self): Doc(id=0, value=0).save() old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True) - assert old_doc == None + assert old_doc is None self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) def test_modify_with_upsert_existing(self): @@ -68,7 +68,7 @@ class TestFindAndModify(unittest.TestCase): def test_find_and_modify_with_remove_not_existing(self): Doc(id=0, value=0).save() - assert Doc.objects(id=1).modify(remove=True) == None + assert Doc.objects(id=1).modify(remove=True) is None self.assertDbEqual([{"_id": 0, "value": 0}]) def test_modify_with_order_by(self): diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 31abb42f..f3606609 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -142,7 +142,7 @@ class TestQueryset(unittest.TestCase): person = self.Person.objects().limit(1).only("name").first() assert person == user_a assert person.name == "User A" - assert person.age == None + assert person.age is None def test_skip(self): """Ensure that QuerySet.skip works as expected.""" @@ -166,7 +166,7 @@ class TestQueryset(unittest.TestCase): person = self.Person.objects().skip(1).only("name").first() assert person == user_b assert person.name == "User B" - assert person.age == None + assert person.age is None def test___getitem___invalid_index(self): """Ensure slicing a queryset works as expected.""" @@ -444,7 +444,7 @@ class TestQueryset(unittest.TestCase): assert result == 2 result = self.Person.objects.update(set__name="Ross", write_concern={"w": 0}) - assert result == None + assert result is None result = self.Person.objects.update_one( set__name="Test User", write_concern={"w": 1} @@ -453,7 +453,7 @@ class TestQueryset(unittest.TestCase): result = self.Person.objects.update_one( set__name="Test User", write_concern={"w": 0} ) - assert result == None + assert result is None def test_update_update_has_a_value(self): """Test to ensure that update is passed a value to update to""" @@ -1148,7 +1148,7 @@ class TestQueryset(unittest.TestCase): obj = self.Person.objects(name__contains="van").first() assert obj == person obj = self.Person.objects(name__contains="Van").first() - assert obj == None + assert obj is None # Test icontains obj = self.Person.objects(name__icontains="Van").first() @@ -1158,7 +1158,7 @@ class TestQueryset(unittest.TestCase): obj = self.Person.objects(name__startswith="Guido").first() assert obj == person obj = self.Person.objects(name__startswith="guido").first() - assert obj == None + assert obj is None # Test istartswith obj = self.Person.objects(name__istartswith="guido").first() @@ -1168,7 +1168,7 @@ class TestQueryset(unittest.TestCase): obj = self.Person.objects(name__endswith="Rossum").first() assert obj == person obj = self.Person.objects(name__endswith="rossuM").first() - assert obj == None + assert obj is None # Test iendswith obj = self.Person.objects(name__iendswith="rossuM").first() @@ -1178,15 +1178,15 @@ class TestQueryset(unittest.TestCase): obj = self.Person.objects(name__exact="Guido van Rossum").first() assert obj == person obj = self.Person.objects(name__exact="Guido van rossum").first() - assert obj == None + assert obj is None obj = self.Person.objects(name__exact="Guido van Rossu").first() - assert obj == None + 
assert obj is None # Test iexact obj = self.Person.objects(name__iexact="gUIDO VAN rOSSUM").first() assert obj == person obj = self.Person.objects(name__iexact="gUIDO VAN rOSSU").first() - assert obj == None + assert obj is None # Test unsafe expressions person = self.Person(name="Guido van Rossum [.'Geek']") @@ -1205,7 +1205,7 @@ class TestQueryset(unittest.TestCase): assert obj == alice obj = self.Person.objects(name__not__iexact="alice").first() - assert obj == None + assert obj is None def test_filter_chaining(self): """Ensure filters can be chained together. @@ -1430,7 +1430,7 @@ class TestQueryset(unittest.TestCase): BlogPost.objects.create(content="Anonymous post...") result = BlogPost.objects.get(author=None) - assert result.author == None + assert result.author is None def test_find_dict_item(self): """Ensure that DictField items may be found. @@ -2050,7 +2050,7 @@ class TestQueryset(unittest.TestCase): assert post.title != None BlogPost.objects.update_one(unset__title=1) post.reload() - assert post.title == None + assert post.title is None pymongo_doc = BlogPost.objects.as_pymongo().first() assert "title" not in pymongo_doc @@ -4041,7 +4041,7 @@ class TestQueryset(unittest.TestCase): assert post.comment == comment Post.objects.update(unset__comment=1) post.reload() - assert post.comment == None + assert post.comment is None Comment.drop_collection() Post.drop_collection() diff --git a/tests/queryset/test_visitor.py b/tests/queryset/test_visitor.py index a41f9278..9706d012 100644 --- a/tests/queryset/test_visitor.py +++ b/tests/queryset/test_visitor.py @@ -294,7 +294,7 @@ class TestQ(unittest.TestCase): obj = self.Person.objects(Q(name=re.compile("^Gui"))).first() assert obj == person obj = self.Person.objects(Q(name=re.compile("^gui"))).first() - assert obj == None + assert obj is None obj = self.Person.objects(Q(name=re.compile("^gui", re.I))).first() assert obj == person @@ -303,7 +303,7 @@ class TestQ(unittest.TestCase): assert obj == person obj = self.Person.objects(Q(name__not=re.compile("^Gui"))).first() - assert obj == None + assert obj is None def test_q_repr(self): assert repr(Q()) == "Q(**{})" diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 3a6029c1..24cda40d 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -122,7 +122,7 @@ class TestBaseDict(unittest.TestCase): def test_get_default(self): base_dict = self._get_basedict({}) sentinel = object() - assert base_dict.get("new") == None + assert base_dict.get("new") is None assert base_dict.get("new", sentinel) is sentinel def test___setitem___calls_mark_as_changed(self): From bc0c55e49a58f1a8104ffa77f1b7b87c605504da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 1 Sep 2019 15:03:29 +0300 Subject: [PATCH 080/216] improve tests health (flake8 warnings) --- tests/document/test_dynamic.py | 3 +- tests/document/test_indexes.py | 2 +- tests/document/test_inheritance.py | 8 ++-- tests/document/test_instance.py | 40 ++++++++++---------- tests/document/test_validation.py | 3 +- tests/fields/test_binary_field.py | 3 +- tests/fields/test_boolean_field.py | 6 +-- tests/fields/test_cached_reference_field.py | 6 +-- tests/fields/test_date_field.py | 4 +- tests/fields/test_datetime_field.py | 3 +- tests/fields/test_decimal_field.py | 6 +-- tests/fields/test_dict_field.py | 5 ++- tests/fields/test_email_field.py | 6 +-- tests/fields/test_embedded_document_field.py | 3 +- tests/fields/test_fields.py | 4 +- tests/fields/test_file_field.py | 3 +- 
 tests/fields/test_float_field.py | 2 +-
 tests/fields/test_int_field.py | 3 +-
 tests/fields/test_lazy_reference_field.py | 2 +-
 tests/fields/test_long_field.py | 2 +-
 tests/fields/test_map_field.py | 6 +--
 tests/fields/test_reference_field.py | 18 +--------
 tests/fields/test_url_field.py | 3 +-
 tests/fields/test_uuid_field.py | 6 +--
 tests/fixtures.py | 4 +-
 tests/queryset/test_field_list.py | 5 ++-
 tests/queryset/test_queryset.py | 26 ++++++-------
 tests/queryset/test_transform.py | 2 +-
 tests/queryset/test_visitor.py | 2 +-
 tests/test_connection.py | 7 ++--
 tests/test_context_managers.py | 4 +-
 tests/test_datastructures.py | 2 +-
 tests/test_replicaset_connection.py | 6 +--
 tests/test_utils.py | 3 +-
 34 files changed, 97 insertions(+), 111 deletions(-)

diff --git a/tests/document/test_dynamic.py b/tests/document/test_dynamic.py
index a6f46862..0032dfd9 100644
--- a/tests/document/test_dynamic.py
+++ b/tests/document/test_dynamic.py
@@ -1,8 +1,9 @@
 import unittest
 
+import pytest
+
 from mongoengine import *
 from tests.utils import MongoDBTestCase
-import pytest
 
 __all__ = ("TestDynamicDocument",)
 
diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py
index 6c31054a..dc6c5c8e 100644
--- a/tests/document/test_indexes.py
+++ b/tests/document/test_indexes.py
@@ -5,11 +5,11 @@ from datetime import datetime
 from nose.plugins.skip import SkipTest
 from pymongo.collation import Collation
 from pymongo.errors import OperationFailure
+import pytest
 from six import iteritems
 
 from mongoengine import *
 from mongoengine.connection import get_db
-import pytest
 
 
 class TestIndexes(unittest.TestCase):
diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py
index b6b6088a..5072f841 100644
--- a/tests/document/test_inheritance.py
+++ b/tests/document/test_inheritance.py
@@ -2,6 +2,7 @@
 import unittest
 import warnings
 
+import pytest
 from six import iteritems
 
 from mongoengine import (
@@ -17,7 +18,6 @@ from mongoengine import (
 )
 from mongoengine.pymongo_support import list_collection_names
 from tests.fixtures import Base
 from tests.utils import MongoDBTestCase
-import pytest
 
 class TestInheritance(MongoDBTestCase):
@@ -335,9 +335,7 @@ class TestInheritance(MongoDBTestCase):
             name = StringField()
 
         # can't inherit because Animal didn't explicitly allow inheritance
-        with pytest.raises(
-            ValueError, match="Document Animal may not be subclassed"
-        ) as exc_info:
+        with pytest.raises(ValueError, match="Document Animal may not be subclassed"):
 
             class Dog(Animal):
                 pass
@@ -475,7 +473,7 @@
             meta = {"abstract": True, "allow_inheritance": False}
 
         city = City(continent="asia")
-        assert None == city.pk
+        assert city.pk is None
         # TODO: expected error? Shouldn't we create a new error type?
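# Illustrative sketch (editor's addition, not part of the surrounding patch):
# the import reshuffling above follows the usual flake8/isort grouping of
# standard library, then third-party (where pytest now sits, alphabetized),
# then the package under test.  Which tool enforces it here is an assumption;
# the grouping itself is what the hunks show.
import unittest                           # standard library

import pytest                             # third-party
from six import iteritems                 # third-party

from mongoengine import Document          # package under test
from tests.utils import MongoDBTestCase   # test helpers from this repo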
with pytest.raises(KeyError): setattr(city, "pk", 1) diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 57815355..9d533129 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -9,6 +9,7 @@ from datetime import datetime import bson from bson import DBRef, ObjectId from pymongo.errors import DuplicateKeyError +import pytest from six import iteritems from mongoengine import * @@ -36,7 +37,6 @@ from tests.fixtures import ( PickleTest, ) from tests.utils import MongoDBTestCase, get_as_pymongo -import pytest TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "../fields/mongoengine.png") @@ -96,7 +96,7 @@ class TestInstance(MongoDBTestCase): assert Log.objects.count() == 10 options = Log.objects._collection.options() - assert options["capped"] == True + assert options["capped"] is True assert options["max"] == 10 assert options["size"] == 4096 @@ -122,7 +122,7 @@ class TestInstance(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - assert options["capped"] == True + assert options["capped"] is True assert options["max"] == 10 assert options["size"] == 10 * 2 ** 20 @@ -150,7 +150,7 @@ class TestInstance(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - assert options["capped"] == True + assert options["capped"] is True assert options["size"] >= 10000 # Check that the document with odd max_size value can be recreated @@ -350,7 +350,7 @@ class TestInstance(MongoDBTestCase): name = StringField() meta = {"allow_inheritance": True} - with pytest.raises(ValueError, match="Cannot override primary key field") as e: + with pytest.raises(ValueError, match="Cannot override primary key field"): class EmailUser(User): email = StringField(primary_key=True) @@ -620,7 +620,7 @@ class TestInstance(MongoDBTestCase): f.reload() def test_reload_of_non_strict_with_special_field_name(self): - """Ensures reloading works for documents with meta strict == False.""" + """Ensures reloading works for documents with meta strict is False.""" class Post(Document): meta = {"strict": False} @@ -832,13 +832,13 @@ class TestInstance(MongoDBTestCase): t = TestDocument(status="published") t.save(clean=False) assert t.status == "published" - assert t.cleaned == False + assert t.cleaned is False t = TestDocument(status="published") - assert t.cleaned == False + assert t.cleaned is False t.save(clean=True) assert t.status == "published" - assert t.cleaned == True + assert t.cleaned is True raw_doc = get_as_pymongo(t) # Make sure clean changes makes it to the db assert raw_doc == {"status": "published", "cleaned": True, "_id": t.id} @@ -1600,7 +1600,7 @@ class TestInstance(MongoDBTestCase): person = self.Person.objects.get() assert person.name == "User" assert person.age == 21 - assert person.active == False + assert person.active is False def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc( self, @@ -2521,9 +2521,9 @@ class TestInstance(MongoDBTestCase): assert all_user_dic.get(u1, False) == "OK" assert all_user_dic.get(u2, False) == "OK" assert all_user_dic.get(u3, False) == "OK" - assert all_user_dic.get(u4, False) == False # New object - assert all_user_dic.get(b1, False) == False # Other object - assert all_user_dic.get(b2, False) == False # Other object + assert all_user_dic.get(u4, False) is False # New object + assert all_user_dic.get(b1, False) is False # Other object + assert all_user_dic.get(b2, False) is False # Other object # Make sure docs are properly 
identified in a set (__hash__ is used # for hashing the docs). @@ -3216,7 +3216,7 @@ class TestInstance(MongoDBTestCase): def test_mixed_creation(self): """Document cannot be instantiated using mixed arguments.""" with pytest.raises(TypeError) as exc_info: - person = self.Person("Test User", age=42) + self.Person("Test User", age=42) expected_msg = ( "Instantiating a document with positional arguments is not " @@ -3227,7 +3227,7 @@ class TestInstance(MongoDBTestCase): def test_positional_creation_embedded(self): """Embedded document cannot be created using positional arguments.""" with pytest.raises(TypeError) as exc_info: - job = self.Job("Test Job", 4) + self.Job("Test Job", 4) expected_msg = ( "Instantiating a document with positional arguments is not " @@ -3238,7 +3238,7 @@ class TestInstance(MongoDBTestCase): def test_mixed_creation_embedded(self): """Embedded document cannot be created using mixed arguments.""" with pytest.raises(TypeError) as exc_info: - job = self.Job("Test Job", years=4) + self.Job("Test Job", years=4) expected_msg = ( "Instantiating a document with positional arguments is not " @@ -3432,7 +3432,7 @@ class TestInstance(MongoDBTestCase): meta = {"shard_key": ("id", "name")} p = Person.from_json('{"name": "name", "age": 27}', created=True) - assert p._created == True + assert p._created is True p.name = "new name" p.id = "12345" assert p.name == "new name" @@ -3450,7 +3450,7 @@ class TestInstance(MongoDBTestCase): meta = {"shard_key": ("id", "name")} p = Person._from_son({"name": "name", "age": 27}, created=True) - assert p._created == True + assert p._created is True p.name = "new name" p.id = "12345" assert p.name == "new name" @@ -3463,7 +3463,7 @@ class TestInstance(MongoDBTestCase): Person.objects.delete() p = Person.from_json('{"name": "name"}', created=False) - assert p._created == False + assert p._created is False assert p.id is None # Make sure the document is subsequently persisted correctly. 
@@ -3483,7 +3483,7 @@ class TestInstance(MongoDBTestCase): p = Person.from_json( '{"_id": "5b85a8b04ec5dc2da388296e", "name": "name"}', created=False ) - assert p._created == False + assert p._created is False assert p._changed_fields == [] assert p.name == "name" assert p.id == ObjectId("5b85a8b04ec5dc2da388296e") diff --git a/tests/document/test_validation.py b/tests/document/test_validation.py index dfae5bae..2439f283 100644 --- a/tests/document/test_validation.py +++ b/tests/document/test_validation.py @@ -2,9 +2,10 @@ import unittest from datetime import datetime +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase -import pytest class TestValidatorError(MongoDBTestCase): diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index 86ee2654..e2a1b8d6 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -2,12 +2,11 @@ import uuid from bson import Binary -from nose.plugins.skip import SkipTest +import pytest import six from mongoengine import * from tests.utils import MongoDBTestCase -import pytest BIN_VALUE = six.b( "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" diff --git a/tests/fields/test_boolean_field.py b/tests/fields/test_boolean_field.py index b38b5ea4..041f9f56 100644 --- a/tests/fields/test_boolean_field.py +++ b/tests/fields/test_boolean_field.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- -from mongoengine import * - -from tests.utils import MongoDBTestCase, get_as_pymongo import pytest +from mongoengine import * +from tests.utils import MongoDBTestCase, get_as_pymongo + class TestBooleanField(MongoDBTestCase): def test_storage(self): diff --git a/tests/fields/test_cached_reference_field.py b/tests/fields/test_cached_reference_field.py index e404aae0..bb4c57d2 100644 --- a/tests/fields/test_cached_reference_field.py +++ b/tests/fields/test_cached_reference_field.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- from decimal import Decimal -from mongoengine import * - -from tests.utils import MongoDBTestCase import pytest +from mongoengine import * +from tests.utils import MongoDBTestCase + class TestCachedReferenceField(MongoDBTestCase): def test_get_and_save(self): diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index 46fa4f0f..e94ed0ce 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- import datetime + +import pytest import six try: @@ -8,9 +10,7 @@ except ImportError: dateutil = None from mongoengine import * - from tests.utils import MongoDBTestCase -import pytest class TestDateField(MongoDBTestCase): diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 8db491c6..70debac5 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- import datetime as dt + +import pytest import six try: @@ -11,7 +13,6 @@ from mongoengine import * from mongoengine import connection from tests.utils import MongoDBTestCase -import pytest class TestDateTimeField(MongoDBTestCase): diff --git a/tests/fields/test_decimal_field.py b/tests/fields/test_decimal_field.py index b5b95363..c531166f 100644 --- a/tests/fields/test_decimal_field.py +++ b/tests/fields/test_decimal_field.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- from decimal import Decimal -from mongoengine import * - -from tests.utils import MongoDBTestCase import pytest +from 
mongoengine import * +from tests.utils import MongoDBTestCase + class TestDecimalField(MongoDBTestCase): def test_validation(self): diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index 7dda2a9c..e88128f9 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -1,9 +1,10 @@ # -*- coding: utf-8 -*- +import pytest + from mongoengine import * from mongoengine.base import BaseDict from tests.utils import MongoDBTestCase, get_as_pymongo -import pytest class TestDictField(MongoDBTestCase): @@ -290,7 +291,7 @@ class TestDictField(MongoDBTestCase): e.save() e.update(set__mapping={"ints": [3, 4]}) e.reload() - assert BaseDict == type(e.mapping) + assert isinstance(e.mapping, BaseDict) assert {"ints": [3, 4]} == e.mapping # try creating an invalid mapping diff --git a/tests/fields/test_email_field.py b/tests/fields/test_email_field.py index 902a7c42..55255df5 100644 --- a/tests/fields/test_email_field.py +++ b/tests/fields/test_email_field.py @@ -2,11 +2,11 @@ import sys from unittest import SkipTest -from mongoengine import * - -from tests.utils import MongoDBTestCase import pytest +from mongoengine import * +from tests.utils import MongoDBTestCase + class TestEmailField(MongoDBTestCase): def test_generic_behavior(self): diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index 9e6871cc..eeddac1e 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import pytest + from mongoengine import ( Document, EmbeddedDocument, @@ -13,7 +15,6 @@ from mongoengine import ( ) from tests.utils import MongoDBTestCase -import pytest class TestEmbeddedDocumentField(MongoDBTestCase): diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index 21cc78be..b8c916f8 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -4,6 +4,7 @@ import unittest from bson import DBRef, ObjectId, SON from nose.plugins.skip import SkipTest +import pytest from mongoengine import ( BooleanField, @@ -39,7 +40,6 @@ from mongoengine.base import BaseField, EmbeddedDocumentList, _document_registry from mongoengine.errors import DeprecatedError from tests.utils import MongoDBTestCase -import pytest class TestField(MongoDBTestCase): @@ -1838,7 +1838,7 @@ class TestField(MongoDBTestCase): user = User.objects(bookmarks__all=[post_1]).first() - assert user != None + assert user is not None assert user.bookmarks[0] == post_1 def test_generic_reference_filter_by_dbref(self): diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index 0746db33..fb8cacff 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -137,7 +137,6 @@ class TestFileField(MongoDBTestCase): text = six.b("Hello, World!") more_text = six.b("Foo Bar") - content_type = "text/plain" streamfile = StreamFile() streamfile.save() @@ -205,7 +204,7 @@ class TestFileField(MongoDBTestCase): doc_b = GridDocument.objects.with_id(doc_a.id) doc_b.the_file.replace(f, filename="doc_b") doc_b.save() - assert doc_b.the_file.grid_id != None + assert doc_b.the_file.grid_id is not None # Test it matches doc_c = GridDocument.objects.with_id(doc_b.id) diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py index d755fb4e..a1cd7a0a 100644 --- a/tests/fields/test_float_field.py +++ b/tests/fields/test_float_field.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- +import pytest import six 
from mongoengine import * from tests.utils import MongoDBTestCase -import pytest class TestFloatField(MongoDBTestCase): diff --git a/tests/fields/test_int_field.py b/tests/fields/test_int_field.py index 65a5fbad..1f9c5a77 100644 --- a/tests/fields/test_int_field.py +++ b/tests/fields/test_int_field.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase -import pytest class TestIntField(MongoDBTestCase): diff --git a/tests/fields/test_lazy_reference_field.py b/tests/fields/test_lazy_reference_field.py index 8150574d..b5b8690e 100644 --- a/tests/fields/test_lazy_reference_field.py +++ b/tests/fields/test_lazy_reference_field.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- from bson import DBRef, ObjectId +import pytest from mongoengine import * from mongoengine.base import LazyReference from tests.utils import MongoDBTestCase -import pytest class TestLazyReferenceField(MongoDBTestCase): diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py index 51f8e255..da4f04c8 100644 --- a/tests/fields/test_long_field.py +++ b/tests/fields/test_long_field.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import pytest import six try: @@ -10,7 +11,6 @@ from mongoengine import * from mongoengine.connection import get_db from tests.utils import MongoDBTestCase -import pytest class TestLongField(MongoDBTestCase): diff --git a/tests/fields/test_map_field.py b/tests/fields/test_map_field.py index fd56ddd0..8b8b1c46 100644 --- a/tests/fields/test_map_field.py +++ b/tests/fields/test_map_field.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- import datetime -from mongoengine import * - -from tests.utils import MongoDBTestCase import pytest +from mongoengine import * +from tests.utils import MongoDBTestCase + class TestMapField(MongoDBTestCase): def test_mapfield(self): diff --git a/tests/fields/test_reference_field.py b/tests/fields/test_reference_field.py index 783d1315..949eac67 100644 --- a/tests/fields/test_reference_field.py +++ b/tests/fields/test_reference_field.py @@ -1,10 +1,9 @@ # -*- coding: utf-8 -*- from bson import DBRef, SON +import pytest from mongoengine import * - from tests.utils import MongoDBTestCase -import pytest class TestReferenceField(MongoDBTestCase): @@ -59,21 +58,6 @@ class TestReferenceField(MongoDBTestCase): with pytest.raises(ValidationError): post1.validate() - def test_objectid_reference_fields(self): - """Make sure storing Object ID references works.""" - - class Person(Document): - name = StringField() - parent = ReferenceField("self") - - Person.drop_collection() - - p1 = Person(name="John").save() - Person(name="Ross", parent=p1.pk).save() - - p = Person.objects.get(name="Ross") - assert p.parent == p1 - def test_dbref_reference_fields(self): """Make sure storing references as bson.dbref.DBRef works.""" diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index e125f56a..948a4788 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase -import pytest class TestURLField(MongoDBTestCase): diff --git a/tests/fields/test_uuid_field.py b/tests/fields/test_uuid_field.py index b1413f95..21b7a090 100644 --- a/tests/fields/test_uuid_field.py +++ b/tests/fields/test_uuid_field.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- import uuid -from mongoengine import * - -from tests.utils import MongoDBTestCase, get_as_pymongo import pytest 
+from mongoengine import * +from tests.utils import MongoDBTestCase, get_as_pymongo + class Person(Document): api_key = UUIDField(binary=False) diff --git a/tests/fixtures.py b/tests/fixtures.py index 9f06f1ab..59fc3bf3 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -42,11 +42,11 @@ class PickleSignalsTest(Document): @classmethod def post_save(self, sender, document, created, **kwargs): - pickled = pickle.dumps(document) + pickle.dumps(document) @classmethod def post_delete(self, sender, document, **kwargs): - pickled = pickle.dumps(document) + pickle.dumps(document) signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) diff --git a/tests/queryset/test_field_list.py b/tests/queryset/test_field_list.py index a2bf6f1f..fbdde23b 100644 --- a/tests/queryset/test_field_list.py +++ b/tests/queryset/test_field_list.py @@ -1,8 +1,9 @@ import unittest +import pytest + from mongoengine import * from mongoengine.queryset import QueryFieldList -import pytest class TestQueryFieldList(unittest.TestCase): @@ -221,7 +222,7 @@ class TestOnlyExcludeAll(unittest.TestCase): assert obj.comments == [] obj = BlogPost.objects.only("various.test_dynamic.some").get() - assert obj.various["test_dynamic"].some == True + assert obj.various["test_dynamic"].some is True obj = BlogPost.objects.only("content", "comments.title").get() assert obj.content == "Had a good coffee today..." diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index f3606609..79f5793d 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -9,6 +9,7 @@ from bson import DBRef, ObjectId import pymongo from pymongo.read_preferences import ReadPreference from pymongo.results import UpdateResult +import pytest import six from six import iteritems @@ -24,7 +25,6 @@ from mongoengine.queryset import ( QuerySetManager, queryset_manager, ) -import pytest class db_ops_tracker(query_counter): @@ -1712,11 +1712,11 @@ class TestQueryset(unittest.TestCase): post = BlogPost(content="Watching TV", category=lameness) post.save() - assert 1 == BlogPost.objects.count() - assert "Lameness" == BlogPost.objects.first().category.name + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().category.name == "Lameness" Category.objects.delete() - assert 1 == BlogPost.objects.count() - assert None == BlogPost.objects.first().category + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().category is None def test_reverse_delete_rule_nullify_on_abstract_document(self): """Ensure nullification of references to deleted documents when @@ -1739,11 +1739,11 @@ class TestQueryset(unittest.TestCase): BlogPost(content="Watching TV", author=me).save() - assert 1 == BlogPost.objects.count() - assert me == BlogPost.objects.first().author + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().author == me self.Person.objects(name="Test User").delete() - assert 1 == BlogPost.objects.count() - assert None == BlogPost.objects.first().author + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().author is None def test_reverse_delete_rule_deny(self): """Ensure deletion gets denied on documents that still have references @@ -1896,7 +1896,7 @@ class TestQueryset(unittest.TestCase): """ p1 = self.Person(name="User Z", age=20).save() del_result = p1.delete(w=0) - assert None == del_result + assert del_result is None def test_reference_field_find(self): """Ensure cascading deletion of referring documents from the database. 
@@ -2047,7 +2047,7 @@ class TestQueryset(unittest.TestCase): post = BlogPost(title="garbage").save() - assert post.title != None + assert post.title is not None BlogPost.objects.update_one(unset__title=1) post.reload() assert post.title is None @@ -5006,7 +5006,7 @@ class TestQueryset(unittest.TestCase): # PyPy evaluates __len__ when iterating with list comprehensions while CPython does not. # This may be a bug in PyPy (PyPy/#1802) but it does not affect # the behavior of MongoEngine. - assert None == people._len + assert people._len is None assert q == 1 list(people) @@ -5053,7 +5053,7 @@ class TestQueryset(unittest.TestCase): Person(name="a").save() qs = Person.objects() _ = list(qs) - with pytest.raises(OperationError, match="QuerySet already cached") as ctx_err: + with pytest.raises(OperationError, match="QuerySet already cached"): qs.no_cache() def test_no_cached_queryset_no_cache_back_to_cache(self): diff --git a/tests/queryset/test_transform.py b/tests/queryset/test_transform.py index be28c3b8..3898809e 100644 --- a/tests/queryset/test_transform.py +++ b/tests/queryset/test_transform.py @@ -1,10 +1,10 @@ import unittest from bson.son import SON +import pytest from mongoengine import * from mongoengine.queryset import Q, transform -import pytest class TestTransform(unittest.TestCase): diff --git a/tests/queryset/test_visitor.py b/tests/queryset/test_visitor.py index 9706d012..e597e3d8 100644 --- a/tests/queryset/test_visitor.py +++ b/tests/queryset/test_visitor.py @@ -3,11 +3,11 @@ import re import unittest from bson import ObjectId +import pytest from mongoengine import * from mongoengine.errors import InvalidQueryError from mongoengine.queryset import Q -import pytest class TestQ(unittest.TestCase): diff --git a/tests/test_connection.py b/tests/test_connection.py index 8db69b0c..07edcbba 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -3,10 +3,10 @@ import datetime from bson.tz_util import utc from nose.plugins.skip import SkipTest import pymongo -from pymongo import MongoClient -from pymongo import ReadPreference -from pymongo.errors import InvalidName, OperationFailure +from pymongo import MongoClient, ReadPreference +from pymongo.errors import InvalidName, OperationFailure +import pytest try: import unittest2 as unittest @@ -29,7 +29,6 @@ from mongoengine.connection import ( get_connection, get_db, ) -import pytest def get_tz_awareness(connection): diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index cf4dd100..d68afbb0 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -1,5 +1,7 @@ import unittest +import pytest + from mongoengine import * from mongoengine.connection import get_db from mongoengine.context_managers import ( @@ -10,7 +12,6 @@ from mongoengine.context_managers import ( switch_db, ) from mongoengine.pymongo_support import count_documents -import pytest class ContextManagersTest(unittest.TestCase): @@ -214,7 +215,6 @@ class ContextManagersTest(unittest.TestCase): raise TypeError() def test_query_counter_does_not_swallow_exception(self): - with pytest.raises(TypeError): with query_counter() as q: raise TypeError() diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 24cda40d..ad421a72 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,7 +1,7 @@ import unittest -from six import iterkeys import pytest +from six import iterkeys from mongoengine import Document from mongoengine.base.datastructures import BaseDict, BaseList, 
StrictDict diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py index c1ea407c..5d83da00 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -1,7 +1,6 @@ import unittest -from pymongo import MongoClient -from pymongo import ReadPreference +from pymongo import MongoClient, ReadPreference import mongoengine from mongoengine.connection import ConnectionFailure @@ -25,14 +24,13 @@ class ConnectionTest(unittest.TestCase): def test_replicaset_uri_passes_read_preference(self): """Requires a replica set called "rs" on port 27017 """ - try: conn = mongoengine.connect( db="mongoenginetest", host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=READ_PREF, ) - except ConnectionFailure as e: + except ConnectionFailure: return if not isinstance(conn, CONN_CLASS): diff --git a/tests/test_utils.py b/tests/test_utils.py index ccb44aac..ef396571 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,9 +1,10 @@ import re import unittest -from mongoengine.base.utils import LazyRegexCompiler import pytest +from mongoengine.base.utils import LazyRegexCompiler + signal_output = [] From 799cdafae63b3ac22aafed4400bff10796a8ffee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 1 Sep 2019 15:27:11 +0300 Subject: [PATCH 081/216] remove references to nose --- CONTRIBUTING.rst | 2 +- requirements.txt | 1 - setup.py | 2 +- tests/document/test_indexes.py | 3 +-- tests/fields/test_email_field.py | 6 ------ tests/fields/test_fields.py | 13 ++++++------- tests/fields/test_file_field.py | 29 +++++++++-------------------- tests/test_connection.py | 31 +++++++++++++++---------------- tests/utils.py | 10 ++++------ tox.ini | 1 - 10 files changed, 37 insertions(+), 61 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 4711c1d3..56bae31f 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -54,7 +54,7 @@ General Guidelines should adapt to the breaking change in docs/upgrade.rst. - Write inline documentation for new classes and methods. - Write tests and make sure they pass (make sure you have a mongod - running on the default port, then execute ``python setup.py nosetests`` + running on the default port, then execute ``python setup.py test`` from the cmd line to run the test suite). - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. 
You can test various Python and PyMongo versions locally by executing diff --git a/requirements.txt b/requirements.txt index 46eabac3..43e5261b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ -nose pymongo>=3.4 six==1.10.0 Sphinx==1.5.5 diff --git a/setup.py b/setup.py index 2bc1ae1c..939e8e50 100644 --- a/setup.py +++ b/setup.py @@ -120,7 +120,7 @@ extra_opts = { } if sys.version_info[0] == 3: extra_opts["use_2to3"] = True - if "test" in sys.argv or "nosetests" in sys.argv: + if "test" in sys.argv: extra_opts["packages"] = find_packages() extra_opts["package_data"] = { "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index dc6c5c8e..90402c46 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -2,7 +2,6 @@ import unittest from datetime import datetime -from nose.plugins.skip import SkipTest from pymongo.collation import Collation from pymongo.errors import OperationFailure import pytest @@ -251,7 +250,7 @@ class TestIndexes(unittest.TestCase): def test_explicit_geohaystack_index(self): """Ensure that geohaystack indexes work when created via meta[indexes] """ - raise SkipTest( + pytest.skip( "GeoHaystack index creation is not supported for now" "from meta, as it requires a bucketSize parameter." ) diff --git a/tests/fields/test_email_field.py b/tests/fields/test_email_field.py index 55255df5..5a58ede4 100644 --- a/tests/fields/test_email_field.py +++ b/tests/fields/test_email_field.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import sys -from unittest import SkipTest import pytest @@ -46,11 +45,6 @@ class TestEmailField(MongoDBTestCase): user.validate() def test_email_field_unicode_user(self): - # Don't run this test on pypy3, which doesn't support unicode regex: - # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode - if sys.version_info[:2] == (3, 2): - raise SkipTest("unicode email addresses are not supported on PyPy 3") - class User(Document): email = EmailField() diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index b8c916f8..652f6903 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -3,7 +3,6 @@ import datetime import unittest from bson import DBRef, ObjectId, SON -from nose.plugins.skip import SkipTest import pytest from mongoengine import ( @@ -1239,17 +1238,17 @@ class TestField(MongoDBTestCase): a = A._from_son(SON([("fb", SON([("fc", SON([("txt", "hi")]))]))])) assert a.b.c.txt == "hi" + @pytest.mark.xfail( + reason="Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet", + raises=NotRegistered, + ) def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet( self, ): - raise SkipTest( - "Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet" - ) - class MyDoc2(Document): - emb = EmbeddedDocumentField("MyDoc") + emb = EmbeddedDocumentField("MyFunkyDoc123") - class MyDoc(EmbeddedDocument): + class MyFunkyDoc123(EmbeddedDocument): name = StringField() def test_embedded_document_validation(self): diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index fb8cacff..bfc86511 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -5,7 +5,7 @@ import tempfile import unittest import gridfs -from nose.plugins.skip import SkipTest +import pytest import six from mongoengine import * @@ -21,6 +21,8 @@ 
except ImportError: from tests.utils import MongoDBTestCase +require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed") + TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png") TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") @@ -377,10 +379,8 @@ class TestFileField(MongoDBTestCase): assert len(list(files)) == 0 assert len(list(chunks)) == 0 + @require_pil def test_image_field(self): - if not HAS_PIL: - raise SkipTest("PIL not installed") - class TestImage(Document): image = ImageField() @@ -411,10 +411,8 @@ class TestFileField(MongoDBTestCase): t.image.delete() + @require_pil def test_image_field_reassigning(self): - if not HAS_PIL: - raise SkipTest("PIL not installed") - class TestFile(Document): the_file = ImageField() @@ -428,10 +426,8 @@ class TestFileField(MongoDBTestCase): test_file.save() assert test_file.the_file.size == (45, 101) + @require_pil def test_image_field_resize(self): - if not HAS_PIL: - raise SkipTest("PIL not installed") - class TestImage(Document): image = ImageField(size=(185, 37)) @@ -451,10 +447,8 @@ class TestFileField(MongoDBTestCase): t.image.delete() + @require_pil def test_image_field_resize_force(self): - if not HAS_PIL: - raise SkipTest("PIL not installed") - class TestImage(Document): image = ImageField(size=(185, 37, True)) @@ -474,10 +468,8 @@ class TestFileField(MongoDBTestCase): t.image.delete() + @require_pil def test_image_field_thumbnail(self): - if not HAS_PIL: - raise SkipTest("PIL not installed") - class TestImage(Document): image = ImageField(thumbnail_size=(92, 18)) @@ -546,11 +538,8 @@ class TestFileField(MongoDBTestCase): assert putfile == copy.copy(putfile) assert putfile == copy.deepcopy(putfile) + @require_pil def test_get_image_by_grid_id(self): - - if not HAS_PIL: - raise SkipTest("PIL not installed") - class TestImage(Document): image1 = ImageField() diff --git a/tests/test_connection.py b/tests/test_connection.py index 07edcbba..acaab904 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,7 +1,6 @@ import datetime from bson.tz_util import utc -from nose.plugins.skip import SkipTest import pymongo from pymongo import MongoClient, ReadPreference @@ -35,6 +34,18 @@ def get_tz_awareness(connection): return connection.codec_options.tz_aware +try: + import mongomock + + MONGOMOCK_INSTALLED = True +except ImportError: + MONGOMOCK_INSTALLED = False + +require_mongomock = pytest.mark.skipif( + not MONGOMOCK_INSTALLED, reason="you need mongomock installed to run this testcase" +) + + class ConnectionTest(unittest.TestCase): @classmethod def setUpClass(cls): @@ -212,14 +223,10 @@ class ConnectionTest(unittest.TestCase): non_string_db_name = ["e. g. list instead of a string"] connect(non_string_db_name) + @require_mongomock def test_connect_in_mocking(self): """Ensure that the connect() method works properly in mocking. """ - try: - import mongomock - except ImportError: - raise SkipTest("you need mongomock installed to run this testcase") - connect("mongoenginetest", host="mongomock://localhost") conn = get_connection() assert isinstance(conn, mongomock.MongoClient) @@ -261,14 +268,10 @@ class ConnectionTest(unittest.TestCase): conn = get_connection("testdb7") assert isinstance(conn, mongomock.MongoClient) + @require_mongomock def test_default_database_with_mocking(self): """Ensure that the default database is correctly set when using mongomock. 
""" - try: - import mongomock - except ImportError: - raise SkipTest("you need mongomock installed to run this testcase") - disconnect_all() class SomeDocument(Document): @@ -281,16 +284,12 @@ class ConnectionTest(unittest.TestCase): assert conn.get_default_database().name == "mongoenginetest" assert conn.database_names()[0] == "mongoenginetest" + @require_mongomock def test_connect_with_host_list(self): """Ensure that the connect() method works when host is a list Uses mongomock to test w/o needing multiple mongod/mongos processes """ - try: - import mongomock - except ImportError: - raise SkipTest("you need mongomock installed to run this testcase") - connect(host=["mongomock://localhost"]) conn = get_connection() assert isinstance(conn, mongomock.MongoClient) diff --git a/tests/utils.py b/tests/utils.py index 0719d6ef..7ee22c3c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,7 +1,6 @@ -import operator import unittest -from nose.plugins.skip import SkipTest +import pytest from mongoengine import connect from mongoengine.connection import disconnect_all, get_db @@ -37,7 +36,7 @@ def get_as_pymongo(doc): def _decorated_with_ver_requirement(func, mongo_version_req, oper): """Return a MongoDB version requirement decorator. - The resulting decorator will raise a SkipTest exception if the current + The resulting decorator will skip the test if the current MongoDB version doesn't match the provided version/operator. For example, if you define a decorator like so: @@ -59,9 +58,8 @@ def _decorated_with_ver_requirement(func, mongo_version_req, oper): if oper(mongodb_v, mongo_version_req): return func(*args, **kwargs) - raise SkipTest( - "Needs MongoDB v{}+".format(".".join(str(n) for n in mongo_version_req)) - ) + pretty_version = ".".join(str(n) for n in mongo_version_req) + pytest.skip("Needs MongoDB v{}+".format(pretty_version)) _inner.__name__ = func.__name__ _inner.__doc__ = func.__doc__ diff --git a/tox.ini b/tox.ini index 94ccc9cf..349b5577 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,6 @@ envlist = {py27,py35,pypy,pypy3}-{mg34,mg36} commands = python setup.py test {posargs} deps = - nose mg34: pymongo>=3.4,<3.5 mg36: pymongo>=3.6,<3.7 mg39: pymongo>=3.9,<4.0 From d8924ed8920f856d3754db38c7c5e8adf0f96ece Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 2 Sep 2019 08:50:46 +0300 Subject: [PATCH 082/216] remove inheritance from unittest.TestCase on basic test classes --- tests/queryset/test_field_list.py | 2 +- tests/test_common.py | 2 +- tests/test_context_managers.py | 2 +- tests/test_datastructures.py | 10 +++++----- tests/test_signals.py | 2 +- tests/test_utils.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/queryset/test_field_list.py b/tests/queryset/test_field_list.py index fbdde23b..be7903fd 100644 --- a/tests/queryset/test_field_list.py +++ b/tests/queryset/test_field_list.py @@ -6,7 +6,7 @@ from mongoengine import * from mongoengine.queryset import QueryFieldList -class TestQueryFieldList(unittest.TestCase): +class TestQueryFieldList: def test_empty(self): q = QueryFieldList() assert not q diff --git a/tests/test_common.py b/tests/test_common.py index 6b6f18de..1779a91b 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -6,7 +6,7 @@ from mongoengine import Document from mongoengine.common import _import_class -class TestCommon(unittest.TestCase): +class TestCommon: def test__import_class(self): doc_cls = _import_class("Document") assert doc_cls is Document diff --git a/tests/test_context_managers.py 
b/tests/test_context_managers.py index d68afbb0..c10a0224 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -14,7 +14,7 @@ from mongoengine.context_managers import ( from mongoengine.pymongo_support import count_documents -class ContextManagersTest(unittest.TestCase): +class TestContextManagers: def test_switch_db_context_manager(self): connect("mongoenginetest") register_connection("testdb-1", "mongoenginetest2") diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index ad421a72..7b5d7d11 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -15,7 +15,7 @@ class DocumentStub(object): self._changed_fields.append(key) -class TestBaseDict(unittest.TestCase): +class TestBaseDict: @staticmethod def _get_basedict(dict_items): """Get a BaseList bound to a fake document instance""" @@ -151,7 +151,7 @@ class TestBaseDict(unittest.TestCase): assert base_dict._instance._changed_fields == ["my_name.a_new_attr"] -class TestBaseList(unittest.TestCase): +class TestBaseList: @staticmethod def _get_baselist(list_items): """Get a BaseList bound to a fake document instance""" @@ -360,12 +360,12 @@ class TestBaseList(unittest.TestCase): class TestStrictDict(unittest.TestCase): - def strict_dict_class(self, *args, **kwargs): - return StrictDict.create(*args, **kwargs) - def setUp(self): self.dtype = self.strict_dict_class(("a", "b", "c")) + def strict_dict_class(self, *args, **kwargs): + return StrictDict.create(*args, **kwargs) + def test_init(self): d = self.dtype(a=1, b=1, c=1) assert (d.a, d.b, d.c) == (1, 1, 1) diff --git a/tests/test_signals.py b/tests/test_signals.py index b217712b..d79eaf75 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -7,7 +7,7 @@ from mongoengine import signals signal_output = [] -class SignalTests(unittest.TestCase): +class TestSignal(unittest.TestCase): """ Testing signals before/after saving and deleting. 
""" diff --git a/tests/test_utils.py b/tests/test_utils.py index ef396571..dd178273 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,7 +8,7 @@ from mongoengine.base.utils import LazyRegexCompiler signal_output = [] -class LazyRegexCompilerTest(unittest.TestCase): +class TestLazyRegexCompiler: def test_lazy_regex_compiler_verify_laziness_of_descriptor(self): class UserEmail(object): EMAIL_REGEX = LazyRegexCompiler("@", flags=32) From 81647d67a0c97b3d0fac6cb687c385fc2827a108 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 31 Oct 2019 23:06:40 +0100 Subject: [PATCH 083/216] fix recent tests update with unittest2pytest --- tests/document/test_indexes.py | 16 ++++++---------- tests/queryset/test_queryset.py | 9 +++++---- tests/test_connection.py | 4 ++-- 3 files changed, 13 insertions(+), 16 deletions(-) diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index 90402c46..be857b59 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -544,23 +544,19 @@ class TestIndexes(unittest.TestCase): BlogPost(name=name).save() query_result = BlogPost.objects.collation(base).order_by("name") - self.assertEqual( - [x.name for x in query_result], sorted(names, key=lambda x: x.lower()) - ) - self.assertEqual(5, query_result.count()) + assert [x.name for x in query_result] == sorted(names, key=lambda x: x.lower()) + assert 5 == query_result.count() query_result = BlogPost.objects.collation(Collation(**base)).order_by("name") - self.assertEqual( - [x.name for x in query_result], sorted(names, key=lambda x: x.lower()) - ) - self.assertEqual(5, query_result.count()) + assert [x.name for x in query_result] == sorted(names, key=lambda x: x.lower()) + assert 5 == query_result.count() incorrect_collation = {"arndom": "wrdo"} - with self.assertRaises(OperationFailure): + with pytest.raises(OperationFailure): BlogPost.objects.collation(incorrect_collation).count() query_result = BlogPost.objects.collation({}).order_by("name") - self.assertEqual([x.name for x in query_result], sorted(names)) + assert [x.name for x in query_result] == sorted(names) def test_unique(self): """Ensure that uniqueness constraints are applied to fields. diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 79f5793d..7812ab66 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4626,7 +4626,8 @@ class TestQueryset(unittest.TestCase): bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED) assert bars._read_preference == ReadPreference.SECONDARY_PREFERRED assert ( - bars._cursor.collection.read_preference == ReadPreference.SECONDARY_PREFERRED + bars._cursor.collection.read_preference + == ReadPreference.SECONDARY_PREFERRED ) # Make sure that `.read_preference(...)` does accept string values. 
@@ -5765,13 +5766,13 @@ class TestQueryset(unittest.TestCase):
     def test_no_cursor_timeout(self):
         qs = self.Person.objects()
-        self.assertEqual(qs._cursor_args, {})  # ensure no regression of #2148
+        assert qs._cursor_args == {}  # ensure no regression of #2148
 
         qs = self.Person.objects().timeout(True)
-        self.assertEqual(qs._cursor_args, {})
+        assert qs._cursor_args == {}
 
         qs = self.Person.objects().timeout(False)
-        self.assertEqual(qs._cursor_args, {"no_cursor_timeout": True})
+        assert qs._cursor_args == {"no_cursor_timeout": True}
 
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/test_connection.py b/tests/test_connection.py
index acaab904..e40a6994 100644
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -625,8 +625,8 @@ class ConnectionTest(unittest.TestCase):
             alias="conn1", host="mongodb://localhost/testing?w=1&journal=true"
         )
         conn2 = connect("testing", alias="conn2", w=1, journal=True)
-        self.assertEqual(conn1.write_concern.document, {"w": 1, "j": True})
-        self.assertEqual(conn2.write_concern.document, {"w": 1, "j": True})
+        assert conn1.write_concern.document == {"w": 1, "j": True}
+        assert conn2.write_concern.document == {"w": 1, "j": True}
 
     def test_connect_with_replicaset_via_uri(self):
         """Ensure connect() works when specifying a replicaSet via the

From ff749a7a0a7b9a86b3745cad393effcf594db5f8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jan=20Philip=20G=C3=B6pfert?=
Date: Wed, 6 Nov 2019 10:35:16 +0100
Subject: [PATCH 084/216] Specify version of requirement

In `README.rst`, a version of `six` of at least `1.10.0` is specified.
This was missing from the requirements, potentially leading to broken
installations.
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 939e8e50..ceb5afad 100644
--- a/setup.py
+++ b/setup.py
@@ -143,7 +143,7 @@ setup(
     long_description=LONG_DESCRIPTION,
     platforms=["any"],
     classifiers=CLASSIFIERS,
-    install_requires=["pymongo>=3.4", "six"],
+    install_requires=["pymongo>=3.4", "six>=1.10.0"],
     cmdclass={"test": PyTest},
     **extra_opts
 )

From d3420918cd9804243900c8566b7e155044e668cb Mon Sep 17 00:00:00 2001
From: Eloi Zalczer
Date: Mon, 18 Nov 2019 17:16:06 +0100
Subject: [PATCH 085/216] Added alias parameter in query_counter

---
 mongoengine/context_managers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py
index d8dfeaac..5920b724 100644
--- a/mongoengine/context_managers.py
+++ b/mongoengine/context_managers.py
@@ -182,10 +182,10 @@ class query_counter(object):
     - Some queries are ignored by default by the counter (killcursors, db.system.indexes)
     """
 
-    def __init__(self):
+    def __init__(self, alias=DEFAULT_CONNECTION_NAME):
         """Construct the query_counter
         """
-        self.db = get_db()
+        self.db = get_db(alias=alias)
         self.initial_profiling_level = None
         self._ctx_query_counter = 0  # number of queries issued by the context

From 0bf08db7b943eba85d7e0dd85d161df4e615a371 Mon Sep 17 00:00:00 2001
From: Eloi Zalczer
Date: Mon, 2 Dec 2019 10:07:33 +0100
Subject: [PATCH 086/216] Added test case for query_counter alias

---
 tests/document/test_instance.py | 40 ++++++++++++++++++++++++++++++++-
 1 file changed, 39 insertions(+), 1 deletion(-)

diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py
index 1d3e18d0..c8ad2ff3 100644
--- a/tests/document/test_instance.py
+++ b/tests/document/test_instance.py
@@ -2825,6 +2825,44 @@ class TestInstance(MongoDBTestCase):
 
         assert "testdb-1" == B._meta.get("db_alias")
 
+    def test_query_counter_alias(self):
+        """query_counter works properly with db aliases?"""
+        # Register a connection with db_alias testdb-1
+        register_connection("testdb-1", "mongoenginetest2")
+
+        class A(Document):
+            """Uses default db_alias
+            """
+
+            name = StringField()
+
+        class B(Document):
+            """Uses testdb-1 db_alias
+            """
+
+            name = StringField()
+            meta = {"db_alias": "testdb-1"}
+
+        with query_counter() as q:
+            assert q == 0
+            a = A.objects.create(name="A")
+            assert q == 1
+            a = A.objects.first()
+            assert q == 2
+            a.name = "Test A"
+            a.save()
+            assert q == 3
+
+        with query_counter(alias="testdb-1") as q:
+            assert q == 0
+            b = B.objects.create(name="B")
+            assert q == 1
+            b = B.objects.first()
+            assert q == 2
+            b.name = "Test B"
+            b.save()
+            assert q == 3
+
     def test_db_ref_usage(self):
         """DB Ref usage in dict_fields."""
 
@@ -3644,7 +3682,7 @@ class TestInstance(MongoDBTestCase):
             User.objects().select_related()
 
     def test_embedded_document_failed_while_loading_instance_when_it_is_not_a_dict(
-        self
+        self,
     ):
         class LightSaber(EmbeddedDocument):
             color = StringField()

From 0458ef869eb07d98c2ebb4da82dc3ca0bcd94a49 Mon Sep 17 00:00:00 2001
From: Filip Kucharczyk
Date: Tue, 3 Dec 2019 00:42:10 +0100
Subject: [PATCH 087/216] Add __eq__ to Q and Q operations

---
 mongoengine/queryset/visitor.py | 12 ++++++++++++
 tests/queryset/test_visitor.py  | 32 ++++++++++++++++++++++++++++++++
 2 files changed, 44 insertions(+)

diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py
index 0fe139fd..058c722a 100644
--- a/mongoengine/queryset/visitor.py
+++ b/mongoengine/queryset/visitor.py
@@ -96,9 +96,11 @@ class QNode(object):
         """Combine this node with another node into a QCombination
         object.
         """
+        # If the other Q() is empty, ignore it and just use `self`.
         if getattr(other, "empty", True):
             return self
+        # Or if this Q is empty, ignore it and just use `other`.
         if self.empty:
             return other
 
@@ -146,6 +148,13 @@ class QCombination(QNode):
     def empty(self):
         return not bool(self.children)
 
+    def __eq__(self, other):
+        return (
+            self.__class__ == other.__class__
+            and self.operation == other.operation
+            and self.children == other.children
+        )
+
 
 class Q(QNode):
     """A simple query object, used in a query tree to build up more complex
@@ -164,3 +173,6 @@ class Q(QNode):
     @property
     def empty(self):
         return not bool(self.query)
+
+    def __eq__(self, other):
+        return self.__class__ == other.__class__ and self.query == other.query
diff --git a/tests/queryset/test_visitor.py b/tests/queryset/test_visitor.py
index e597e3d8..e8504abd 100644
--- a/tests/queryset/test_visitor.py
+++ b/tests/queryset/test_visitor.py
@@ -374,6 +374,38 @@ class TestQ(unittest.TestCase):
             == 2
         )
 
+    def test_equality(self):
+        assert Q(name="John") == Q(name="John")
+        assert Q() == Q()
+
+    def test_inequality(self):
+        assert Q(name="John") != Q(name="Ralph")
+
+    def test_operation_equality(self):
+        q1 = Q(name="John") | Q(title="Sir") & Q(surname="Paul")
+        q2 = Q(name="John") | Q(title="Sir") & Q(surname="Paul")
+        assert q1 == q2
+
+    def test_operation_inequality(self):
+        q1 = Q(name="John") | Q(title="Sir")
+        q2 = Q(title="Sir") | Q(name="John")
+        assert q1 != q2
+
+    def test_combine_and_empty(self):
+        q = Q(x=1)
+        assert q & Q() == q
+        assert Q() & q == q
+
+    def test_combine_and_both_empty(self):
+        assert Q() & Q() == Q()
+
+    def test_combine_or_empty(self):
+        q = Q(x=1)
+        assert q | Q() == q
+        assert Q() | q == q
+
+    def test_combine_or_both_empty(self):
+        assert Q() | Q() == Q()

From 091238a2cfd3e77fba724ad8264bae78c360c675 Mon Sep 17 00:00:00 2001
From: Filip Kucharczyk
Date: Tue, 3 Dec 2019 00:54:46 +0100
Subject: [PATCH 088/216] Update Authors

---
 AUTHORS | 1 +
 1 file changed, 1 insertion(+)

diff --git a/AUTHORS b/AUTHORS
index 45a754cc..aa044bd2 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -252,3 +252,4 @@ that much better:
  * Paulo Amaral (https://github.com/pauloAmaral)
  * Gaurav Dadhania (https://github.com/GVRV)
  * Yurii Andrieiev (https://github.com/yandrieiev)
+ * Filip Kucharczyk (https://github.com/Pacu2)

From f7f0e10d4d3748381007617a119758e40bdd76bb Mon Sep 17 00:00:00 2001
From: Filip Kucharczyk
Date: Tue, 3 Dec 2019 00:54:53 +0100
Subject: [PATCH 089/216] Update changelog

---
 docs/changelog.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/changelog.rst b/docs/changelog.rst
index 0b4893a6..102e826d 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -23,6 +23,7 @@ Development
 - Switch from nosetest to pytest as test runner #2114
 - The codebase is now formatted using ``black``. #2109
 - In bulk write insert, the detailed error message would raise in exception.
+- Added ability to compare Q and Q operations #2204
 
 Changes in 0.18.2
 =================

From 3f75f30f2675375fef0bf14fdbff63480676e056 Mon Sep 17 00:00:00 2001
From: Filip Kucharczyk
Date: Tue, 3 Dec 2019 09:03:49 +0100
Subject: [PATCH 090/216] Run black

---
 tests/queryset/test_visitor.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/queryset/test_visitor.py b/tests/queryset/test_visitor.py
index e8504abd..afa00839 100644
--- a/tests/queryset/test_visitor.py
+++ b/tests/queryset/test_visitor.py
@@ -407,5 +407,6 @@ class TestQ(unittest.TestCase):
     def test_combine_or_both_empty(self):
         assert Q() | Q() == Q()
 
+
 if __name__ == "__main__":
     unittest.main()

From af82c07acc3226f7ed65818536680343f2fd83c6 Mon Sep 17 00:00:00 2001
From: Filip Kucharczyk
Date: Tue, 3 Dec 2019 09:19:02 +0100
Subject: [PATCH 091/216] Reformat with black

---
 tests/document/test_instance.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py
index 1d3e18d0..173e02f2 100644
--- a/tests/document/test_instance.py
+++ b/tests/document/test_instance.py
@@ -3644,7 +3644,7 @@ class TestInstance(MongoDBTestCase):
             User.objects().select_related()
 
     def test_embedded_document_failed_while_loading_instance_when_it_is_not_a_dict(
-        self
+        self,
     ):
         class LightSaber(EmbeddedDocument):
             color = StringField()

From 78b240b740b34de450d30a00c669a9283a8b37de Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bastien=20G=C3=A9rard?=
Date: Wed, 4 Dec 2019 21:49:17 +0100
Subject: [PATCH 092/216] updated changelog + improved query_counter test

---
 docs/changelog.rst              |  1 +
 mongoengine/context_managers.py |  2 +-
 tests/document/test_instance.py | 38 ---------------------------
 tests/test_context_managers.py  | 46 +++++++++++++++++++++++++++++++++
 4 files changed, 48 insertions(+), 39 deletions(-)

diff --git a/docs/changelog.rst b/docs/changelog.rst
index 102e826d..99081957 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -24,6 +24,7 @@ Development
 - The codebase is now formatted using ``black``. #2109
 - In bulk write insert, the detailed error message would raise in exception.
 - Added ability to compare Q and Q operations #2204
+- Added ability to use a db alias on query_counter #2194
 
 Changes in 0.18.2
 =================
diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py
index 5920b724..1592ceef 100644
--- a/mongoengine/context_managers.py
+++ b/mongoengine/context_managers.py
@@ -171,7 +171,7 @@ class no_sub_classes(object):
 
 class query_counter(object):
     """Query_counter context manager to get the number of queries.
     This works by updating the `profiling_level` of the database so that all queries get logged,
-    resetting the db.system.profile collection at the beginnig of the context and counting the new entries.
+    resetting the db.system.profile collection at the beginning of the context and counting the new entries.
 
     This was designed for debugging purpose.
In fact it is a global counter so queries issued by other threads/processes can interfere with it diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index c8ad2ff3..173e02f2 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -2825,44 +2825,6 @@ class TestInstance(MongoDBTestCase): assert "testdb-1" == B._meta.get("db_alias") - def test_query_counter_alias(self): - """query_counter works properly with db aliases?""" - # Register a connection with db_alias testdb-1 - register_connection("testdb-1", "mongoenginetest2") - - class A(Document): - """Uses default db_alias - """ - - name = StringField() - - class B(Document): - """Uses testdb-1 db_alias - """ - - name = StringField() - meta = {"db_alias": "testdb-1"} - - with query_counter() as q: - assert q == 0 - a = A.objects.create(name="A") - assert q == 1 - a = A.objects.first() - assert q == 2 - a.name = "Test A" - a.save() - assert q == 3 - - with query_counter(alias="testdb-1") as q: - assert q == 0 - b = B.objects.create(name="B") - assert q == 1 - b = B.objects.first() - assert q == 2 - b.name = "Test B" - b.save() - assert q == 3 - def test_db_ref_usage(self): """DB Ref usage in dict_fields.""" diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index c10a0224..fa3f5960 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -282,6 +282,52 @@ class TestContextManagers: assert q < 1000 assert q <= int(q) + def test_query_counter_alias(self): + """query_counter works properly with db aliases?""" + # Register a connection with db_alias testdb-1 + register_connection("testdb-1", "mongoenginetest2") + + class A(Document): + """Uses default db_alias""" + + name = StringField() + + class B(Document): + """Uses testdb-1 db_alias""" + + name = StringField() + meta = {"db_alias": "testdb-1"} + + A.drop_collection() + B.drop_collection() + + with query_counter() as q: + assert q == 0 + A.objects.create(name="A") + assert q == 1 + a = A.objects.first() + assert q == 2 + a.name = "Test A" + a.save() + assert q == 3 + # querying the other db should'nt alter the counter + B.objects().first() + assert q == 3 + + with query_counter(alias="testdb-1") as q: + assert q == 0 + B.objects.create(name="B") + assert q == 1 + b = B.objects.first() + assert q == 2 + b.name = "Test B" + b.save() + assert b.name == "Test B" + assert q == 3 + # querying the other db should'nt alter the counter + A.objects().first() + assert q == 3 + def test_query_counter_counts_getmore_queries(self): connect("mongoenginetest") db = get_db() From cb77bb6b69bb80c79c97d2f0792d173fc2f443d4 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 00:21:03 +0100 Subject: [PATCH 093/216] Implement __bool__ on Q and QCombination --- mongoengine/queryset/base.py | 2 +- mongoengine/queryset/visitor.py | 20 ++++++++++++++++---- tests/queryset/test_visitor.py | 11 +++++++++++ 3 files changed, 28 insertions(+), 5 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index a648391e..c6f467cc 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -686,7 +686,7 @@ class BaseQuerySet(object): .. 
versionchanged:: 0.6 Raises InvalidQueryError if filter has been set """ queryset = self.clone() - if not queryset._query_obj.empty: + if queryset._query_obj: msg = "Cannot use a filter whilst using `with_id`" raise InvalidQueryError(msg) return queryset.filter(pk=object_id).first() diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 058c722a..a7295ae5 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -2,6 +2,8 @@ import copy from mongoengine.errors import InvalidQueryError from mongoengine.queryset import transform +import warnings + __all__ = ("Q", "QNode") @@ -101,13 +103,15 @@ class QNode(object): return self # Or if this Q is empty, ignore it and just use `other`. - if self.empty: + if not self: return other return QCombination(operation, [self, other]) @property def empty(self): + msg = "'empty' property is deprecated in favour of using 'not bool(filter)" + warnings.warn(msg, DeprecationWarning) return False def __or__(self, other): @@ -137,6 +141,9 @@ class QCombination(QNode): op = " & " if self.operation is self.AND else " | " return "(%s)" % op.join([repr(node) for node in self.children]) + def __bool__(self): + return bool(self.children) + def accept(self, visitor): for i in range(len(self.children)): if isinstance(self.children[i], QNode): @@ -146,6 +153,8 @@ class QCombination(QNode): @property def empty(self): + msg = "'empty' property is deprecated in favour of using 'not bool(filter)" + warnings.warn(msg, DeprecationWarning) return not bool(self.children) def __eq__(self, other): @@ -167,12 +176,15 @@ class Q(QNode): def __repr__(self): return "Q(**%s)" % repr(self.query) + def __bool__(self): + return bool(self.query) + + def __eq__(self, other): + return self.__class__ == other.__class__ and self.query == other.query + def accept(self, visitor): return visitor.visit_query(self) @property def empty(self): return not bool(self.query) - - def __eq__(self, other): - return self.__class__ == other.__class__ and self.query == other.query diff --git a/tests/queryset/test_visitor.py b/tests/queryset/test_visitor.py index afa00839..81e0f253 100644 --- a/tests/queryset/test_visitor.py +++ b/tests/queryset/test_visitor.py @@ -407,6 +407,17 @@ class TestQ(unittest.TestCase): def test_combine_or_both_empty(self): assert Q() | Q() == Q() + def test_q_bool(self): + assert Q(name="John") + assert not Q() + + def test_combine_bool(self): + assert not Q() & Q() + assert Q() & Q(name="John") + assert Q(name="John") & Q() + assert Q() | Q(name="John") + assert Q(name="John") | Q() + if __name__ == "__main__": unittest.main() From bd6c52e025fbe60473a6f009b100eb4d8edbfe83 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 00:30:03 +0100 Subject: [PATCH 094/216] Changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 99081957..e2ffa41e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -25,6 +25,7 @@ Development - In bulk write insert, the detailed error message would raise in exception. 
- Added ability to compare Q and Q operations #2204 - Added ability to use a db alias on query_counter #2194 +- Added ability to check if Q or Q operations is empty by parsing them to bool #2210 Changes in 0.18.2 ================= From 5f14d958ac32925df18e757f77f729f3bfb79c5a Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 00:46:57 +0100 Subject: [PATCH 095/216] Sort imports --- mongoengine/queryset/visitor.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index a7295ae5..8038d23f 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -1,9 +1,8 @@ import copy +import warnings from mongoengine.errors import InvalidQueryError from mongoengine.queryset import transform -import warnings - __all__ = ("Q", "QNode") From 17151f67c2cdd98e97f3f4f539af167b84927ee2 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 00:53:39 +0100 Subject: [PATCH 096/216] Reformat repo with pre-commit hooks --- docs/django.rst | 4 ++-- docs/faq.rst | 1 - docs/guide/connecting.rst | 2 +- docs/guide/mongomock.rst | 4 ++-- docs/guide/signals.rst | 4 ++-- docs/guide/text-indexes.rst | 6 +++--- docs/index.rst | 1 - mongoengine/queryset/base.py | 4 +++- python-mongoengine.spec | 2 +- 9 files changed, 14 insertions(+), 14 deletions(-) diff --git a/docs/django.rst b/docs/django.rst index b8a52165..d43a205e 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -13,7 +13,7 @@ Help Wanted! The MongoEngine team is looking for help contributing and maintaining a new Django extension for MongoEngine! If you have Django experience and would like -to help contribute to the project, please get in touch on the -`mailing list `_ or by +to help contribute to the project, please get in touch on the +`mailing list `_ or by simply contributing on `GitHub `_. diff --git a/docs/faq.rst b/docs/faq.rst index 27cd6937..49c73023 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -10,4 +10,3 @@ If this is a requirement for your project, check the alternative: `uMongo`_ and .. _uMongo: https://umongo.readthedocs.io/ .. _MotorEngine: https://motorengine.readthedocs.io/ - diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index aac13902..ac2146a6 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -86,7 +86,7 @@ using 3 different databases to store data:: connect(alias='user-db-alias', db='user-db') connect(alias='book-db-alias', db='book-db') connect(alias='users-books-db-alias', db='users-books-db') - + class User(Document): name = StringField() diff --git a/docs/guide/mongomock.rst b/docs/guide/mongomock.rst index d70ee6a6..9f199ce5 100644 --- a/docs/guide/mongomock.rst +++ b/docs/guide/mongomock.rst @@ -2,10 +2,10 @@ Use mongomock for testing ============================== -`mongomock `_ is a package to do just +`mongomock `_ is a package to do just what the name implies, mocking a mongo database. -To use with mongoengine, simply specify mongomock when connecting with +To use with mongoengine, simply specify mongomock when connecting with mongoengine: .. code-block:: python diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 06bccb3b..e5214610 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -44,8 +44,8 @@ Available signals include: `post_save` Called within :meth:`~mongoengine.Document.save` after most actions - (validation, insert/update, and cascades, but not clearing dirty flags) have - completed successfully. 
Passed the additional boolean keyword argument + (validation, insert/update, and cascades, but not clearing dirty flags) have + completed successfully. Passed the additional boolean keyword argument `created` to indicate if the save was an insert or an update. `pre_delete` diff --git a/docs/guide/text-indexes.rst b/docs/guide/text-indexes.rst index 92a4471a..a5eaf7d8 100644 --- a/docs/guide/text-indexes.rst +++ b/docs/guide/text-indexes.rst @@ -8,7 +8,7 @@ After MongoDB 2.4 version, supports search documents by text indexes. Defining a Document with text index =================================== Use the *$* prefix to set a text index, Look the declaration:: - + class News(Document): title = StringField() content = StringField() @@ -35,10 +35,10 @@ Saving a document:: content="Various improvements").save() Next, start a text search using :attr:`QuerySet.search_text` method:: - + document = News.objects.search_text('testing').first() document.title # may be: "Using mongodb text search" - + document = News.objects.search_text('released').first() document.title # may be: "MongoEngine 0.9 released" diff --git a/docs/index.rst b/docs/index.rst index 662968d4..15f3c590 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -91,4 +91,3 @@ Indices and tables * :ref:`genindex` * :ref:`modindex` * :ref:`search` - diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index a648391e..a09cbf99 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1193,7 +1193,9 @@ class BaseQuerySet(object): validate_read_preference("read_preference", read_preference) queryset = self.clone() queryset._read_preference = read_preference - queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference + queryset._cursor_obj = ( + None + ) # we need to re-create the cursor object whenever we apply read_preference return queryset def scalar(self, *fields): diff --git a/python-mongoengine.spec b/python-mongoengine.spec index eddb488d..635c779f 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT # %{python_sitearch}/* %changelog -* See: http://docs.mongoengine.org/en/latest/changelog.html \ No newline at end of file +* See: http://docs.mongoengine.org/en/latest/changelog.html From 5b9f2bac87cafab072e224f9f5c0caa123d26c8f Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 00:54:20 +0100 Subject: [PATCH 097/216] Add pre-commit --- .pre-commit-config.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..cac25e41 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,13 @@ +repos: + - repo: https://github.com/ambv/black + rev: 19.3b0 + hooks: + - id: black + language_version: python3 + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.2.3 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: flake8 From 648b28876d152be2a6e1f28b79ddb4f82e05e78b Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 00:55:16 +0100 Subject: [PATCH 098/216] Rename requirements-lint.txt to requirements-dev.txt --- requirements-lint.txt => requirements-dev.txt | 4 ++++ 1 file changed, 4 insertions(+) rename requirements-lint.txt => requirements-dev.txt (51%) diff --git a/requirements-lint.txt b/requirements-dev.txt similarity index 51% rename from requirements-lint.txt rename to requirements-dev.txt index 
9dc6123b..e57131c5 100644 --- a/requirements-lint.txt +++ b/requirements-dev.txt @@ -1,3 +1,7 @@ black flake8 flake8-import-order +pre-commit +pytest +ipdb +ipython From abc159b7b95380d0da6f9bc7cef427a31f3139e6 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 01:10:54 +0100 Subject: [PATCH 099/216] Update Contributing, changelog --- CONTRIBUTING.rst | 24 ++++++++++++++++++++++-- docs/changelog.rst | 2 ++ requirements-dev.txt | 1 + 3 files changed, 25 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 56bae31f..b571acf1 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -31,8 +31,28 @@ build. You should ensure that your code is properly converted by Style Guide ----------- -MongoEngine uses `black `_ for code -formatting. +MongoEngine uses various tools to maintain a common coding style. + +To install all development tools, simply run the following commands: + +.. code-block:: console + + $ python -m pip install -r requirements-dev.txt + + +You can install `pre-commit `_ into your git hooks, +to automatically check and fix any formatting issue before creating a +git commit. + +Simply run the following command: + +.. code-block:: console + + $ pre-commit install + +See the ``.pre-commit-config.yaml`` configuration file for more information +on how it works. + Testing ------- diff --git a/docs/changelog.rst b/docs/changelog.rst index 99081957..933d0231 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -25,6 +25,8 @@ Development - In bulk write insert, the detailed error message would raise in exception. - Added ability to compare Q and Q operations #2204 - Added ability to use a db alias on query_counter #2194 +- Added pre-commit +- Renamed requirements-lint.txt to requirements-dev.txt Changes in 0.18.2 ================= diff --git a/requirements-dev.txt b/requirements-dev.txt index e57131c5..ee788e7a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,3 +5,4 @@ pre-commit pytest ipdb ipython +tox From 37ffeafeff79e615ca0aa3140b5db638a1cd252b Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 01:13:05 +0100 Subject: [PATCH 100/216] Replace 'pip' with 'python -m pip install' in docs --- README.rst | 6 +++--- docs/guide/installing.rst | 2 +- docs/index.rst | 2 +- docs/tutorial.rst | 2 +- docs/upgrade.rst | 8 ++++---- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/README.rst b/README.rst index 853d8fbe..bae7c52c 100644 --- a/README.rst +++ b/README.rst @@ -34,7 +34,7 @@ with MongoDB version > 3.6. Installation ============ We recommend the use of `virtualenv `_ and of -`pip `_. You can then use ``pip install -U mongoengine``. +`pip `_. You can then use ``python -m pip install -U mongoengine``. You may also have `setuptools `_ and thus you can use ``easy_install -U mongoengine``. Another option is `pipenv `_. You can then use ``pipenv install mongoengine`` @@ -44,7 +44,7 @@ run ``python setup.py install``. Dependencies ============ -All of the dependencies can easily be installed via `pip `_. +All of the dependencies can easily be installed via `python -m pip `_. At the very least, you'll need these two packages to use MongoEngine: - pymongo>=3.4 @@ -126,7 +126,7 @@ installed in your environment and then: .. 
code-block:: shell # Install tox - $ pip install tox + $ python -m pip install tox # Run the test suites $ tox diff --git a/docs/guide/installing.rst b/docs/guide/installing.rst index b89d48f0..2c962ad9 100644 --- a/docs/guide/installing.rst +++ b/docs/guide/installing.rst @@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`: .. code-block:: console - $ pip install mongoengine + $ python -m pip install mongoengine Alternatively, if you don't have setuptools installed, `download it from PyPi `_ and run diff --git a/docs/index.rst b/docs/index.rst index 15f3c590..a42ff857 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,7 +7,7 @@ MongoDB. To install it, simply run .. code-block:: console - $ pip install -U mongoengine + $ python -m pip install -U mongoengine :doc:`tutorial` A quick tutorial building a tumblelog to get you up and running with diff --git a/docs/tutorial.rst b/docs/tutorial.rst index bcd0d17f..b7885c34 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option then it may be run on a remote server. If you haven't installed MongoEngine, simply use pip to install it like so:: - $ pip install mongoengine + $ python -m pip install mongoengine Before we can start using MongoEngine, we need to tell it how to connect to our instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 082dbadc..285bf24c 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -85,10 +85,10 @@ by default from now on. The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: :: - pip uninstall pymongo - pip uninstall mongoengine - pip install pymongo==2.8 - pip install mongoengine + python -m pip uninstall pymongo + python -m pip uninstall mongoengine + python -m pip install pymongo==2.8 + python -m pip install mongoengine 0.8.7 ***** From d3d7f0e670b09fdd571fa6f2cf18ee36b793b6c7 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 01:18:29 +0100 Subject: [PATCH 101/216] Changelog --- CONTRIBUTING.rst | 2 +- docs/changelog.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index b571acf1..27759f8f 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -44,7 +44,7 @@ You can install `pre-commit `_ into your git hooks, to automatically check and fix any formatting issue before creating a git commit. -Simply run the following command: +To enable ``pre-commit`` simply run: .. code-block:: console diff --git a/docs/changelog.rst b/docs/changelog.rst index 933d0231..38f1a85e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -25,8 +25,8 @@ Development - In bulk write insert, the detailed error message would raise in exception. 
- Added ability to compare Q and Q operations #2204 - Added ability to use a db alias on query_counter #2194 -- Added pre-commit -- Renamed requirements-lint.txt to requirements-dev.txt +- Added pre-commit #2212 +- Renamed requirements-lint.txt to requirements-dev.txt #2212 Changes in 0.18.2 ================= From 90fecc56ddf53872f72774b75d27090acd6315ff Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 01:33:35 +0100 Subject: [PATCH 102/216] Reformat with black --- mongoengine/queryset/base.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index a09cbf99..a648391e 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1193,9 +1193,7 @@ class BaseQuerySet(object): validate_read_preference("read_preference", read_preference) queryset = self.clone() queryset._read_preference = read_preference - queryset._cursor_obj = ( - None - ) # we need to re-create the cursor object whenever we apply read_preference + queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference return queryset def scalar(self, *fields): From 6e8196d475953f88bd70207e81234bc07e1526d0 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Thu, 5 Dec 2019 01:31:37 +0100 Subject: [PATCH 103/216] Python 2.x compatibility --- mongoengine/queryset/visitor.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 8038d23f..7faed897 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -143,6 +143,8 @@ class QCombination(QNode): def __bool__(self): return bool(self.children) + __nonzero__ = __bool__ # For Py2 support + def accept(self, visitor): for i in range(len(self.children)): if isinstance(self.children[i], QNode): @@ -178,6 +180,8 @@ class Q(QNode): def __bool__(self): return bool(self.query) + __nonzero__ = __bool__ # For Py2 support + def __eq__(self, other): return self.__class__ == other.__class__ and self.query == other.query From 1b38309d70efc122720d9c5d3fcc6d362436ed62 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Fri, 6 Dec 2019 10:14:22 +0100 Subject: [PATCH 104/216] Revert 'empty' usage to it's previous state --- mongoengine/queryset/base.py | 2 +- mongoengine/queryset/visitor.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index c6f467cc..a648391e 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -686,7 +686,7 @@ class BaseQuerySet(object): .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set """ queryset = self.clone() - if queryset._query_obj: + if not queryset._query_obj.empty: msg = "Cannot use a filter whilst using `with_id`" raise InvalidQueryError(msg) return queryset.filter(pk=object_id).first() diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 7faed897..470839c1 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -102,14 +102,14 @@ class QNode(object): return self # Or if this Q is empty, ignore it and just use `other`. 
- if not self: + if self.empty: return other return QCombination(operation, [self, other]) @property def empty(self): - msg = "'empty' property is deprecated in favour of using 'not bool(filter)" + msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" warnings.warn(msg, DeprecationWarning) return False @@ -154,7 +154,7 @@ class QCombination(QNode): @property def empty(self): - msg = "'empty' property is deprecated in favour of using 'not bool(filter)" + msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" warnings.warn(msg, DeprecationWarning) return not bool(self.children) From e83132f32c254f04cb505d1ede1d90f0dac84b18 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Tue, 10 Dec 2019 11:51:33 +0100 Subject: [PATCH 105/216] Note deprecation of 'empty' in changelog --- docs/changelog.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e2ffa41e..bc01a403 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -17,6 +17,9 @@ Development - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. +- DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 + - Added ability to check if Q or QNode are empty by parsing them to bool. + - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. - Improve error message related to InvalidDocumentError #2180 - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 @@ -25,7 +28,6 @@ Development - In bulk write insert, the detailed error message would raise in exception. - Added ability to compare Q and Q operations #2204 - Added ability to use a db alias on query_counter #2194 -- Added ability to check if Q or Q operations is empty by parsing them to bool #2210 Changes in 0.18.2 ================= From b2053144241f8f579993eff2c4de3084f074535f Mon Sep 17 00:00:00 2001 From: Matt Simpson Date: Tue, 10 Dec 2019 11:09:22 -0500 Subject: [PATCH 106/216] Add ability for dict keys to have . or $ in MongoDB >= 3.6 Starting in MongoDB >= 3.6, it is valid for dictionary keys to have $ or . in them as long as they don't start with $. Additional tests added. 
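A minimal sketch of the validation behaviour this change enables, assuming a running MongoDB 3.6+ server; the ``BlogPost`` model, the ``connect()`` database name and the key values are illustrative and not taken from the patch::

    from mongoengine import Document, DictField, ValidationError, connect

    connect("dictfield-example")  # illustrative database name, assumes a local mongod >= 3.6

    class BlogPost(Document):
        info = DictField()

    post = BlogPost(info={"the.title": "dotted keys are accepted on 3.6+"})
    post.validate()  # passes against MongoDB >= 3.6, raises ValidationError on older servers

    post.info = {"$title": "keys may still not start with a dollar sign"}
    try:
        post.validate()
    except ValidationError:
        pass  # rejected regardless of the server version

On servers older than 3.6 the dotted key is still rejected, which is what the updated tests below assert for both branches.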
--- AUTHORS | 1 + mongoengine/fields.py | 19 ++++++++++++++++++- tests/fields/test_dict_field.py | 21 ++++++++++++++++----- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/AUTHORS b/AUTHORS index aa044bd2..1271a8d9 100644 --- a/AUTHORS +++ b/AUTHORS @@ -253,3 +253,4 @@ that much better: * Gaurav Dadhania (https://github.com/GVRV) * Yurii Andrieiev (https://github.com/yandrieiev) * Filip Kucharczyk (https://github.com/Pacu2) + * Matthew Simpson (https://github.com/mcsimps2) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f8f527a3..c3d93740 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -41,6 +41,7 @@ from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError +from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version from mongoengine.python_support import StringIO from mongoengine.queryset import DO_NOTHING from mongoengine.queryset.base import BaseQuerySet @@ -1051,6 +1052,15 @@ def key_has_dot_or_dollar(d): return True +def key_starts_with_dollar(d): + """Helper function to recursively determine if any key in a + dictionary starts with a dollar + """ + for k, v in d.items(): + if (k.startswith("$")) or (isinstance(v, dict) and key_starts_with_dollar(v)): + return True + + class DictField(ComplexBaseField): """A dictionary field that wraps a standard Python dictionary. This is similar to an embedded document, but the structure is not defined. @@ -1077,11 +1087,18 @@ class DictField(ComplexBaseField): if key_not_string(value): msg = "Invalid dictionary key - documents must have only string keys" self.error(msg) - if key_has_dot_or_dollar(value): + + curr_mongo_ver = get_mongodb_version() + + if curr_mongo_ver < MONGODB_36 and key_has_dot_or_dollar(value): self.error( 'Invalid dictionary key name - keys may not contain "."' ' or startswith "$" characters' ) + elif curr_mongo_ver >= MONGODB_36 and key_starts_with_dollar(value): + self.error( + 'Invalid dictionary key name - keys may not startswith "$" characters' + ) super(DictField, self).validate(value) def lookup_member(self, member_name): diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index e88128f9..44e628f6 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -3,6 +3,7 @@ import pytest from mongoengine import * from mongoengine.base import BaseDict +from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version from tests.utils import MongoDBTestCase, get_as_pymongo @@ -43,11 +44,7 @@ class TestDictField(MongoDBTestCase): with pytest.raises(ValidationError): post.validate() - post.info = {"the.title": "test"} - with pytest.raises(ValidationError): - post.validate() - - post.info = {"nested": {"the.title": "test"}} + post.info = {"$title.test": "test"} with pytest.raises(ValidationError): post.validate() @@ -55,6 +52,20 @@ class TestDictField(MongoDBTestCase): with pytest.raises(ValidationError): post.validate() + post.info = {"nested": {"the.title": "test"}} + if get_mongodb_version() < MONGODB_36: + with pytest.raises(ValidationError): + post.validate() + else: + post.validate() + + post.info = {"dollar_and_dot": {"te$st.test": "test"}} + if get_mongodb_version() < MONGODB_36: + with pytest.raises(ValidationError): + post.validate() + else: + post.validate() + post.info = {"title": "test"} post.save() From 
3b099f936a02444b3bf02c7dcdf13b1f2fc3b895 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 13 Dec 2019 21:32:45 +0100 Subject: [PATCH 107/216] provide additional details on how inheritance works in doc --- docs/guide/defining-documents.rst | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 9dcca88c..652c5cd9 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -744,7 +744,7 @@ Document inheritance To create a specialised type of a :class:`~mongoengine.Document` you have defined, you may subclass it and add any extra fields or methods you may need. -As this is new class is not a direct subclass of +As this new class is not a direct subclass of :class:`~mongoengine.Document`, it will not be stored in its own collection; it will use the same collection as its superclass uses. This allows for more convenient and efficient retrieval of related documents -- all you need do is @@ -767,6 +767,27 @@ document.:: Setting :attr:`allow_inheritance` to True should also be used in :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it +When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query +both `Page` and `DatedPage` whereas querying `DatedPage` will only query the `DatedPage` documents. +Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains +the class name in every documents. When a document is loaded, MongoEngine checks +it's :attr:`_cls` attribute and use that class to construct the instance.:: + + Page(title='a funky title').save() + DatedPage(title='another title', date=datetime.utcnow()).save() + + print(Page.objects().count()) # 2 + print(DatedPage.objects().count()) # 1 + + # print documents in their native form + # we remove 'id' to avoid polluting the output with unnecessary detail + qs = Page.objects.exclude('id').as_pymongo() + print(list(qs)) + # [ + # {'_cls': u 'Page', 'title': 'a funky title'}, + # {'_cls': u 'Page.DatedPage', 'title': u 'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)} + # ] + Working with existing data -------------------------- As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and From 2ca905b6e53794a4725d7519b8aca64f4d2ebd31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 14 Jun 2019 23:30:01 +0200 Subject: [PATCH 108/216] Finalize python2/3 codebase compatibility and get rid of 2to3 --- mongoengine/base/datastructures.py | 4 ++-- mongoengine/base/document.py | 4 ++-- mongoengine/fields.py | 8 +------- mongoengine/queryset/base.py | 2 +- setup.py | 4 ++-- tests/fields/test_embedded_document_field.py | 10 ++++++---- tests/fields/test_long_field.py | 6 +----- tests/fields/test_sequence_field.py | 14 +++++++------- tests/fields/test_url_field.py | 7 ++++--- tests/queryset/test_queryset.py | 8 ++++---- tests/test_signals.py | 2 +- 11 files changed, 31 insertions(+), 38 deletions(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index d1b5ae76..a9abf6ea 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -422,10 +422,10 @@ class StrictDict(object): return len(list(iteritems(self))) def __eq__(self, other): - return self.items() == other.items() + return list(self.items()) == list(other.items()) def __ne__(self, other): - return 
self.items() != other.items() + return list(self.items()) != list(other.items()) @classmethod def create(cls, allowed_keys): diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index ad691362..4948e672 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -92,7 +92,7 @@ class BaseDocument(object): # if so raise an Exception. if not self._dynamic and (self._meta.get("strict", True) or _created): _undefined_fields = set(values.keys()) - set( - self._fields.keys() + ["id", "pk", "_cls", "_text_score"] + list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"] ) if _undefined_fields: msg = ('The fields "{0}" do not exist on the document "{1}"').format( @@ -670,7 +670,7 @@ class BaseDocument(object): del set_data["_id"] # Determine if any changed items were actually unset. - for path, value in set_data.items(): + for path, value in list(set_data.items()): if value or isinstance( value, (numbers.Number, bool) ): # Account for 0 and True that are truthy diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f8f527a3..bef85dd7 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -8,6 +8,7 @@ import uuid from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON +from bson.int64 import Int64 import gridfs import pymongo from pymongo import ReturnDocument @@ -21,11 +22,6 @@ except ImportError: else: import dateutil.parser -try: - from bson.int64 import Int64 -except ImportError: - Int64 = long - from mongoengine.base import ( BaseDocument, @@ -53,8 +49,6 @@ except ImportError: ImageOps = None if six.PY3: - # Useless as long as 2to3 gets executed - # as it turns `long` into `int` blindly long = int diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index a648391e..d3176050 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -989,7 +989,7 @@ class BaseQuerySet(object): .. 
versionchanged:: 0.5 - Added subfield support """ fields = {f: QueryFieldList.ONLY for f in fields} - self.only_fields = fields.keys() + self.only_fields = list(fields.keys()) return self.fields(True, **fields) def exclude(self, *fields): diff --git a/setup.py b/setup.py index ceb5afad..c6e99f53 100644 --- a/setup.py +++ b/setup.py @@ -118,8 +118,8 @@ extra_opts = { "Pillow>=2.0.0", ], } + if sys.version_info[0] == 3: - extra_opts["use_2to3"] = True if "test" in sys.argv: extra_opts["packages"] = find_packages() extra_opts["package_data"] = { @@ -143,7 +143,7 @@ setup( long_description=LONG_DESCRIPTION, platforms=["any"], classifiers=CLASSIFIERS, - install_requires=["pymongo>=3.4", "six>=1.10.0"], + install_requires=['pymongo>=3.4', 'six', 'future'], cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index eeddac1e..b80f4d8c 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from builtins import str + import pytest from mongoengine import ( @@ -75,7 +77,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -111,7 +113,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id @@ -319,7 +321,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -347,7 +349,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py index da4f04c8..b39a714c 100644 --- a/tests/fields/test_long_field.py +++ b/tests/fields/test_long_field.py @@ -1,12 +1,8 @@ # -*- coding: utf-8 -*- import pytest +from bson.int64 import Int64 import six -try: - from bson.int64 import Int64 -except ImportError: - Int64 = long - from mongoengine import * from mongoengine.connection import get_db diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py index aa83f710..f96f6b06 100644 --- a/tests/fields/test_sequence_field.py +++ b/tests/fields/test_sequence_field.py @@ -21,7 +21,7 @@ class 
TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 @@ -76,7 +76,7 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) assert c["next"] == 10 @@ -101,10 +101,10 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) counters = [i.counter for i in Person.objects] - assert counters == range(1, 11) + assert counters == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 @@ -166,10 +166,10 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) id = [i.id for i in Animal.objects] - assert id == range(1, 11) + assert id == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 @@ -193,7 +193,7 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == map(str, range(1, 11)) + assert ids == [str(i) for i in range(1, 11)] c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index 948a4788..98f5d4fd 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- import pytest +from builtins import str + from mongoengine import * from tests.utils import MongoDBTestCase @@ -35,9 +37,8 @@ class TestURLField(MongoDBTestCase): with pytest.raises(ValidationError) as exc_info: link.validate() assert ( - unicode(exc_info.value) - == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" - ) + str(exc_info.exception) + == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])") def test_url_scheme_validation(self): """Ensure that URLFields validate urls with specific schemes properly. 
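The same handful of Python 2/3 idioms recurs across the files touched in this patch; a condensed, standalone sketch of them follows (the variable names are illustrative, and it assumes the ``future`` package that this patch adds to ``install_requires``)::

    from builtins import range, str  # python-future backports; str behaves like unicode on Python 2

    ids = [i for i in range(1, 11)]
    assert ids == list(range(1, 11))      # a list never equals a bare range() on Python 3

    labels = [str(i) for i in ids]        # map(str, ...) returns an iterator on Python 3

    it = iter(ids)
    first = next(it)                      # next(it) works on both versions, it.next() is Python 2 only

    keys = list({"a": 1}.keys()) + ["b"]  # dict views cannot be concatenated with a list on Python 3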
diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 7812ab66..49dab169 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -110,7 +110,7 @@ class TestQueryset(unittest.TestCase): # Filter people by age people = self.Person.objects(age=20) assert people.count() == 1 - person = people.next() + person = next(people) assert person == user_a assert person.name == "User A" assert person.age == 20 @@ -2768,7 +2768,7 @@ class TestQueryset(unittest.TestCase): ) # start a map/reduce - cursor.next() + next(cursor) results = Person.objects.map_reduce( map_f=map_person, @@ -4395,7 +4395,7 @@ class TestQueryset(unittest.TestCase): # Use a query to filter the people found to just person1 people = self.Person.objects(age=20).scalar("name") assert people.count() == 1 - person = people.next() + person = next(people) assert person == "User A" # Test limit @@ -5309,7 +5309,7 @@ class TestQueryset(unittest.TestCase): if not test: raise AssertionError("Cursor has data and returned False") - queryset.next() + next(queryset) if not queryset: raise AssertionError( "Cursor has data and it must returns True, even in the last item." diff --git a/tests/test_signals.py b/tests/test_signals.py index d79eaf75..13ea5a05 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -58,7 +58,7 @@ class TestSignal(unittest.TestCase): @classmethod def post_save(cls, sender, document, **kwargs): - dirty_keys = document._delta()[0].keys() + document._delta()[1].keys() + dirty_keys = list(document._delta()[0].keys()) + list(document._delta()[1].keys()) signal_output.append("post_save signal, %s" % document) signal_output.append("post_save dirty keys, %s" % dirty_keys) if kwargs.pop("created", False): From 009f9a2b14fb1bde3465ea702784c1381d617c9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 15 Jun 2019 12:24:13 +0200 Subject: [PATCH 109/216] set dist as xenial to avoid relying on flaky travis default dist --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index cbf34cde..0a96e762 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,6 +28,8 @@ python: dist: xenial +dist: xenial + env: global: - MONGODB_3_4=3.4.17 From a3e432eb68db6be7944ad1cf96c0526d7a10d39d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 15 Jun 2019 14:37:47 +0200 Subject: [PATCH 110/216] remove references to '2to3' in doc, travis, etc --- .landscape.yml | 9 ++------- .travis.yml | 4 ++-- CONTRIBUTING.rst | 18 +++++++++++++----- benchmarks/test_inserts.py | 7 +++++++ mongoengine/document.py | 4 ---- 5 files changed, 24 insertions(+), 18 deletions(-) diff --git a/.landscape.yml b/.landscape.yml index a27bbb03..4f13a5eb 100644 --- a/.landscape.yml +++ b/.landscape.yml @@ -5,17 +5,12 @@ pylint: options: additional-builtins: - # add xrange and long as valid built-ins. In Python 3, xrange is - # translated into range and long is translated into int via 2to3 (see - # "use_2to3" in setup.py). This should be removed when we drop Python - # 2 support (which probably won't happen any time soon). - - xrange + # add long as valid built-ins. 
- long pyflakes: disable: - # undefined variables are already covered by pylint (and exclude - # xrange & long) + # undefined variables are already covered by pylint (and exclude long) - F821 ignore-paths: diff --git a/.travis.yml b/.travis.yml index 0a96e762..ecd5163d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -73,7 +73,7 @@ install: before_script: - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork - - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for Python 2.7 only + - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then flake8 .; else echo "flake8 only runs on py37"; fi # Run flake8 for Python 3.7 only - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only - mongo --eval 'db.version();' # Make sure mongo is awake @@ -84,7 +84,7 @@ script: # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible # code in a separate dir and runs tests on that. after_success: -- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; else echo "coveralls only sent for py27"; fi +- coveralls --verbose notifications: irc: irc.freenode.org#mongoengine diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 56bae31f..a71c2cec 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,11 +22,19 @@ Supported Interpreters MongoEngine supports CPython 2.7 and newer. Language features not supported by all interpreters can not be used. -The codebase is written in python 2 so you must be using python 2 -when developing new features. Compatibility of the library with Python 3 -relies on the 2to3 package that gets executed as part of the installation -build. You should ensure that your code is properly converted by -`2to3 `_. +The codebase is written in a compatible manner for python 2 & 3 so it +is important that this is taken into account when it comes to discrepencies +between the 2 versions (check this https://python-future.org/compatible_idioms.html). +Travis runs run the tests against the different versions as a safety net. + +Python 2/3 compatibility +---------------------- + +The codebase is written in a compatible manner for python 2 & 3 so it +is important that this is taken into account when it comes to discrepencies +between the 2 versions (check this https://python-future.org/compatible_idioms.html). +Travis runs run the tests against the different versions as a safety net. 
+ Style Guide ----------- diff --git a/benchmarks/test_inserts.py b/benchmarks/test_inserts.py index fd017bae..af6399f7 100644 --- a/benchmarks/test_inserts.py +++ b/benchmarks/test_inserts.py @@ -3,13 +3,17 @@ import timeit def main(): setup = """ +from builtins import range + from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') """ stmt = """ from pymongo import MongoClient + connection = MongoClient() db = connection.mongoengine_benchmark_test @@ -55,7 +59,10 @@ myNoddys = noddy.find() print("{}s".format(t.timeit(1))) setup = """ +from builtins import range + from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') connection.close() diff --git a/mongoengine/document.py b/mongoengine/document.py index 23968f17..fc6b9f16 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -71,7 +71,6 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): __slots__ = ("_instance",) - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass @@ -156,7 +155,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): in the :attr:`meta` dictionary. """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass @@ -1045,7 +1043,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass @@ -1069,7 +1066,6 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu information about dynamic documents. """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass From 44b7f792fead6a3fa61dcb75a9a4d3c7ecdc90b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 16 Jun 2019 22:45:04 +0200 Subject: [PATCH 111/216] fix benchmarks prints --- benchmarks/test_basic_doc_ops.py | 1 - 1 file changed, 1 deletion(-) diff --git a/benchmarks/test_basic_doc_ops.py b/benchmarks/test_basic_doc_ops.py index e840f97a..c74594fe 100644 --- a/benchmarks/test_basic_doc_ops.py +++ b/benchmarks/test_basic_doc_ops.py @@ -135,7 +135,6 @@ def test_big_doc(): % (timeit(create_and_delete_company, 10) * 10 ** 3) ) - if __name__ == "__main__": test_basic() print("-" * 100) From 7e0ba1b3359f494a750400042245662a58ae472c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 16 Jun 2019 23:09:46 +0200 Subject: [PATCH 112/216] clean remaining references to 2to3 --- .travis.yml | 3 --- CONTRIBUTING.rst | 4 ---- 2 files changed, 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index ecd5163d..de32e6bf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -80,9 +80,6 @@ before_script: script: - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" -# For now only submit coveralls for Python v2.7. Python v3.x currently shows -# 0% coverage. 
That's caused by 'use_2to3', which builds the py3-compatible -# code in a separate dir and runs tests on that. after_success: - coveralls --verbose diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a71c2cec..445319e5 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,10 +22,6 @@ Supported Interpreters MongoEngine supports CPython 2.7 and newer. Language features not supported by all interpreters can not be used. -The codebase is written in a compatible manner for python 2 & 3 so it -is important that this is taken into account when it comes to discrepencies -between the 2 versions (check this https://python-future.org/compatible_idioms.html). -Travis runs run the tests against the different versions as a safety net. Python 2/3 compatibility ---------------------- From 82af5e4a1920578fbd9dc36de8c1760311fb9837 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 18 Jun 2019 22:55:51 +0200 Subject: [PATCH 113/216] fix small finding from review on py2py3 compat --- mongoengine/base/document.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 4948e672..4e4df92f 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -3,6 +3,7 @@ import numbers from functools import partial from bson import DBRef, ObjectId, SON, json_util +from future.utils import listitems import pymongo import six from six import iteritems @@ -670,7 +671,7 @@ class BaseDocument(object): del set_data["_id"] # Determine if any changed items were actually unset. - for path, value in list(set_data.items()): + for path, value in listitems(set_data): if value or isinstance( value, (numbers.Number, bool) ): # Account for 0 and True that are truthy From 64c0cace85ae528892409b9f34c343aff1bfbe60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 19 Jul 2019 21:50:47 +0200 Subject: [PATCH 114/216] fix from code review regarding py2py3 compat --- CONTRIBUTING.rst | 4 ++-- mongoengine/base/datastructures.py | 5 +++-- tests/fields/test_sequence_field.py | 10 +++++----- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 445319e5..b04ae968 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -28,8 +28,8 @@ Python 2/3 compatibility The codebase is written in a compatible manner for python 2 & 3 so it is important that this is taken into account when it comes to discrepencies -between the 2 versions (check this https://python-future.org/compatible_idioms.html). -Travis runs run the tests against the different versions as a safety net. +between the two versions (see https://python-future.org/compatible_idioms.html). +Travis runs the tests against different Python versions as a safety net. 
Style Guide diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index a9abf6ea..f8b4cd92 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,6 +1,7 @@ import weakref from bson import DBRef +from future.utils import listitems import six from six import iteritems @@ -422,10 +423,10 @@ class StrictDict(object): return len(list(iteritems(self))) def __eq__(self, other): - return list(self.items()) == list(other.items()) + return listitems(self) == listitems(other) def __ne__(self, other): - return list(self.items()) != list(other.items()) + return not(self == other) @classmethod def create(cls, allowed_keys): diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py index f96f6b06..8e6615a1 100644 --- a/tests/fields/test_sequence_field.py +++ b/tests/fields/test_sequence_field.py @@ -267,12 +267,12 @@ class TestSequenceField(MongoDBTestCase): foo = Foo(name="Foo") foo.save() - assert not ( - "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") + assert ( + "base.counter" not in self.db["mongoengine.counters"].find().distinct("_id") ) - assert ("foo.counter" and "bar.counter") in self.db[ - "mongoengine.counters" - ].find().distinct("_id") + existing_counters = self.db["mongoengine.counters"].find().distinct("_id") + assert "foo.counter" in existing_counters + assert "bar.counter" in existing_counters assert foo.counter == bar.counter assert foo._fields["counter"].owner_document == Foo assert bar._fields["counter"].owner_document == Bar From 45a7520fc3c9b22452d78ee2b5ae9c11f616b6c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 21 Jul 2019 22:54:14 +0200 Subject: [PATCH 115/216] make use past.builtins.long --- mongoengine/fields.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index bef85dd7..562cc906 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -10,6 +10,7 @@ from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON from bson.int64 import Int64 import gridfs +from past.builtins import long import pymongo from pymongo import ReturnDocument import six @@ -48,9 +49,6 @@ except ImportError: Image = None ImageOps = None -if six.PY3: - long = int - __all__ = ( "StringField", From 6bc4e602bb5fef6c48ccc3233645eed729f2d9a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 14 Dec 2019 00:05:48 +0100 Subject: [PATCH 116/216] additional fix --- tests/fields/test_url_field.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index 98f5d4fd..477bced7 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -1,10 +1,9 @@ # -*- coding: utf-8 -*- -import pytest - from builtins import str -from mongoengine import * +import pytest +from mongoengine import * from tests.utils import MongoDBTestCase @@ -37,8 +36,9 @@ class TestURLField(MongoDBTestCase): with pytest.raises(ValidationError) as exc_info: link.validate() assert ( - str(exc_info.exception) - == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])") + str(exc_info.value) + == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" + ) def test_url_scheme_validation(self): """Ensure that URLFields validate urls with specific schemes properly. 
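The ``exc_info.value`` change above is the key detail of this fix: ``pytest.raises`` yields an ``ExceptionInfo`` object that exposes the raised exception as ``.value`` (the ``.exception`` attribute belongs to unittest's ``assertRaises`` context manager, not to pytest). A minimal sketch with a made-up exception message::

    import pytest

    def boom():
        raise ValueError("illustrative message")

    def test_boom():
        with pytest.raises(ValueError) as exc_info:
            boom()
        assert str(exc_info.value) == "illustrative message"  # .value, not .exception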
From 9166ba91d713924f91983cfbe959c4981e4f0711 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 14 Dec 2019 15:39:02 +0100 Subject: [PATCH 117/216] fix minor styling issue --- benchmarks/test_basic_doc_ops.py | 1 + mongoengine/base/datastructures.py | 2 +- setup.py | 2 +- tests/fields/test_sequence_field.py | 4 ++-- tests/test_signals.py | 4 +++- 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/benchmarks/test_basic_doc_ops.py b/benchmarks/test_basic_doc_ops.py index c74594fe..e840f97a 100644 --- a/benchmarks/test_basic_doc_ops.py +++ b/benchmarks/test_basic_doc_ops.py @@ -135,6 +135,7 @@ def test_big_doc(): % (timeit(create_and_delete_company, 10) * 10 ** 3) ) + if __name__ == "__main__": test_basic() print("-" * 100) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index f8b4cd92..8c93f596 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -426,7 +426,7 @@ class StrictDict(object): return listitems(self) == listitems(other) def __ne__(self, other): - return not(self == other) + return not (self == other) @classmethod def create(cls, allowed_keys): diff --git a/setup.py b/setup.py index c6e99f53..19cd4be7 100644 --- a/setup.py +++ b/setup.py @@ -143,7 +143,7 @@ setup( long_description=LONG_DESCRIPTION, platforms=["any"], classifiers=CLASSIFIERS, - install_requires=['pymongo>=3.4', 'six', 'future'], + install_requires=["pymongo>=3.4", "six", "future"], cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py index 8e6615a1..81d648fd 100644 --- a/tests/fields/test_sequence_field.py +++ b/tests/fields/test_sequence_field.py @@ -267,8 +267,8 @@ class TestSequenceField(MongoDBTestCase): foo = Foo(name="Foo") foo.save() - assert ( - "base.counter" not in self.db["mongoengine.counters"].find().distinct("_id") + assert "base.counter" not in self.db["mongoengine.counters"].find().distinct( + "_id" ) existing_counters = self.db["mongoengine.counters"].find().distinct("_id") assert "foo.counter" in existing_counters diff --git a/tests/test_signals.py b/tests/test_signals.py index 13ea5a05..451e01ff 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -58,7 +58,9 @@ class TestSignal(unittest.TestCase): @classmethod def post_save(cls, sender, document, **kwargs): - dirty_keys = list(document._delta()[0].keys()) + list(document._delta()[1].keys()) + dirty_keys = list(document._delta()[0].keys()) + list( + document._delta()[1].keys() + ) signal_output.append("post_save signal, %s" % document) signal_output.append("post_save dirty keys, %s" % dirty_keys) if kwargs.pop("created", False): From d8c0631dabe7d900691ec9c25366c45f3112c50e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 14 Dec 2019 21:23:28 +0100 Subject: [PATCH 118/216] added entry in changelog for py2py3 compatibility --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index bc01a403..bac5ee17 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,7 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- Codebase is now compatible with both Python2 and Python3 (no more relying on 2to3 during installation) #2087 - Documentation improvements: - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. 
- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 From 280a73af3bea8e9232a5ebe761d451840f025135 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 14 Dec 2019 21:44:59 +0100 Subject: [PATCH 119/216] minor fix in doc of NULLIFY to improve #834 --- docs/guide/defining-documents.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 9dcca88c..82388d3d 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -352,7 +352,7 @@ Its value can take any of the following constants: Deletion is denied if there still exist references to the object being deleted. :const:`mongoengine.NULLIFY` - Any object's fields still referring to the object being deleted are removed + Any object's fields still referring to the object being deleted are set to None (using MongoDB's "unset" operation), effectively nullifying the relationship. :const:`mongoengine.CASCADE` Any object containing fields that are referring to the object being deleted From 50882e5bb09b74faddfac3cb93afd278ea94ced2 Mon Sep 17 00:00:00 2001 From: Eric Timmons Date: Wed, 16 Oct 2019 09:49:40 -0400 Subject: [PATCH 120/216] Add failing test Test that __eq__ for EmbeddedDocuments with LazyReferenceFields works as expected. --- tests/document/test_instance.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 173e02f2..6ba6827e 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -3319,6 +3319,38 @@ class TestInstance(MongoDBTestCase): f1.ref # Dereferences lazily assert f1 == f2 + def test_embedded_document_equality_with_lazy_ref(self): + class Job(EmbeddedDocument): + boss = LazyReferenceField('Person') + + class Person(Document): + job = EmbeddedDocumentField(Job) + + Person.drop_collection() + + boss = Person() + worker = Person(job=Job(boss=boss)) + boss.save() + worker.save() + + worker1 = Person.objects.get(id=worker.id) + + # worker1.job should be equal to the job used originally to create the + # document. + self.assertEqual(worker1.job, worker.job) + + # worker1.job should be equal to a newly created Job EmbeddedDocument + # using either the Boss object or his ID. + self.assertEqual(worker1.job, Job(boss=boss)) + self.assertEqual(worker1.job, Job(boss=boss.id)) + + # The above equalities should also hold after worker1.job.boss has been + # fetch()ed. + worker1.job.boss.fetch() + self.assertEqual(worker1.job, worker.job) + self.assertEqual(worker1.job, Job(boss=boss)) + self.assertEqual(worker1.job, Job(boss=boss.id)) + def test_dbref_equality(self): class Test2(Document): name = StringField() From dc7b96a5691335e970b13fb30ef62426b126e2bd Mon Sep 17 00:00:00 2001 From: Eric Timmons Date: Wed, 16 Oct 2019 09:50:47 -0400 Subject: [PATCH 121/216] Make python value for LazyReferenceFields be a DBRef Previously, when reading a LazyReferenceField from the DB, it was stored internally in the parent document's _data field as an ObjectId. However, this meant that equality tests using an enclosing EmbeddedDocument would not return True when the EmbeddedDocument being compared to contained a DBRef or Document in _data. Enclosing Documents were largely unaffected because they look at the primary key for equality (which EmbeddedDocuments lack). 
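A condensed sketch of the symptom, using the same ``Job``/``Person`` models as the test added further down (the ``connect()`` call and database name are assumptions of the example)::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, LazyReferenceField, connect)

    connect("lazyref-example")  # illustrative database name

    class Job(EmbeddedDocument):
        boss = LazyReferenceField("Person")

    class Person(Document):
        job = EmbeddedDocumentField(Job)

    boss = Person().save()
    worker = Person(job=Job(boss=boss)).save()

    reloaded = Person.objects.get(id=worker.id)
    # EmbeddedDocument equality compares the underlying _data. Before this change,
    # reloaded.job kept a bare ObjectId there while Job(boss=boss) kept the document,
    # so the comparison below evaluated to False; with the DBRef representation it holds.
    assert reloaded.job == Job(boss=boss)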
This makes the internal Python representation of a LazyReferenceField (before the LazyReference itself has been constructed) a DBRef, using code identical to ReferenceField. --- mongoengine/fields.py | 9 +++++++++ tests/document/test_instance.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f8f527a3..0c29d1bc 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -2502,6 +2502,15 @@ class LazyReferenceField(BaseField): else: return pk + def to_python(self, value): + """Convert a MongoDB-compatible type to a Python type.""" + if not self.dbref and not isinstance( + value, (DBRef, Document, EmbeddedDocument) + ): + collection = self.document_type._get_collection_name() + value = DBRef(collection, self.document_type.id.to_python(value)) + return value + def validate(self, value): if isinstance(value, LazyReference): if value.collection != self.document_type._get_collection_name(): diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 6ba6827e..07376b4b 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -3321,7 +3321,7 @@ class TestInstance(MongoDBTestCase): def test_embedded_document_equality_with_lazy_ref(self): class Job(EmbeddedDocument): - boss = LazyReferenceField('Person') + boss = LazyReferenceField("Person") class Person(Document): job = EmbeddedDocumentField(Job) From 0d4e61d489a9264863cecdfed08fc9e67a74d03a Mon Sep 17 00:00:00 2001 From: Eric Timmons Date: Wed, 16 Oct 2019 10:01:19 -0400 Subject: [PATCH 122/216] Add daewok to AUTHORS per contributing guidelines --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index aa044bd2..374e2f7f 100644 --- a/AUTHORS +++ b/AUTHORS @@ -253,3 +253,4 @@ that much better: * Gaurav Dadhania (https://github.com/GVRV) * Yurii Andrieiev (https://github.com/yandrieiev) * Filip Kucharczyk (https://github.com/Pacu2) + * Eric Timmons (https://github.com/daewok) From 68dc2925fbea13702fa23ced0afd786d77b2ca28 Mon Sep 17 00:00:00 2001 From: Eric Timmons Date: Sun, 15 Dec 2019 12:08:04 -0500 Subject: [PATCH 123/216] Add LazyReferenceField with dbref=True to embedded_document equality test --- tests/document/test_instance.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 07376b4b..b899684f 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -3322,6 +3322,7 @@ class TestInstance(MongoDBTestCase): def test_embedded_document_equality_with_lazy_ref(self): class Job(EmbeddedDocument): boss = LazyReferenceField("Person") + boss_dbref = LazyReferenceField("Person", dbref=True) class Person(Document): job = EmbeddedDocumentField(Job) @@ -3329,7 +3330,7 @@ class TestInstance(MongoDBTestCase): Person.drop_collection() boss = Person() - worker = Person(job=Job(boss=boss)) + worker = Person(job=Job(boss=boss, boss_dbref=boss)) boss.save() worker.save() @@ -3341,15 +3342,15 @@ class TestInstance(MongoDBTestCase): # worker1.job should be equal to a newly created Job EmbeddedDocument # using either the Boss object or his ID. - self.assertEqual(worker1.job, Job(boss=boss)) - self.assertEqual(worker1.job, Job(boss=boss.id)) + self.assertEqual(worker1.job, Job(boss=boss, boss_dbref=boss)) + self.assertEqual(worker1.job, Job(boss=boss.id, boss_dbref=boss.id)) # The above equalities should also hold after worker1.job.boss has been # fetch()ed. 
worker1.job.boss.fetch() self.assertEqual(worker1.job, worker.job) - self.assertEqual(worker1.job, Job(boss=boss)) - self.assertEqual(worker1.job, Job(boss=boss.id)) + self.assertEqual(worker1.job, Job(boss=boss, boss_dbref=boss)) + self.assertEqual(worker1.job, Job(boss=boss.id, boss_dbref=boss.id)) def test_dbref_equality(self): class Test2(Document): From 329f030a41da4d93aaec1f3ccee634e898f7d289 Mon Sep 17 00:00:00 2001 From: Eric Timmons Date: Sun, 15 Dec 2019 20:15:13 -0500 Subject: [PATCH 124/216] Always store a DBRef, Document, or EmbeddedDocument in LazyReferenceField._data This is required to handle the case of equality tests on a LazyReferenceField with dbref=True when comparing against a field instantiated with an ObjectId. --- mongoengine/fields.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 0c29d1bc..a385559d 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -2504,9 +2504,7 @@ class LazyReferenceField(BaseField): def to_python(self, value): """Convert a MongoDB-compatible type to a Python type.""" - if not self.dbref and not isinstance( - value, (DBRef, Document, EmbeddedDocument) - ): + if not isinstance(value, (DBRef, Document, EmbeddedDocument)): collection = self.document_type._get_collection_name() value = DBRef(collection, self.document_type.id.to_python(value)) return value From cfd4d6a161556ef4a8aa355468384554eb684442 Mon Sep 17 00:00:00 2001 From: Eric Timmons Date: Sun, 15 Dec 2019 12:02:24 -0500 Subject: [PATCH 125/216] Add breaking change to changelog for LazyReferenceField representation in _data --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index bc01a403..b308c5fb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -17,6 +17,7 @@ Development - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. +- BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182 - DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 - Added ability to check if Q or QNode are empty by parsing them to bool. - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. From ae326678ec0d7345645120c864b8b15cac741dc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 20 Dec 2019 22:09:04 +0100 Subject: [PATCH 126/216] updated changelog for upcoming 0.19.0 --- docs/changelog.rst | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b308c5fb..5c1e838a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,8 +6,9 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). -- Documentation improvements: - - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. + +Changes in 0.19.0 +================= - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. 
#2112 - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. @@ -21,14 +22,16 @@ Development - DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 - Added ability to check if Q or QNode are empty by parsing them to bool. - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. -- Improve error message related to InvalidDocumentError #2180 - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 -- Switch from nosetest to pytest as test runner #2114 -- The codebase is now formatted using ``black``. #2109 -- In bulk write insert, the detailed error message would raise in exception. +- Improve error message related to InvalidDocumentError #2180 +- Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152 - Added ability to compare Q and Q operations #2204 - Added ability to use a db alias on query_counter #2194 +- Switch from nosetest to pytest as test runner #2114 +- The codebase is now formatted using ``black``. #2109 +- Documentation improvements: + - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. Changes in 0.18.2 ================= From 12d8bd5a22f0cee1f99f87b37bbaec94f0a002bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 20 Dec 2019 22:11:43 +0100 Subject: [PATCH 127/216] bump version to 0.19.0 --- mongoengine/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index d7093d28..c41b5e70 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -28,7 +28,7 @@ __all__ = ( ) -VERSION = (0, 18, 2) +VERSION = (0, 19, 0) def get_version(): From d44533d95651559a65ce72664dca9ede98aa58e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 20 Dec 2019 22:41:22 +0100 Subject: [PATCH 128/216] completed the changelog with missing details --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 5c1e838a..93776a70 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -23,11 +23,13 @@ Changes in 0.19.0 - Added ability to check if Q or QNode are empty by parsing them to bool. - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 +- Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148 - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 - Improve error message related to InvalidDocumentError #2180 - Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152 - Added ability to compare Q and Q operations #2204 - Added ability to use a db alias on query_counter #2194 +- Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024 - Switch from nosetest to pytest as test runner #2114 - The codebase is now formatted using ``black``. 
#2109 - Documentation improvements: From 332bd767d43af14bbb783e779d585cd2dbcf21de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 20 Dec 2019 22:51:08 +0100 Subject: [PATCH 129/216] minor fixes in tests --- docs/guide/mongomock.rst | 6 +++--- tests/document/test_instance.py | 22 +++++++++++----------- tests/fields/test_file_field.py | 2 +- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/guide/mongomock.rst b/docs/guide/mongomock.rst index d70ee6a6..040ff912 100644 --- a/docs/guide/mongomock.rst +++ b/docs/guide/mongomock.rst @@ -2,10 +2,10 @@ Use mongomock for testing ============================== -`mongomock `_ is a package to do just +`mongomock `_ is a package to do just what the name implies, mocking a mongo database. -To use with mongoengine, simply specify mongomock when connecting with +To use with mongoengine, simply specify mongomock when connecting with mongoengine: .. code-block:: python @@ -45,4 +45,4 @@ Example of test file: pers.save() fresh_pers = Person.objects().first() - self.assertEqual(fresh_pers.name, 'John') + assert fresh_pers.name == 'John' diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index b899684f..609d0690 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -3338,19 +3338,19 @@ class TestInstance(MongoDBTestCase): # worker1.job should be equal to the job used originally to create the # document. - self.assertEqual(worker1.job, worker.job) + assert worker1.job == worker.job # worker1.job should be equal to a newly created Job EmbeddedDocument # using either the Boss object or his ID. - self.assertEqual(worker1.job, Job(boss=boss, boss_dbref=boss)) - self.assertEqual(worker1.job, Job(boss=boss.id, boss_dbref=boss.id)) + assert worker1.job == Job(boss=boss, boss_dbref=boss) + assert worker1.job == Job(boss=boss.id, boss_dbref=boss.id) # The above equalities should also hold after worker1.job.boss has been # fetch()ed. 
worker1.job.boss.fetch() - self.assertEqual(worker1.job, worker.job) - self.assertEqual(worker1.job, Job(boss=boss, boss_dbref=boss)) - self.assertEqual(worker1.job, Job(boss=boss.id, boss_dbref=boss.id)) + assert worker1.job == worker.job + assert worker1.job == Job(boss=boss, boss_dbref=boss) + assert worker1.job == Job(boss=boss.id, boss_dbref=boss.id) def test_dbref_equality(self): class Test2(Document): @@ -3693,13 +3693,13 @@ class TestInstance(MongoDBTestCase): value = u"I_should_be_a_dict" coll.insert_one({"light_saber": value}) - with self.assertRaises(InvalidDocumentError) as cm: + with pytest.raises(InvalidDocumentError) as exc_info: list(Jedi.objects) - self.assertEqual( - str(cm.exception), - "Invalid data to create a `Jedi` instance.\nField 'light_saber' - The source SON object needs to be of type 'dict' but a '%s' was found" - % type(value), + assert str( + exc_info.value + ) == "Invalid data to create a `Jedi` instance.\nField 'light_saber' - The source SON object needs to be of type 'dict' but a '%s' was found" % type( + value ) diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index bfc86511..b8ece1a9 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -151,7 +151,7 @@ class TestFileField(MongoDBTestCase): result = StreamFile.objects.first() assert streamfile == result assert result.the_file.read() == text + more_text - # self.assertEqual(result.the_file.content_type, content_type) + # assert result.the_file.content_type == content_type result.the_file.seek(0) assert result.the_file.tell() == 0 assert result.the_file.read(len(text)) == text From 1170de1e8e30b976895c1c92cca134089dc5b806 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 20 Dec 2019 23:16:29 +0100 Subject: [PATCH 130/216] added explicit doc for order_by #2117 --- docs/guide/mongomock.rst | 2 +- docs/guide/querying.rst | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/docs/guide/mongomock.rst b/docs/guide/mongomock.rst index 040ff912..141d7b69 100644 --- a/docs/guide/mongomock.rst +++ b/docs/guide/mongomock.rst @@ -21,7 +21,7 @@ or with an alias: conn = get_connection('testdb') Example of test file: --------- +--------------------- .. code-block:: python import unittest diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index d64c169c..121325ae 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -222,6 +222,18 @@ keyword argument:: .. versionadded:: 0.4 +Sorting/Ordering results +======================== +It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`. +The order may be specified by prepending each of the keys by "+" or "-". Ascending order is assumed if there's no prefix.:: + + # Order by ascending date + blogs = BlogPost.objects().order_by('date') # equivalent to .order_by('+date') + + # Order by ascending date first, then descending title + blogs = BlogPost.objects().order_by('+date', '-title') + + Limiting and skipping results ============================= Just as with traditional ORMs, you may limit the number of results returned or @@ -585,7 +597,8 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: ['database', 'mongodb'] From MongoDB version 2.6, push operator supports $position value which allows -to push values with index. 
+to push values with index:: + >>> post = BlogPost(title="Test", tags=["mongo"]) >>> post.save() >>> post.update(push__tags__0=["database", "code"]) From 8e892dccfe01e284a99928a6ee4fbb1e0c35519d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 20 Dec 2019 23:51:01 +0100 Subject: [PATCH 131/216] document recent merged PR in changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 93776a70..06eb8d0c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,6 +30,7 @@ Changes in 0.19.0 - Added ability to compare Q and Q operations #2204 - Added ability to use a db alias on query_counter #2194 - Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024 +- Fix updates of a list field by negative index #2094 - Switch from nosetest to pytest as test runner #2114 - The codebase is now formatted using ``black``. #2109 - Documentation improvements: From 488604ff2e6cdadeed538de0dfb9fe36e27473b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 24 Dec 2019 00:00:15 +0100 Subject: [PATCH 132/216] test python 3.8 --- .travis.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index cbf34cde..c825571b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,6 +23,7 @@ python: - 3.5 - 3.6 - 3.7 +- 3.8 - pypy - pypy3 @@ -50,7 +51,8 @@ matrix: env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} - + - python: 3.8 + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} install: # Install Mongo From 62c8597a3b33dfd34cc785687c747606a0bbba9c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 24 Dec 2019 11:15:23 +0100 Subject: [PATCH 133/216] fix pypi deployment version match --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index cbf34cde..6680a7e5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -110,5 +110,5 @@ deploy: on: tags: true repo: MongoEngine/mongoengine - condition: ($PYMONGO = ${PYMONGO_3_6}) && ($MONGODB = ${MONGODB_3_4}) + condition: ($PYMONGO = ${PYMONGO_3_9}) && ($MONGODB = ${MONGODB_3_4}) python: 2.7 From ca4967311d3328fcc7abb84f8e9cdda7409e8dd7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 26 Dec 2019 21:00:34 +0100 Subject: [PATCH 134/216] update python 3.8 config --- .travis.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 971a51a2..42f2a112 100644 --- a/.travis.yml +++ b/.travis.yml @@ -51,8 +51,6 @@ matrix: env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} - - python: 3.8 - env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} install: # Install Mongo From f0d1ee2cb41f1c9794eeee4de445fb27a7a77f88 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 26 Dec 2019 21:03:34 +0100 Subject: [PATCH 135/216] update travis config + improve readmecode --- .travis.yml | 2 +- README.rst | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 42f2a112..809fbad8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ # with a very large number of jobs, hence we only test a subset of all the # combinations: # * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, -# tested against Python v2.7, v3.5, v3.6, and PyPy. 
+# tested against Python v2.7, v3.5, v3.6, v3.7, v3.8 and PyPy. # * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo # combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. # * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. diff --git a/README.rst b/README.rst index 853d8fbe..3b85fd48 100644 --- a/README.rst +++ b/README.rst @@ -91,12 +91,11 @@ Some simple examples of what MongoEngine code looks like: # Iterate over all posts using the BlogPost superclass >>> for post in BlogPost.objects: - ... print '===', post.title, '===' + ... print('===', post.title, '===') ... if isinstance(post, TextPost): - ... print post.content + ... print(post.content) ... elif isinstance(post, LinkPost): - ... print 'Link:', post.url - ... print + ... print('Link:', post.url) ... # Count all blog posts and its subtypes From aa02f87b69787ac678d4ee740e1cb2e5e6753fc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 27 Dec 2019 09:23:15 +0100 Subject: [PATCH 136/216] change & deprecate .aggregate api to mimic pymongo's interface + separate the aggregation tests from the large test_queryset.py file --- docs/guide/querying.rst | 4 +- mongoengine/queryset/base.py | 29 ++- tests/queryset/test_queryset.py | 236 +----------------- tests/queryset/test_queryset_aggregation.py | 255 ++++++++++++++++++++ 4 files changed, 277 insertions(+), 247 deletions(-) create mode 100644 tests/queryset/test_queryset_aggregation.py diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 121325ae..07de0378 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -400,7 +400,7 @@ would be generating "tag-clouds":: MongoDB aggregation API ----------------------- -If you need to run aggregation pipelines, MongoEngine provides an entry point `Pymongo's aggregation framework `_ +If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework `_ through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline. An example of its use would be:: @@ -414,7 +414,7 @@ An example of its use would be:: {"$sort" : {"name" : -1}}, {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}} ] - data = Person.objects().aggregate(*pipeline) + data = Person.objects().aggregate(pipeline) assert data == [{'name': 'BOB'}, {'name': 'JOHN'}] Query efficiency and performance diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index a648391e..aa5f2584 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1255,16 +1255,25 @@ class BaseQuerySet(object): for data in son_data ] - def aggregate(self, *pipeline, **kwargs): - """ - Perform a aggregate function based in your queryset params + def aggregate(self, pipeline, *suppl_pipeline, **kwargs): + """Perform a aggregate function based in your queryset params + :param pipeline: list of aggregation commands,\ see: http://docs.mongodb.org/manual/core/aggregation-pipeline/ - + :param suppl_pipeline: unpacked list of pipeline (added to support deprecation of the old interface) + parameter will be removed shortly .. 
versionadded:: 0.9 """ - initial_pipeline = [] + using_deprecated_interface = isinstance(pipeline, dict) or bool(suppl_pipeline) + user_pipeline = [pipeline] if isinstance(pipeline, dict) else list(pipeline) + if using_deprecated_interface: + msg = "Calling .aggregate() with un unpacked list (*pipeline) is deprecated, it will soon change and will expect a list (similar to pymongo.Collection.aggregate interface), see documentation" + warnings.warn(msg, DeprecationWarning) + + user_pipeline += suppl_pipeline + + initial_pipeline = [] if self._query: initial_pipeline.append({"$match": self._query}) @@ -1281,14 +1290,14 @@ class BaseQuerySet(object): if self._skip is not None: initial_pipeline.append({"$skip": self._skip}) - pipeline = initial_pipeline + list(pipeline) + final_pipeline = initial_pipeline + user_pipeline + collection = self._collection if self._read_preference is not None: - return self._collection.with_options( + collection = self._collection.with_options( read_preference=self._read_preference - ).aggregate(pipeline, cursor={}, **kwargs) - - return self._collection.aggregate(pipeline, cursor={}, **kwargs) + ) + return collection.aggregate(final_pipeline, cursor={}, **kwargs) # JS functionality def map_reduce( diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 7812ab66..b30350e6 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -14,7 +14,7 @@ import six from six import iteritems from mongoengine import * -from mongoengine.connection import get_connection, get_db +from mongoengine.connection import get_db from mongoengine.context_managers import query_counter, switch_db from mongoengine.errors import InvalidQueryError from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version @@ -4658,21 +4658,6 @@ class TestQueryset(unittest.TestCase): ) assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) - def test_read_preference_aggregation_framework(self): - class Bar(Document): - txt = StringField() - - meta = {"indexes": ["txt"]} - - # Aggregates with read_preference - bars = Bar.objects.read_preference( - ReadPreference.SECONDARY_PREFERRED - ).aggregate() - assert ( - bars._CommandCursor__collection.read_preference - == ReadPreference.SECONDARY_PREFERRED - ) - def test_json_simple(self): class Embedded(EmbeddedDocument): string = StringField() @@ -5399,225 +5384,6 @@ class TestQueryset(unittest.TestCase): assert Person.objects.first().name == "A" assert Person.objects._has_data(), "Cursor has data and returned False" - def test_queryset_aggregation_framework(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects(age__lte=22).aggregate( - {"$project": {"name": {"$toUpper": "$name"}}} - ) - - assert list(data) == [ - {"_id": p1.pk, "name": "ISABELLA LUANNA"}, - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - ] - - data = ( - Person.objects(age__lte=22) - .order_by("-name") - .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) - ) - - assert list(data) == [ - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - {"_id": p1.pk, "name": "ISABELLA LUANNA"}, - ] - - data = ( - Person.objects(age__gte=17, age__lte=40) - .order_by("-age") - .aggregate( - {"$group": {"_id": None, "total": {"$sum": 1}, "avg": {"$avg": "$age"}}} - ) - ) - assert list(data) == [{"_id": None, 
"avg": 29, "total": 2}] - - data = Person.objects().aggregate({"$match": {"name": "Isabella Luanna"}}) - assert list(data) == [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}] - - def test_queryset_aggregation_with_skip(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.skip(1).aggregate( - {"$project": {"name": {"$toUpper": "$name"}}} - ) - - assert list(data) == [ - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - {"_id": p3.pk, "name": "SANDRA MARA"}, - ] - - def test_queryset_aggregation_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.limit(1).aggregate( - {"$project": {"name": {"$toUpper": "$name"}}} - ) - - assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] - - def test_queryset_aggregation_with_sort(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.order_by("name").aggregate( - {"$project": {"name": {"$toUpper": "$name"}}} - ) - - assert list(data) == [ - {"_id": p1.pk, "name": "ISABELLA LUANNA"}, - {"_id": p3.pk, "name": "SANDRA MARA"}, - {"_id": p2.pk, "name": "WILSON JUNIOR"}, - ] - - def test_queryset_aggregation_with_skip_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = list( - Person.objects.skip(1) - .limit(1) - .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) - ) - - assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] - - # Make sure limit/skip chaining order has no impact - data2 = ( - Person.objects.limit(1) - .skip(1) - .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) - ) - - assert data == list(data2) - - def test_queryset_aggregation_with_sort_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = ( - Person.objects.order_by("name") - .limit(2) - .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) - ) - - assert list(data) == [ - {"_id": p1.pk, "name": "ISABELLA LUANNA"}, - {"_id": p3.pk, "name": "SANDRA MARA"}, - ] - - # Verify adding limit/skip steps works as expected - data = ( - Person.objects.order_by("name") - .limit(2) - .aggregate({"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}) - ) - - assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] - - data = ( - Person.objects.order_by("name") - .limit(2) - .aggregate( - {"$project": {"name": {"$toUpper": "$name"}}}, - {"$skip": 1}, - {"$limit": 1}, - ) - ) - - assert list(data) == [{"_id": p3.pk, "name": 
"SANDRA MARA"}] - - def test_queryset_aggregation_with_sort_with_skip(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = ( - Person.objects.order_by("name") - .skip(2) - .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) - ) - - assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] - - def test_queryset_aggregation_with_sort_with_skip_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = ( - Person.objects.order_by("name") - .skip(1) - .limit(1) - .aggregate({"$project": {"name": {"$toUpper": "$name"}}}) - ) - - assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}] - def test_delete_count(self): [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] assert ( diff --git a/tests/queryset/test_queryset_aggregation.py b/tests/queryset/test_queryset_aggregation.py new file mode 100644 index 00000000..00e04a36 --- /dev/null +++ b/tests/queryset/test_queryset_aggregation.py @@ -0,0 +1,255 @@ +# -*- coding: utf-8 -*- + +import unittest +import warnings + +from pymongo.read_preferences import ReadPreference + +from mongoengine import * +from tests.utils import MongoDBTestCase + + +class TestQuerysetAggregate(MongoDBTestCase): + def test_read_preference_aggregation_framework(self): + class Bar(Document): + txt = StringField() + + meta = {"indexes": ["txt"]} + + # Aggregates with read_preference + pipeline = [] + bars = Bar.objects.read_preference( + ReadPreference.SECONDARY_PREFERRED + ).aggregate(pipeline) + assert ( + bars._CommandCursor__collection.read_preference + == ReadPreference.SECONDARY_PREFERRED + ) + + def test_queryset_aggregation_framework(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects(age__lte=22).aggregate(pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ] + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects(age__lte=22).order_by("-name").aggregate(pipeline) + + assert list(data) == [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + ] + + pipeline = [ + {"$group": {"_id": None, "total": {"$sum": 1}, "avg": {"$avg": "$age"}}} + ] + data = ( + Person.objects(age__gte=17, age__lte=40) + .order_by("-age") + .aggregate(pipeline) + ) + assert list(data) == [{"_id": None, "avg": 29, "total": 2}] + + pipeline = [{"$match": {"name": "Isabella Luanna"}}] + data = Person.objects().aggregate(pipeline) + assert list(data) == [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}] + + def test_queryset_aggregation_with_skip(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) 
+ p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.skip(1).aggregate(pipeline) + + assert list(data) == [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] + + def test_queryset_aggregation_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.limit(1).aggregate(pipeline) + + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] + + def test_queryset_aggregation_with_sort(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.order_by("name").aggregate(pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ] + + def test_queryset_aggregation_with_skip_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = list(Person.objects.skip(1).limit(1).aggregate(pipeline)) + + assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] + + # Make sure limit/skip chaining order has no impact + data2 = Person.objects.limit(1).skip(1).aggregate(pipeline) + + assert data == list(data2) + + def test_queryset_aggregation_with_sort_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.order_by("name").limit(2).aggregate(pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] + + # Verify adding limit/skip steps works as expected + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}] + data = Person.objects.order_by("name").limit(2).aggregate(pipeline) + + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] + + pipeline = [ + {"$project": {"name": {"$toUpper": "$name"}}}, + {"$skip": 1}, + {"$limit": 1}, + ] + data = Person.objects.order_by("name").limit(2).aggregate(pipeline) + + assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}] + + def test_queryset_aggregation_with_sort_with_skip(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": 
{"$toUpper": "$name"}}}] + data = Person.objects.order_by("name").skip(2).aggregate(pipeline) + + assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] + + def test_queryset_aggregation_with_sort_with_skip_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.order_by("name").skip(1).limit(1).aggregate(pipeline) + + assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}] + + def test_queryset_aggregation_deprecated_interface(self): + class Person(Document): + name = StringField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna") + p2 = Person(name="Wilson Junior") + p3 = Person(name="Sandra Mara") + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + + # Make sure a warning is emitted + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + with self.assertRaises(DeprecationWarning): + Person.objects.order_by("name").limit(2).aggregate(*pipeline) + + # Make sure old interface works as expected with a 1-step pipeline + data = Person.objects.order_by("name").limit(2).aggregate(*pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] + + # Make sure old interface works as expected with a 2-steps pipeline + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}] + data = Person.objects.order_by("name").limit(2).aggregate(*pipeline) + + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] + + +if __name__ == "__main__": + unittest.main() From 99e660c66d85620b427f2ab48eba557a6faa5a43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 27 Dec 2019 09:32:05 +0100 Subject: [PATCH 137/216] update changelog --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 06eb8d0c..5fe7d6b4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,8 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of + pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 Changes in 0.19.0 ================= From b3dbb87c3c394cab5708bd321f58932d0c6b1063 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 27 Dec 2019 10:06:27 +0100 Subject: [PATCH 138/216] improve doc of aggregate kwargs --- mongoengine/queryset/base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index aa5f2584..50cb37ac 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -302,7 +302,7 @@ class BaseQuerySet(object): ``insert(..., {w: 2, fsync: True})`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. - :parm signal_kwargs: (optional) kwargs dictionary to be passed to + :param signal_kwargs: (optional) kwargs dictionary to be passed to the signal calls. 
By default returns document instances, set ``load_bulk`` to False to @@ -1262,6 +1262,8 @@ class BaseQuerySet(object): see: http://docs.mongodb.org/manual/core/aggregation-pipeline/ :param suppl_pipeline: unpacked list of pipeline (added to support deprecation of the old interface) parameter will be removed shortly + :param kwargs: (optional) kwargs dictionary to be passed to pymongo's aggregate call + See https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.aggregate .. versionadded:: 0.9 """ using_deprecated_interface = isinstance(pipeline, dict) or bool(suppl_pipeline) From e7c7a66cd1d32fb1aa4595e3d8935c57de4beab1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 27 Dec 2019 10:38:03 +0100 Subject: [PATCH 139/216] improve doc of GridFS, emphasize that subsequent call to read() requires to rewind the file with seek(0) --- docs/guide/gridfs.rst | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index f7380e89..6e4a75a5 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -10,8 +10,9 @@ Writing GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field object. This field acts as a file-like object and provides a couple of different ways of inserting and retrieving data. Arbitrary metadata such as -content type can also be stored alongside the files. In the following example, -a document is created to store details about animals, including a photo:: +content type can also be stored alongside the files. The object returned when accessing a +FileField is a proxy to `Pymongo's GridFS `_ +In the following example, a document is created to store details about animals, including a photo:: class Animal(Document): genus = StringField() @@ -34,6 +35,20 @@ field. The file can also be retrieved just as easily:: photo = marmot.photo.read() content_type = marmot.photo.content_type +.. 
note:: If you need to read() the content of a file multiple times, you'll need to "rewind" + the file-like object using `seek`:: + + marmot = Animal.objects(genus='Marmota').first() + content1 = marmot.photo.read() + assert content1 != "" + + content2 = marmot.photo.read() # will be empty + assert content2 == "" + + marmot.photo.seek(0) # rewind the file by setting the current position of the cursor in the file to 0 + content3 = marmot.photo.read() + assert content3 == content1 + Streaming --------- From 152b51fd339a1bb9db254555461b5e6651bc3a6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 29 Dec 2019 14:36:50 +0100 Subject: [PATCH 140/216] improve gridfs example (properly opening file) --- docs/guide/gridfs.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index 6e4a75a5..0baf88e0 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -21,8 +21,8 @@ In the following example, a document is created to store details about animals, marmot = Animal(genus='Marmota', family='Sciuridae') - marmot_photo = open('marmot.jpg', 'rb') - marmot.photo.put(marmot_photo, content_type = 'image/jpeg') + with open('marmot.jpg', 'rb') as fd: + marmot.photo.put(fd, content_type = 'image/jpeg') marmot.save() Retrieval From 4edad4601c7ccf30120cb8e45df7e1fb700a9347 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 3 Jan 2020 14:03:17 +0100 Subject: [PATCH 141/216] Bump version to 0.19.1 + force pillow to be < 7.0.0 --- docs/changelog.rst | 4 ++++ mongoengine/__init__.py | 2 +- setup.py | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 5fe7d6b4..b8e6ae56 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,10 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). + +Changes in 0.19.1 +================= +- Requires Pillow < 7.0.0 as it dropped Python2 support - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. 
#2079 diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index c41b5e70..e45dfc2b 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -28,7 +28,7 @@ __all__ = ( ) -VERSION = (0, 19, 0) +VERSION = (0, 19, 1) def get_version(): diff --git a/setup.py b/setup.py index ceb5afad..2d69e44a 100644 --- a/setup.py +++ b/setup.py @@ -115,7 +115,7 @@ extra_opts = { "pytest-cov", "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", - "Pillow>=2.0.0", + "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support ], } if sys.version_info[0] == 3: From 75ee282a3dfc90ab2c6a4194e8f2851ce7f4543d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 3 Jan 2020 14:15:24 +0100 Subject: [PATCH 142/216] black setup.py to please CI --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2d69e44a..6c3ef8db 100644 --- a/setup.py +++ b/setup.py @@ -115,7 +115,7 @@ extra_opts = { "pytest-cov", "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", - "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support + "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support ], } if sys.version_info[0] == 3: From f8f267a880bcb9b17a51df61b006e7d9b92644b3 Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Sun, 5 Jan 2020 20:48:16 +1100 Subject: [PATCH 143/216] Fix simple typo: thorougly -> thoroughly Closes #2236 --- docs/upgrade.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 082dbadc..250347bf 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -52,7 +52,7 @@ rename its occurrences. This release includes a major rehaul of MongoEngine's code quality and introduces a few breaking changes. It also touches many different parts of the package and although all the changes have been tested and scrutinized, -you're encouraged to thorougly test the upgrade. +you're encouraged to thoroughly test the upgrade. First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. If you import or catch this exception, you'll need to rename it in your code. 
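To make that rename concrete, code that handled the old exception only needs the new name (a minimal sketch for MongoEngine 0.16-0.18; the ``analytics`` alias is made up and assumed to be unregistered, and per the 0.19.0 changelog above the exception is renamed once more in 0.19)::

    from mongoengine.connection import MongoEngineConnectionError, get_connection

    try:
        # no connect(..., alias="analytics") was issued, so this raises
        get_connection(alias="analytics")
    except MongoEngineConnectionError:  # formerly caught as ConnectionError
        pass  # handle the missing connection exactly as before the rename
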
From 59fbd505a04dd8cf822a098b834fdd8829395fc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 5 Jan 2020 20:20:13 +0100 Subject: [PATCH 144/216] include latest pymongo version in travis --- .travis.yml | 7 +++++-- setup.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 809fbad8..a299eea9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,11 +33,12 @@ env: global: - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 + - PYMONGO_3_10=3.10 - PYMONGO_3_9=3.9 - PYMONGO_3_6=3.6 - PYMONGO_3_4=3.4 matrix: - - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_9} + - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} matrix: @@ -51,6 +52,8 @@ matrix: env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} + - python: 3.7 + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_10} install: # Install Mongo @@ -110,5 +113,5 @@ deploy: on: tags: true repo: MongoEngine/mongoengine - condition: ($PYMONGO = ${PYMONGO_3_9}) && ($MONGODB = ${MONGODB_3_4}) + condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4}) python: 2.7 diff --git a/setup.py b/setup.py index 6c3ef8db..5ba84e06 100644 --- a/setup.py +++ b/setup.py @@ -143,7 +143,7 @@ setup( long_description=LONG_DESCRIPTION, platforms=["any"], classifiers=CLASSIFIERS, - install_requires=["pymongo>=3.4", "six>=1.10.0"], + install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0"], cmdclass={"test": PyTest}, **extra_opts ) From 928770c43a892e7e6a01e5a85f5d90a5b9a63c72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 20 Dec 2019 21:50:00 +0100 Subject: [PATCH 145/216] switching to count_documents --- mongoengine/pymongo_support.py | 28 ++++++++++++++++++++++++---- mongoengine/queryset/base.py | 32 +++++++++++++++++++++++++++++++- mongoengine/queryset/queryset.py | 1 + tests/test_connection.py | 2 +- 4 files changed, 57 insertions(+), 6 deletions(-) diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index 80c0661b..1fea9525 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -10,12 +10,32 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 -def count_documents(collection, filter): - """Pymongo>3.7 deprecates count in favour of count_documents""" +def count_documents(collection, filter, skip=None, limit=None, hint=None, collation=None): + """Pymongo>3.7 deprecates count in favour of count_documents + """ + if limit == 0: + return 0 # Pymongo raises an OperationFailure if called with limit=0 + if IS_PYMONGO_GTE_37: - return collection.count_documents(filter) + kwargs = {} + if skip is not None: + kwargs["skip"] = skip + if limit is not None: + kwargs["limit"] = limit + if collation is not None: + kwargs["collation"] = collation + if hint not in (-1, None): + kwargs["hint"] = hint + return collection.count_documents(filter=filter, **kwargs) else: - count = collection.find(filter).count() + cursor = collection.find(filter) + if limit: + cursor = cursor.limit(limit) + if skip: + cursor = cursor.skip(skip) + if hint != -1: + cursor = cursor.hint(hint) + count = cursor.count() return count diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 50cb37ac..d7b4007e 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -12,6 +12,7 @@ import pymongo.errors from pymongo.collection import ReturnDocument from pymongo.common import validate_read_preference import six 
+from pymongo.errors import OperationFailure from six import iteritems from mongoengine import signals @@ -26,6 +27,7 @@ from mongoengine.errors import ( NotUniqueError, OperationError, ) +from mongoengine.pymongo_support import count_documents from mongoengine.queryset import transform from mongoengine.queryset.field_list import QueryFieldList from mongoengine.queryset.visitor import Q, QNode @@ -392,9 +394,37 @@ class BaseQuerySet(object): :meth:`skip` that has been applied to this cursor into account when getting the count """ + # mimic the fact that setting .limit(0) in pymongo sets no limit + # https://docs.mongodb.com/manual/reference/method/cursor.limit/#zero-value if self._limit == 0 and with_limit_and_skip is False or self._none: return 0 - count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) + + kwargs = ( + {"limit": self._limit, "skip": self._skip} if with_limit_and_skip else {} + ) + + if self._limit == 0: + # mimic the fact that historically .limit(0) sets no limit + kwargs.pop('limit', None) + + if self._hint not in (-1, None): + kwargs["hint"] = self._hint + + if self._collation: + kwargs["collation"] = self._collation + + try: + count = count_documents( + collection=self._cursor.collection, + filter=self._cursor._Cursor__spec, + **kwargs + ) + except OperationFailure: + # Accounts for some operators that used to work with .count but are no longer working + # with count_documents (i.e $geoNear, $near, and $nearSphere) + # fallback to deprecated Cursor.count + count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) + self._cursor_obj = None return count diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4ba62d46..cc1891f6 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -146,6 +146,7 @@ class QuerySet(BaseQuerySet): return super(QuerySet, self).count(with_limit_and_skip) if self._len is None: + # cache the length self._len = super(QuerySet, self).count(with_limit_and_skip) return self._len diff --git a/tests/test_connection.py b/tests/test_connection.py index e40a6994..542da4f0 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -282,7 +282,7 @@ class ConnectionTest(unittest.TestCase): # database won't exist until we save a document some_document.save() assert conn.get_default_database().name == "mongoenginetest" - assert conn.database_names()[0] == "mongoenginetest" + assert conn.list_database_names()[0] == "mongoenginetest" @require_mongomock def test_connect_with_host_list(self): From 705c55ce24c006dd2bd06437f3d7fb945ed1de17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 5 Jan 2020 20:30:56 +0100 Subject: [PATCH 146/216] update tox file to account for mg310 --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 349b5577..a3d2df60 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,7 @@ commands = deps = mg34: pymongo>=3.4,<3.5 mg36: pymongo>=3.6,<3.7 - mg39: pymongo>=3.9,<4.0 + mg39: pymongo>=3.9,<3.10 + mg310: pymongo>=3.10,<3.11 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs From f93f9406ee3646eb3bac8fbeed6aa502ffd83a31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 5 Jan 2020 21:08:20 +0100 Subject: [PATCH 147/216] improve doc next to code --- mongoengine/queryset/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index d7b4007e..125480a7 100644 --- a/mongoengine/queryset/base.py 
+++ b/mongoengine/queryset/base.py @@ -423,6 +423,7 @@ class BaseQuerySet(object): # Accounts for some operators that used to work with .count but are no longer working # with count_documents (i.e $geoNear, $near, and $nearSphere) # fallback to deprecated Cursor.count + # Keeping this should be reevaluated the day pymongo removes .count entirely count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) self._cursor_obj = None From 60c42dddd5a44623b696d8ce6eff7be7f955e796 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 5 Jan 2020 22:29:13 +0100 Subject: [PATCH 148/216] finalize code related to count_documents migration --- docs/changelog.rst | 3 +++ mongoengine/pymongo_support.py | 38 +++++++++++++++++++--------------- mongoengine/queryset/base.py | 17 +++++---------- tests/document/test_indexes.py | 3 ++- 4 files changed, 31 insertions(+), 30 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b8e6ae56..d11fd347 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,9 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count + and Cursor.count that got deprecated in pymongo >= 3.7. + This should have a negative impact on performance of count see Issue #2219 Changes in 0.19.1 ================= diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index 1fea9525..38332c13 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -2,6 +2,7 @@ Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. """ import pymongo +from pymongo.errors import OperationFailure _PYMONGO_37 = (3, 7) @@ -16,25 +17,28 @@ def count_documents(collection, filter, skip=None, limit=None, hint=None, collat if limit == 0: return 0 # Pymongo raises an OperationFailure if called with limit=0 - if IS_PYMONGO_GTE_37: - kwargs = {} - if skip is not None: - kwargs["skip"] = skip - if limit is not None: - kwargs["limit"] = limit - if collation is not None: - kwargs["collation"] = collation - if hint not in (-1, None): - kwargs["hint"] = hint + kwargs = {} + if skip is not None: + kwargs["skip"] = skip + if limit is not None: + kwargs["limit"] = limit + if hint not in (-1, None): + kwargs["hint"] = hint + if collation is not None: + kwargs["collation"] = collation + + try: return collection.count_documents(filter=filter, **kwargs) - else: + except (AttributeError, OperationFailure) as ex: + # AttributeError - count_documents appeared in pymongo 3.7 + # OperationFailure - accounts for some operators that used to work + # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere) + # fallback to deprecated Cursor.count + # Keeping this should be reevaluated the day pymongo removes .count entirely cursor = collection.find(filter) - if limit: - cursor = cursor.limit(limit) - if skip: - cursor = cursor.skip(skip) - if hint != -1: - cursor = cursor.hint(hint) + for option, option_value in kwargs.items(): + cursor_method = getattr(cursor, option) + cursor = cursor_method(option_value) count = cursor.count() return count diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 125480a7..41b394b4 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -413,18 +413,11 @@ class BaseQuerySet(object): if self._collation: kwargs["collation"] = self._collation - try: - count = 
count_documents( - collection=self._cursor.collection, - filter=self._cursor._Cursor__spec, - **kwargs - ) - except OperationFailure: - # Accounts for some operators that used to work with .count but are no longer working - # with count_documents (i.e $geoNear, $near, and $nearSphere) - # fallback to deprecated Cursor.count - # Keeping this should be reevaluated the day pymongo removes .count entirely - count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) + count = count_documents( + collection=self._cursor.collection, + filter=self._cursor._Cursor__spec, + **kwargs + ) self._cursor_obj = None return count diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index be857b59..801473b1 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -552,8 +552,9 @@ class TestIndexes(unittest.TestCase): assert 5 == query_result.count() incorrect_collation = {"arndom": "wrdo"} - with pytest.raises(OperationFailure): + with pytest.raises(OperationFailure) as exc_info: BlogPost.objects.collation(incorrect_collation).count() + assert 'Missing expected field' in str(exc_info.value) query_result = BlogPost.objects.collation({}).order_by("name") assert [x.name for x in query_result] == sorted(names) From 84f3dce4920e4e212a485dcbe9f1fa4cda4f7daa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 5 Jan 2020 22:50:19 +0100 Subject: [PATCH 149/216] fix flake8 findings --- mongoengine/pymongo_support.py | 2 +- mongoengine/queryset/base.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index 38332c13..d9c5ee27 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -29,7 +29,7 @@ def count_documents(collection, filter, skip=None, limit=None, hint=None, collat try: return collection.count_documents(filter=filter, **kwargs) - except (AttributeError, OperationFailure) as ex: + except (AttributeError, OperationFailure): # AttributeError - count_documents appeared in pymongo 3.7 # OperationFailure - accounts for some operators that used to work # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 41b394b4..303271f5 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -12,7 +12,6 @@ import pymongo.errors from pymongo.collection import ReturnDocument from pymongo.common import validate_read_preference import six -from pymongo.errors import OperationFailure from six import iteritems from mongoengine import signals From e64a7a94481f02764b16ff9ce448489fad2fc321 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 7 Jan 2020 21:44:04 +0100 Subject: [PATCH 150/216] reformat with latest black --- mongoengine/pymongo_support.py | 4 +++- mongoengine/queryset/base.py | 2 +- tests/document/test_indexes.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index d9c5ee27..3aef4e09 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -11,7 +11,9 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 -def count_documents(collection, filter, skip=None, limit=None, hint=None, collation=None): +def count_documents( + collection, filter, skip=None, limit=None, hint=None, collation=None +): """Pymongo>3.7 
    deprecates count in favour of count_documents
    """
    if limit == 0:
diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py
index 303271f5..ae45297d 100644
--- a/mongoengine/queryset/base.py
+++ b/mongoengine/queryset/base.py
@@ -404,7 +404,7 @@ class BaseQuerySet(object):
 
         if self._limit == 0:
             # mimic the fact that historically .limit(0) sets no limit
-            kwargs.pop('limit', None)
+            kwargs.pop("limit", None)
 
         if self._hint not in (-1, None):
             kwargs["hint"] = self._hint
diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py
index 801473b1..5133b007 100644
--- a/tests/document/test_indexes.py
+++ b/tests/document/test_indexes.py
@@ -554,7 +554,7 @@ class TestIndexes(unittest.TestCase):
         incorrect_collation = {"arndom": "wrdo"}
         with pytest.raises(OperationFailure) as exc_info:
             BlogPost.objects.collation(incorrect_collation).count()
-        assert 'Missing expected field' in str(exc_info.value)
+        assert "Missing expected field" in str(exc_info.value)
 
         query_result = BlogPost.objects.collation({}).order_by("name")
         assert [x.name for x in query_result] == sorted(names)

From 2fa48cd9e5207ad753419ace20dc0c615c7481cb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bastien=20G=C3=A9rard?=
Date: Tue, 7 Jan 2020 22:24:55 +0100
Subject: [PATCH 151/216] fix for pymongo < 3.7
---
 mongoengine/pymongo_support.py | 30 ++++++++++++++++--------------
 1 file changed, 16 insertions(+), 14 deletions(-)

diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py
index 3aef4e09..284efc2f 100644
--- a/mongoengine/pymongo_support.py
+++ b/mongoengine/pymongo_support.py
@@ -29,20 +29,22 @@ def count_documents(
     if collation is not None:
         kwargs["collation"] = collation
 
-    try:
-        return collection.count_documents(filter=filter, **kwargs)
-    except (AttributeError, OperationFailure):
-        # AttributeError - count_documents appeared in pymongo 3.7
-        # OperationFailure - accounts for some operators that used to work
-        # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere)
-        # fallback to deprecated Cursor.count
-        # Keeping this should be reevaluated the day pymongo removes .count entirely
-        cursor = collection.find(filter)
-        for option, option_value in kwargs.items():
-            cursor_method = getattr(cursor, option)
-            cursor = cursor_method(option_value)
-        count = cursor.count()
-        return count
+    # count_documents appeared in pymongo 3.7
+    if IS_PYMONGO_GTE_37:
+        try:
+            return collection.count_documents(filter=filter, **kwargs)
+        except OperationFailure:
+            # OperationFailure - accounts for some operators that used to work
+            # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere)
+            # fallback to deprecated Cursor.count
+            # Keeping this should be reevaluated the day pymongo removes .count entirely
+            pass
+
+    cursor = collection.find(filter)
+    for option, option_value in kwargs.items():
+        cursor_method = getattr(cursor, option)
+        cursor = cursor_method(option_value)
+    return cursor.count()
 
 
 def list_collection_names(db, include_system_collections=False):

From d73846213977b2fb1e382b38d86406bdf9a7e81f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bastien=20G=C3=A9rard?=
Date: Sat, 11 Jan 2020 23:15:30 +0100
Subject: [PATCH 152/216] Fix bug introduced in 0.19 related to DictField
 validate failing without default connection
---
 docs/changelog.rst | 5 ++++
 mongoengine/fields.py | 14 +++++-----
 mongoengine/mongodb_support.py | 2 +-
 tests/fields/test_dict_field.py | 48 ++++++++++++++++++++-------------
 4 files changed, 42
insertions(+), 27 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index b8e6ae56..2b532da9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,6 +7,11 @@ Development =========== - (Fill this out as you fix issues and develop your features). +Changes in 0.19.2 +================= +- DictField validate failed without default connection (bug introduced in 0.19.0) #2239 + + Changes in 0.19.1 ================= - Requires Pillow < 7.0.0 as it dropped Python2 support diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 7ec8c0f3..d502dba3 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1088,14 +1088,12 @@ class DictField(ComplexBaseField): msg = "Invalid dictionary key - documents must have only string keys" self.error(msg) - curr_mongo_ver = get_mongodb_version() - - if curr_mongo_ver < MONGODB_36 and key_has_dot_or_dollar(value): - self.error( - 'Invalid dictionary key name - keys may not contain "."' - ' or startswith "$" characters' - ) - elif curr_mongo_ver >= MONGODB_36 and key_starts_with_dollar(value): + # Following condition applies to MongoDB >= 3.6 + # older Mongo has stricter constraints but + # it will be rejected upon insertion anyway + # Having a validation that depends on the MongoDB version + # is not straightforward as the field isn't aware of the connected Mongo + if key_starts_with_dollar(value): self.error( 'Invalid dictionary key name - keys may not startswith "$" characters' ) diff --git a/mongoengine/mongodb_support.py b/mongoengine/mongodb_support.py index 5d437fef..522f064e 100644 --- a/mongoengine/mongodb_support.py +++ b/mongoengine/mongodb_support.py @@ -11,7 +11,7 @@ MONGODB_36 = (3, 6) def get_mongodb_version(): - """Return the version of the connected mongoDB (first 2 digits) + """Return the version of the default connected mongoDB (first 2 digits) :return: tuple(int, int) """ diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index 44e628f6..6850cd58 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- import pytest +from bson import InvalidDocument from mongoengine import * from mongoengine.base import BaseDict @@ -19,22 +20,24 @@ class TestDictField(MongoDBTestCase): post = BlogPost(info=info).save() assert get_as_pymongo(post) == {"_id": post.id, "info": info} - def test_general_things(self): - """Ensure that dict types work as expected.""" + def test_validate_invalid_type(self): + class BlogPost(Document): + info = DictField() + BlogPost.drop_collection() + + invalid_infos = ["my post", ["test", "test"], {1: "test"}] + for invalid_info in invalid_infos: + with pytest.raises(ValidationError): + BlogPost(info=invalid_info).validate() + + def test_keys_with_dots_or_dollars(self): class BlogPost(Document): info = DictField() BlogPost.drop_collection() post = BlogPost() - post.info = "my post" - with pytest.raises(ValidationError): - post.validate() - - post.info = ["test", "test"] - with pytest.raises(ValidationError): - post.validate() post.info = {"$title": "test"} with pytest.raises(ValidationError): @@ -48,25 +51,34 @@ class TestDictField(MongoDBTestCase): with pytest.raises(ValidationError): post.validate() - post.info = {1: "test"} - with pytest.raises(ValidationError): - post.validate() - post.info = {"nested": {"the.title": "test"}} if get_mongodb_version() < MONGODB_36: - with pytest.raises(ValidationError): - post.validate() + # MongoDB < 3.6 rejects dots + # To avoid checking the mongodb version from 
the DictField class + # we rely on MongoDB to reject the data during the save + post.validate() + with pytest.raises(InvalidDocument): + post.save() else: post.validate() post.info = {"dollar_and_dot": {"te$st.test": "test"}} if get_mongodb_version() < MONGODB_36: - with pytest.raises(ValidationError): - post.validate() + post.validate() + with pytest.raises(InvalidDocument): + post.save() else: post.validate() - post.info = {"title": "test"} + def test_general_things(self): + """Ensure that dict types work as expected.""" + + class BlogPost(Document): + info = DictField() + + BlogPost.drop_collection() + + post = BlogPost(info={"title": "test"}) post.save() post = BlogPost() From 412bed0f6d7aa42a5fa855217b0d2a3b1721ef27 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 12 Jan 2020 11:04:05 +0100 Subject: [PATCH 153/216] fix bug in legacy .count due to with_limit_and_skip that was missing --- mongoengine/pymongo_support.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index 284efc2f..9cf9e2ae 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -44,7 +44,8 @@ def count_documents( for option, option_value in kwargs.items(): cursor_method = getattr(cursor, option) cursor = cursor_method(option_value) - return cursor.count() + with_limit_and_skip = "skip" in kwargs or "limit" in kwargs + return cursor.count(with_limit_and_skip=with_limit_and_skip) def list_collection_names(db, include_system_collections=False): From 18b68f1b8034457384daa5bdbd7d780055e21690 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 12 Jan 2020 21:29:18 +0100 Subject: [PATCH 154/216] update travis mongo 4.0 to latest 4.0.13 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f7880649..a7d6da1a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,7 +31,7 @@ dist: xenial env: global: - - MONGODB_4_0=4.0.12 + - MONGODB_4_0=4.0.13 - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 - PYMONGO_3_10=3.10 From e0565ddac5cd62f9fd05c69187b49a20bb644035 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 12 Jan 2020 21:31:28 +0100 Subject: [PATCH 155/216] update changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index b8e6ae56..d924a2c1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,7 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). 
+- Add Mongo 4.0 to Travis Changes in 0.19.1 ================= From 605de59bd08d9d4f54f695c7f73a1458975d479f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 12 Jan 2020 21:37:32 +0100 Subject: [PATCH 156/216] improve travis + fix tox mg310 --- .travis.yml | 9 +++++---- tox.ini | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index a7d6da1a..62bbacb1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,13 +31,14 @@ dist: xenial env: global: - - MONGODB_4_0=4.0.13 - MONGODB_3_4=3.4.17 - MONGODB_3_6=3.6.12 - - PYMONGO_3_10=3.10 - - PYMONGO_3_9=3.9 - - PYMONGO_3_6=3.6 + - MONGODB_4_0=4.0.13 + - PYMONGO_3_4=3.4 + - PYMONGO_3_6=3.6 + - PYMONGO_3_9=3.9 + - PYMONGO_3_10=3.10 matrix: - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} diff --git a/tox.ini b/tox.ini index c3789b7d..396817ca 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,pypy,pypy3}-{mg34,mg36, mg39} +envlist = {py27,py35,pypy,pypy3}-{mg34,mg36,mg39,mg310} [testenv] commands = From 72de6d67c7f91d9e1278d1503e87766c4e4708c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 3 Jan 2020 09:40:00 +0100 Subject: [PATCH 157/216] Bump dev status classifier --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 5ba84e06..5c6451aa 100644 --- a/setup.py +++ b/setup.py @@ -92,7 +92,7 @@ version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] VERSION = get_version(eval(version_line.split("=")[-1])) CLASSIFIERS = [ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", From 57db68dc040acae8070667a2840f945d3c40669a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 13 Jan 2020 23:34:43 +0100 Subject: [PATCH 158/216] update changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index d924a2c1..add37120 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,6 +7,7 @@ Development =========== - (Fill this out as you fix issues and develop your features). - Add Mongo 4.0 to Travis +- Bump development Status classifier to Production/Stable #2232 Changes in 0.19.1 ================= From 86e965f854763863de2d12e54b4416b4761df55b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 30 Dec 2019 21:58:57 +0100 Subject: [PATCH 159/216] remove very old deprecated method --- docs/changelog.rst | 3 ++- mongoengine/queryset/base.py | 20 -------------------- 2 files changed, 2 insertions(+), 21 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d924a2c1..b96a85ed 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -13,7 +13,8 @@ Changes in 0.19.1 - Requires Pillow < 7.0.0 as it dropped Python2 support - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 - +- BREAKING CHANGE: Removed Queryset._ensure_indexes and Queryset.ensure_indexes that were deprecated in 2013. + Document.ensure_indexes still exists Changes in 0.19.0 ================= - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. 
#2112
diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py
index 50cb37ac..805a3d0a 100644
--- a/mongoengine/queryset/base.py
+++ b/mongoengine/queryset/base.py
@@ -1958,23 +1958,3 @@ class BaseQuerySet(object):
             setattr(queryset, "_" + method_name, val)
 
         return queryset
-
-    # Deprecated
-    def ensure_index(self, **kwargs):
-        """Deprecated use :func:`Document.ensure_index`"""
-        msg = (
-            "Doc.objects()._ensure_index() is deprecated. "
-            "Use Doc.ensure_index() instead."
-        )
-        warnings.warn(msg, DeprecationWarning)
-        self._document.__class__.ensure_index(**kwargs)
-        return self
-
-    def _ensure_indexes(self):
-        """Deprecated use :func:`~Document.ensure_indexes`"""
-        msg = (
-            "Doc.objects()._ensure_indexes() is deprecated. "
-            "Use Doc.ensure_indexes() instead."
-        )
-        warnings.warn(msg, DeprecationWarning)
-        self._document.__class__.ensure_indexes()

From 095217e7977b933c53697e9736a6a8bcb37830cf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bastien=20G=C3=A9rard?=
Date: Mon, 13 Jan 2020 23:53:24 +0100
Subject: [PATCH 160/216] remove methods that were deprecated in 2013...
---
 docs/changelog.rst | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/docs/changelog.rst b/docs/changelog.rst
index b96a85ed..5fe34f91 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -7,14 +7,15 @@ Development
 ===========
 - (Fill this out as you fix issues and develop your features).
 - Add Mongo 4.0 to Travis
+- BREAKING CHANGE: Removed ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes`` that were deprecated in 2013.
+  ``Document.ensure_indexes`` still exists and is the right method to use
 
 Changes in 0.19.1
 =================
 - Requires Pillow < 7.0.0 as it dropped Python2 support
 - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079
-- BREAKING CHANGE: Removed Queryset._ensure_indexes and Queryset.ensure_indexes that were deprecated in 2013.
-  Document.ensure_indexes still exists
+
 
 Changes in 0.19.0
 =================
 - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``.
#2112 From 38703acc299740ea52a73cca3e222bc050754223 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 23 Jan 2020 23:33:23 +0100 Subject: [PATCH 161/216] fix complex datetime field invalid string set --- mongoengine/fields.py | 5 ++++- tests/fields/test_complex_datetime_field.py | 17 +++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 7ec8c0f3..3eff0325 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -684,7 +684,10 @@ class ComplexDateTimeField(StringField): super(ComplexDateTimeField, self).__set__(instance, value) value = instance._data[self.name] if value is not None: - instance._data[self.name] = self._convert_from_datetime(value) + if isinstance(value, datetime.datetime): + instance._data[self.name] = self._convert_from_datetime(value) + else: + instance._data[self.name] = value def validate(self, value): value = self.to_python(value) diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index f0a6b96e..699032cc 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -4,6 +4,8 @@ import itertools import math import re +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase @@ -191,3 +193,18 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): fetched_log = Log.objects.with_id(log.id) assert fetched_log.timestamp >= NOW + + def test_setting_bad_value_does_not_raise_unless_validate_is_called(self): + # test regression of #2253 + + class Log(Document): + timestamp = ComplexDateTimeField() + + Log.drop_collection() + + log = Log(timestamp="garbage") + with pytest.raises(ValidationError): + log.validate() + + with pytest.raises(ValidationError): + log.save() From 2d6a4c4b9043d5cf4fe029870fceb9ecd47ce9af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Thu, 23 Jan 2020 23:36:03 +0100 Subject: [PATCH 162/216] update changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index d924a2c1..11e6c063 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,6 +7,7 @@ Development =========== - (Fill this out as you fix issues and develop your features). 
- Add Mongo 4.0 to Travis +- Fix error when setting a string as a ComplexDateTimeField #2253 Changes in 0.19.1 ================= From 8e17e42e26d944d90bdd16bfacb8d07879768055 Mon Sep 17 00:00:00 2001 From: Agustin Date: Fri, 24 Jan 2020 13:11:07 -0300 Subject: [PATCH 163/216] Allow setting read_concern --- mongoengine/context_managers.py | 8 ++++++ mongoengine/queryset/base.py | 31 +++++++++++++++++---- tests/queryset/test_queryset.py | 49 +++++++++++++++++++++++++++++++++ 3 files changed, 82 insertions(+), 6 deletions(-) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 1592ceef..0f6c8698 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -257,3 +257,11 @@ def set_write_concern(collection, write_concerns): combined_concerns = dict(collection.write_concern.document.items()) combined_concerns.update(write_concerns) yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) + + +@contextmanager +def set_read_write_concern(collection, write_concerns, read_concern): + combined_write_concerns = dict(collection.write_concern.document.items()) + combined_write_concerns.update(write_concerns) + + yield collection.with_options(write_concern=WriteConcern(**combined_write_concerns), read_concern=read_concern) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 50cb37ac..0b76235c 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -11,6 +11,7 @@ import pymongo import pymongo.errors from pymongo.collection import ReturnDocument from pymongo.common import validate_read_preference +from pymongo.read_concern import ReadConcern import six from six import iteritems @@ -18,7 +19,7 @@ from mongoengine import signals from mongoengine.base import get_document from mongoengine.common import _import_class from mongoengine.connection import get_db -from mongoengine.context_managers import set_write_concern, switch_db +from mongoengine.context_managers import set_write_concern, set_read_write_concern, switch_db from mongoengine.errors import ( BulkWriteError, InvalidQueryError, @@ -62,6 +63,7 @@ class BaseQuerySet(object): self._timeout = True self._slave_okay = False self._read_preference = None + self._read_concern = None self._iter = False self._scalar = [] self._none = False @@ -490,7 +492,7 @@ class BaseQuerySet(object): return result.deleted_count def update( - self, upsert=False, multi=True, write_concern=None, full_result=False, **update + self, upsert=False, multi=True, write_concern=None, read_concern=None, full_result=False, **update ): """Perform an atomic update on the fields matched by the query. @@ -502,6 +504,7 @@ class BaseQuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. 
+ :param read_concern: Override the read concern for the operation :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number updated items :param update: Django-style update keyword arguments @@ -528,7 +531,7 @@ class BaseQuerySet(object): else: update["$set"] = {"_cls": queryset._document._class_name} try: - with set_write_concern(queryset._collection, write_concern) as collection: + with set_read_write_concern(queryset._collection, write_concern, read_concern) as collection: update_func = collection.update_one if multi: update_func = collection.update_many @@ -545,7 +548,7 @@ class BaseQuerySet(object): raise OperationError(message) raise OperationError(u"Update failed (%s)" % six.text_type(err)) - def upsert_one(self, write_concern=None, **update): + def upsert_one(self, write_concern=None, read_concern=None, **update): """Overwrite or add the first document matched by the query. :param write_concern: Extra keyword arguments are passed down which @@ -554,6 +557,7 @@ class BaseQuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. + :param read_concern: Override the read concern for the operation :param update: Django-style update keyword arguments :returns the new or overwritten document @@ -565,6 +569,7 @@ class BaseQuerySet(object): multi=False, upsert=True, write_concern=write_concern, + read_concern=read_concern, full_result=True, **update ) @@ -1196,6 +1201,20 @@ class BaseQuerySet(object): queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference return queryset + def read_concern(self, read_concern): + """Change the read_concern when querying. + + :param read_concern: override ReplicaSetConnection-level + preference. + """ + if read_concern is not None and not isinstance(read_concern, ReadConcern): + raise TypeError("%r is not a read concern." % (read_concern,)) + + queryset = self.clone() + queryset._read_concern = read_concern + queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_concern + return queryset + def scalar(self, *fields): """Instead of returning Document instances, return either a specific value or a tuple of values in order. @@ -1642,9 +1661,9 @@ class BaseQuerySet(object): # XXX In PyMongo 3+, we define the read preference on a collection # level, not a cursor level. Thus, we need to get a cloned collection # object using `with_options` first. 
- if self._read_preference is not None: + if self._read_preference is not None or self._read_concern is not None: self._cursor_obj = self._collection.with_options( - read_preference=self._read_preference + read_preference=self._read_preference, read_concern=self._read_concern ).find(self._query, **self._cursor_args) else: self._cursor_obj = self._collection.find(self._query, **self._cursor_args) diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index b30350e6..c238752d 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -7,6 +7,7 @@ from decimal import Decimal from bson import DBRef, ObjectId import pymongo +from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.results import UpdateResult import pytest @@ -4658,6 +4659,54 @@ class TestQueryset(unittest.TestCase): ) assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) + def test_read_concern(self): + class Bar(Document): + txt = StringField() + + meta = {"indexes": ["txt"]} + + Bar.drop_collection() + bar = Bar.objects.create(txt="xyz") + + bars = list(Bar.objects.read_concern(None)) + assert bars == [bar] + + bars = Bar.objects.read_concern(ReadConcern(level='local')) + assert bars._read_concern == ReadConcern(level='local') + assert ( + bars._cursor.collection.read_concern + == ReadConcern(level='local') + ) + + # Make sure that `.read_concern(...)` does accept string values. + with pytest.raises(TypeError): + Bar.objects.read_concern('local') + + def assert_read_concern(qs, expected_read_concern): + assert qs._read_concern == expected_read_concern + assert qs._cursor.collection.read_concern == expected_read_concern + + # Make sure read concern is respected after a `.skip(...)`. + bars = Bar.objects.skip(1).read_concern(ReadConcern('majority')) + assert_read_concern(bars, ReadConcern('majority')) + + # Make sure read concern is respected after a `.limit(...)`. + bars = Bar.objects.limit(1).read_concern(ReadConcern('majority')) + assert_read_concern(bars, ReadConcern('majority')) + + # Make sure read concern is respected after an `.order_by(...)`. + bars = Bar.objects.order_by("txt").read_concern( + ReadConcern('majority') + ) + assert_read_concern(bars, ReadConcern('majority')) + + # Make sure read concern is respected after a `.hint(...)`. 
+ bars = Bar.objects.hint([("txt", 1)]).read_concern( + ReadConcern('majority') + ) + assert_read_concern(bars, ReadConcern('majority')) + + def test_json_simple(self): class Embedded(EmbeddedDocument): string = StringField() From 450658d7ac2b26be7849f8f0599fff06bb7e6ebc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 4 Feb 2020 22:51:02 +0100 Subject: [PATCH 164/216] fix indirect library version that dropped python2 support recently --- setup.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 5ba84e06..5cba5d9e 100644 --- a/setup.py +++ b/setup.py @@ -108,6 +108,10 @@ CLASSIFIERS = [ "Topic :: Software Development :: Libraries :: Python Modules", ] +PYTHON_VERSION = sys.version_info[0] +PY3 = PYTHON_VERSION == 3 +PY2 = PYTHON_VERSION == 2 + extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), "tests_require": [ @@ -116,9 +120,10 @@ extra_opts = { "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support + "zipp<2.0.0", # (dependency of pytest) dropped python2 support ], } -if sys.version_info[0] == 3: +if PY3: extra_opts["use_2to3"] = True if "test" in sys.argv: extra_opts["packages"] = find_packages() From 4bca3de42f08a5ca4fdd0b4021d6a54f25e24003 Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Fri, 14 Feb 2020 16:43:07 -0300 Subject: [PATCH 165/216] Add support for the elemMatch projection operator. Add basic tests to the fields queryset method. --- mongoengine/queryset/base.py | 2 +- tests/queryset/test_queryset.py | 51 +++++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 50cb37ac..710259df 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1037,7 +1037,7 @@ class BaseQuerySet(object): """ # Check for an operator and transform to mongo-style if there is - operators = ["slice"] + operators = ["slice", "elemMatch"] cleaned_fields = [] for key, value in kwargs.items(): parts = key.split("__") diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index b30350e6..5ebd545f 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4476,6 +4476,57 @@ class TestQueryset(unittest.TestCase): expected = "[u'A1', u'A2']" assert expected == "%s" % sorted(names) + def test_fields(self): + class Bar(EmbeddedDocument): + v = StringField() + z = StringField() + + + class Foo(Document): + x = StringField() + y = IntField() + items = EmbeddedDocumentListField(Bar) + + + Foo.drop_collection() + + Foo(x='foo1', y=1).save() + Foo(x='foo2', y=2, items=[]).save() + Foo(x='foo3', y=3, items=[Bar(z='a', v='V')]).save() + Foo(x='foo4', y=4, items=[Bar(z='a', v='V'), Bar(z='b', v='W'), Bar(z='b', v='X'), Bar(z='c', v='V')]).save() + Foo(x='foo5', y=5, items=[Bar(z='b', v='X'), Bar(z='c', v='V'), Bar(z='d', v='V'), Bar(z='e', v='V')]).save() + + foos_with_x = list(Foo.objects.order_by('y').fields(x=1)) + + assert all(o.x is not None for o in foos_with_x) + + foos_without_y = list(Foo.objects.order_by('y').fields(y=0)) + + assert all(o.y is None for o in foos_with_x) + + foos_with_sliced_items = list(Foo.objects.order_by('y').fields(slice__items=1)) + + assert foos_with_sliced_items[0].items == [] + assert foos_with_sliced_items[1].items == [] + assert len(foos_with_sliced_items[2].items) == 1 + 
assert foos_with_sliced_items[2].items[0].z == 'a' + assert len(foos_with_sliced_items[3].items) == 1 + assert foos_with_sliced_items[3].items[0].z == 'a' + assert len(foos_with_sliced_items[4].items) == 1 + assert foos_with_sliced_items[4].items[0].z == 'b' + + foos_with_elem_match_items = list(Foo.objects.order_by('y').fields(elemMatch__items={'z': 'b'})) + + assert foos_with_elem_match_items[0].items == [] + assert foos_with_elem_match_items[1].items == [] + assert foos_with_elem_match_items[2].items == [] + assert len(foos_with_elem_match_items[3].items) == 1 + assert foos_with_elem_match_items[3].items[0].z == 'b' + assert foos_with_elem_match_items[3].items[0].v == 'W' + assert len(foos_with_elem_match_items[4].items) == 1 + assert foos_with_elem_match_items[4].items[0].z == 'b' + + def test_elem_match(self): class Foo(EmbeddedDocument): shape = StringField() From 81f9b351b3838fe27924937e706300805e9eb82b Mon Sep 17 00:00:00 2001 From: Leonardo Domingues Date: Fri, 21 Feb 2020 19:14:34 -0300 Subject: [PATCH 166/216] Add return info in the save function docstring --- AUTHORS | 1 + mongoengine/document.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index b9a81c63..7d3000ce 100644 --- a/AUTHORS +++ b/AUTHORS @@ -255,3 +255,4 @@ that much better: * Filip Kucharczyk (https://github.com/Pacu2) * Eric Timmons (https://github.com/daewok) * Matthew Simpson (https://github.com/mcsimps2) + * Leonardo Domingues (https://github.com/leodmgs) diff --git a/mongoengine/document.py b/mongoengine/document.py index 23968f17..5e812510 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -332,7 +332,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): ): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be - created. + created. Returns the saved object instance. :param force_insert: only try to create a new document, don't allow updates of existing documents. 
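Patches 163, 165 and 166 above touch the user-facing API: ``QuerySet.read_concern()``, the ``elemMatch`` operator in ``QuerySet.fields()``, and the documented return value of ``Document.save()``. The following sketch shows how these pieces could be combined; the ``Post`` and ``Comment`` document classes and the database name are illustrative placeholders, not part of the patches:

    from pymongo.read_concern import ReadConcern

    from mongoengine import (
        Document,
        EmbeddedDocument,
        EmbeddedDocumentListField,
        StringField,
        connect,
    )

    connect("example_db")  # placeholder database name

    class Comment(EmbeddedDocument):
        kind = StringField()

    class Post(Document):
        title = StringField()
        comments = EmbeddedDocumentListField(Comment)

    # save() returns the saved instance (PATCH 166), so it can be assigned directly
    post = Post(
        title="intro", comments=[Comment(kind="spam"), Comment(kind="ham")]
    ).save()

    # Keep only the embedded comments matching the elemMatch projection (PATCH 165)
    projected = Post.objects.fields(elemMatch__comments={"kind": "spam"}).first()

    # Run the count with a "majority" read concern (PATCH 163);
    # passing a plain string instead of a ReadConcern raises TypeError
    n = Post.objects.read_concern(ReadConcern("majority")).count()

The ``TypeError`` for plain strings and the projection behaviour mirror the assertions added to ``tests/queryset/test_queryset.py`` in those patches.
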
From cfb4943986189a22ae75fd4f378f38f7ec0a77bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 2 Mar 2020 22:49:21 +0100 Subject: [PATCH 167/216] reformat with black --- tests/queryset/test_queryset.py | 53 ++++++++++++++++++++++----------- 1 file changed, 35 insertions(+), 18 deletions(-) diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 5ebd545f..f6d1a916 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4481,51 +4481,68 @@ class TestQueryset(unittest.TestCase): v = StringField() z = StringField() - class Foo(Document): x = StringField() y = IntField() items = EmbeddedDocumentListField(Bar) - Foo.drop_collection() - Foo(x='foo1', y=1).save() - Foo(x='foo2', y=2, items=[]).save() - Foo(x='foo3', y=3, items=[Bar(z='a', v='V')]).save() - Foo(x='foo4', y=4, items=[Bar(z='a', v='V'), Bar(z='b', v='W'), Bar(z='b', v='X'), Bar(z='c', v='V')]).save() - Foo(x='foo5', y=5, items=[Bar(z='b', v='X'), Bar(z='c', v='V'), Bar(z='d', v='V'), Bar(z='e', v='V')]).save() + Foo(x="foo1", y=1).save() + Foo(x="foo2", y=2, items=[]).save() + Foo(x="foo3", y=3, items=[Bar(z="a", v="V")]).save() + Foo( + x="foo4", + y=4, + items=[ + Bar(z="a", v="V"), + Bar(z="b", v="W"), + Bar(z="b", v="X"), + Bar(z="c", v="V"), + ], + ).save() + Foo( + x="foo5", + y=5, + items=[ + Bar(z="b", v="X"), + Bar(z="c", v="V"), + Bar(z="d", v="V"), + Bar(z="e", v="V"), + ], + ).save() - foos_with_x = list(Foo.objects.order_by('y').fields(x=1)) + foos_with_x = list(Foo.objects.order_by("y").fields(x=1)) assert all(o.x is not None for o in foos_with_x) - foos_without_y = list(Foo.objects.order_by('y').fields(y=0)) + foos_without_y = list(Foo.objects.order_by("y").fields(y=0)) assert all(o.y is None for o in foos_with_x) - foos_with_sliced_items = list(Foo.objects.order_by('y').fields(slice__items=1)) + foos_with_sliced_items = list(Foo.objects.order_by("y").fields(slice__items=1)) assert foos_with_sliced_items[0].items == [] assert foos_with_sliced_items[1].items == [] assert len(foos_with_sliced_items[2].items) == 1 - assert foos_with_sliced_items[2].items[0].z == 'a' + assert foos_with_sliced_items[2].items[0].z == "a" assert len(foos_with_sliced_items[3].items) == 1 - assert foos_with_sliced_items[3].items[0].z == 'a' + assert foos_with_sliced_items[3].items[0].z == "a" assert len(foos_with_sliced_items[4].items) == 1 - assert foos_with_sliced_items[4].items[0].z == 'b' + assert foos_with_sliced_items[4].items[0].z == "b" - foos_with_elem_match_items = list(Foo.objects.order_by('y').fields(elemMatch__items={'z': 'b'})) + foos_with_elem_match_items = list( + Foo.objects.order_by("y").fields(elemMatch__items={"z": "b"}) + ) assert foos_with_elem_match_items[0].items == [] assert foos_with_elem_match_items[1].items == [] assert foos_with_elem_match_items[2].items == [] assert len(foos_with_elem_match_items[3].items) == 1 - assert foos_with_elem_match_items[3].items[0].z == 'b' - assert foos_with_elem_match_items[3].items[0].v == 'W' + assert foos_with_elem_match_items[3].items[0].z == "b" + assert foos_with_elem_match_items[3].items[0].v == "W" assert len(foos_with_elem_match_items[4].items) == 1 - assert foos_with_elem_match_items[4].items[0].z == 'b' - + assert foos_with_elem_match_items[4].items[0].z == "b" def test_elem_match(self): class Foo(EmbeddedDocument): From d287f480e5016090384650515c9342c767c4bbb7 Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Tue, 4 Feb 2020 12:35:03 +0100 Subject: [PATCH 168/216] Fix for combining raw 
and regular filters --- mongoengine/queryset/transform.py | 4 ++-- tests/queryset/test_transform.py | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 0b73e99b..659a97e2 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -169,9 +169,9 @@ def query(_doc_cls=None, **kwargs): key = ".".join(parts) - if op is None or key not in mongo_query: + if key not in mongo_query: mongo_query[key] = value - elif key in mongo_query: + else: if isinstance(mongo_query[key], dict) and isinstance(value, dict): mongo_query[key].update(value) # $max/minDistance needs to come last - convert to SON diff --git a/tests/queryset/test_transform.py b/tests/queryset/test_transform.py index 3898809e..8d6c2d06 100644 --- a/tests/queryset/test_transform.py +++ b/tests/queryset/test_transform.py @@ -24,6 +24,12 @@ class TestTransform(unittest.TestCase): } assert transform.query(friend__age__gte=30) == {"friend.age": {"$gte": 30}} assert transform.query(name__exists=True) == {"name": {"$exists": True}} + assert transform.query(name=["Mark"], __raw__={"name": {"$in": "Tom"}}) == { + "$and": [{"name": ["Mark"]}, {"name": {"$in": "Tom"}}] + } + assert transform.query(name__in=["Tom"], __raw__={"name": "Mark"}) == { + "$and": [{"name": {"$in": ["Tom"]}}, {"name": "Mark"}] + } def test_transform_update(self): class LisDoc(Document): From fda2e2b47ab5085b6ff5b6e27b40794a3fa9e77f Mon Sep 17 00:00:00 2001 From: Filip Kucharczyk Date: Tue, 4 Feb 2020 12:58:25 +0100 Subject: [PATCH 169/216] Update changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index d924a2c1..8dcea62a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,6 +7,7 @@ Development =========== - (Fill this out as you fix issues and develop your features). 
- Add Mongo 4.0 to Travis +- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 Changes in 0.19.1 ================= From ff4d57032ad366fb766b9625ead6df66bd989675 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 3 Mar 2020 23:51:11 +0100 Subject: [PATCH 170/216] reformat w black --- mongoengine/context_managers.py | 4 +++- mongoengine/queryset/base.py | 20 ++++++++++++++++---- tests/queryset/test_queryset.py | 32 ++++++++++++-------------------- 3 files changed, 31 insertions(+), 25 deletions(-) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 0f6c8698..0c58b57c 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -264,4 +264,6 @@ def set_read_write_concern(collection, write_concerns, read_concern): combined_write_concerns = dict(collection.write_concern.document.items()) combined_write_concerns.update(write_concerns) - yield collection.with_options(write_concern=WriteConcern(**combined_write_concerns), read_concern=read_concern) + yield collection.with_options( + write_concern=WriteConcern(**combined_write_concerns), read_concern=read_concern + ) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 0b76235c..0743429c 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -19,7 +19,11 @@ from mongoengine import signals from mongoengine.base import get_document from mongoengine.common import _import_class from mongoengine.connection import get_db -from mongoengine.context_managers import set_write_concern, set_read_write_concern, switch_db +from mongoengine.context_managers import ( + set_write_concern, + set_read_write_concern, + switch_db, +) from mongoengine.errors import ( BulkWriteError, InvalidQueryError, @@ -492,7 +496,13 @@ class BaseQuerySet(object): return result.deleted_count def update( - self, upsert=False, multi=True, write_concern=None, read_concern=None, full_result=False, **update + self, + upsert=False, + multi=True, + write_concern=None, + read_concern=None, + full_result=False, + **update ): """Perform an atomic update on the fields matched by the query. @@ -531,7 +541,9 @@ class BaseQuerySet(object): else: update["$set"] = {"_cls": queryset._document._class_name} try: - with set_read_write_concern(queryset._collection, write_concern, read_concern) as collection: + with set_read_write_concern( + queryset._collection, write_concern, read_concern + ) as collection: update_func = collection.update_one if multi: update_func = collection.update_many @@ -1214,7 +1226,7 @@ class BaseQuerySet(object): queryset._read_concern = read_concern queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_concern return queryset - + def scalar(self, *fields): """Instead of returning Document instances, return either a specific value or a tuple of values in order. 
diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index c238752d..708033f4 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4671,41 +4671,33 @@ class TestQueryset(unittest.TestCase): bars = list(Bar.objects.read_concern(None)) assert bars == [bar] - bars = Bar.objects.read_concern(ReadConcern(level='local')) - assert bars._read_concern == ReadConcern(level='local') - assert ( - bars._cursor.collection.read_concern - == ReadConcern(level='local') - ) + bars = Bar.objects.read_concern(ReadConcern(level="local")) + assert bars._read_concern == ReadConcern(level="local") + assert bars._cursor.collection.read_concern == ReadConcern(level="local") # Make sure that `.read_concern(...)` does accept string values. with pytest.raises(TypeError): - Bar.objects.read_concern('local') + Bar.objects.read_concern("local") def assert_read_concern(qs, expected_read_concern): assert qs._read_concern == expected_read_concern assert qs._cursor.collection.read_concern == expected_read_concern # Make sure read concern is respected after a `.skip(...)`. - bars = Bar.objects.skip(1).read_concern(ReadConcern('majority')) - assert_read_concern(bars, ReadConcern('majority')) + bars = Bar.objects.skip(1).read_concern(ReadConcern("majority")) + assert_read_concern(bars, ReadConcern("majority")) # Make sure read concern is respected after a `.limit(...)`. - bars = Bar.objects.limit(1).read_concern(ReadConcern('majority')) - assert_read_concern(bars, ReadConcern('majority')) + bars = Bar.objects.limit(1).read_concern(ReadConcern("majority")) + assert_read_concern(bars, ReadConcern("majority")) # Make sure read concern is respected after an `.order_by(...)`. - bars = Bar.objects.order_by("txt").read_concern( - ReadConcern('majority') - ) - assert_read_concern(bars, ReadConcern('majority')) + bars = Bar.objects.order_by("txt").read_concern(ReadConcern("majority")) + assert_read_concern(bars, ReadConcern("majority")) # Make sure read concern is respected after a `.hint(...)`. 
- bars = Bar.objects.hint([("txt", 1)]).read_concern( - ReadConcern('majority') - ) - assert_read_concern(bars, ReadConcern('majority')) - + bars = Bar.objects.hint([("txt", 1)]).read_concern(ReadConcern("majority")) + assert_read_concern(bars, ReadConcern("majority")) def test_json_simple(self): class Embedded(EmbeddedDocument): From 421e3f324fbb8a260eba817717f8391d2c9ecdd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 8 Mar 2020 14:58:21 +0100 Subject: [PATCH 171/216] Drop python2 support --- .travis.yml | 35 ++++++++------------ CONTRIBUTING.rst | 18 +++++----- benchmarks/test_inserts.py | 4 --- mongoengine/base/datastructures.py | 16 +-------- mongoengine/base/document.py | 10 ++---- mongoengine/fields.py | 17 ++++------ mongoengine/python_support.py | 23 ------------- mongoengine/queryset/base.py | 4 --- mongoengine/queryset/field_list.py | 2 -- mongoengine/queryset/transform.py | 2 +- mongoengine/queryset/visitor.py | 4 --- setup.py | 17 ++++------ tests/fields/test_binary_field.py | 10 ++---- tests/fields/test_embedded_document_field.py | 2 -- tests/fields/test_file_field.py | 6 ++-- tests/fields/test_url_field.py | 2 -- tests/queryset/test_queryset.py | 5 +-- tests/test_datastructures.py | 6 ++-- tox.ini | 2 +- 19 files changed, 51 insertions(+), 134 deletions(-) delete mode 100644 mongoengine/python_support.py diff --git a/.travis.yml b/.travis.yml index ad659fca..d34f8a36 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,13 +1,10 @@ # For full coverage, we'd have to test all supported Python, MongoDB, and # PyMongo combinations. However, that would result in an overly long build # with a very large number of jobs, hence we only test a subset of all the -# combinations: -# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, -# tested against Python v2.7, v3.5, v3.6, v3.7, v3.8, PyPy and PyPy3. -# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo -# combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. -# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. -# +# combinations. +# * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, +# Other combinations are tested. See below for the details or check the travis jobs + # We should periodically check MongoDB Server versions supported by MongoDB # Inc., add newly released versions to the test matrix, and remove versions # which have reached their End of Life. See: @@ -16,21 +13,15 @@ # # Reminder: Update README.rst if you change MongoDB versions we test. - language: python +dist: xenial python: -- 2.7 - 3.5 - 3.6 - 3.7 - 3.8 -- pypy - pypy3 -dist: xenial - -dist: xenial - env: global: - MONGODB_3_4=3.4.17 @@ -41,6 +32,8 @@ env: - PYMONGO_3_6=3.6 - PYMONGO_3_9=3.9 - PYMONGO_3_10=3.10 + + - MAIN_PYTHON_VERSION = "3.7" matrix: - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} @@ -49,8 +42,6 @@ matrix: fast_finish: true include: - - python: 2.7 - env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} - python: 3.7 @@ -74,20 +65,20 @@ install: # tox dryrun to setup the tox venv (we run a mock test). - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" # Install black for Python v3.7 only. 
- - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pip install black; fi before_script: - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork - - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then flake8 .; else echo "flake8 only runs on py37"; fi # Run flake8 for Python 3.7 only - - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then flake8 .; else echo "flake8 only runs on py37"; fi + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then black --check .; else echo "black only runs on py37"; fi - mongo --eval 'db.version();' # Make sure mongo is awake script: - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" after_success: -- coveralls --verbose +- - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi notifications: irc: irc.freenode.org#mongoengine @@ -109,11 +100,11 @@ deploy: distributions: "sdist bdist_wheel" # Only deploy on tagged commits (aka GitHub releases) and only for the parent - # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. + # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4. # We run Travis against many different Python, PyMongo, and MongoDB versions # and we don't want the deploy to occur multiple times). on: tags: true repo: MongoEngine/mongoengine condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4}) - python: 2.7 + python: 3.7 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index b04ae968..0deda0fc 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -20,23 +20,23 @@ post to the `user group ` Supported Interpreters ---------------------- -MongoEngine supports CPython 2.7 and newer. Language -features not supported by all interpreters can not be used. +MongoEngine supports CPython 3.7 and newer as well as Pypy3. +Language features not supported by all interpreters can not be used. -Python 2/3 compatibility +Python3 codebase ---------------------- -The codebase is written in a compatible manner for python 2 & 3 so it -is important that this is taken into account when it comes to discrepencies -between the two versions (see https://python-future.org/compatible_idioms.html). -Travis runs the tests against different Python versions as a safety net. +Since 0.20, the codebase is exclusively Python 3. + +Earlier versions were exclusively Python2, and was relying on 2to3 to support Python3 installs. +Travis runs the tests against the main Python 3.x versions. Style Guide ----------- -MongoEngine uses `black `_ for code -formatting. +MongoEngine uses `black `_ for code formatting. +Black runs as part of the CI so it will fail in case the code is not formatted properly. 
Testing ------- diff --git a/benchmarks/test_inserts.py b/benchmarks/test_inserts.py index af6399f7..4ecd48de 100644 --- a/benchmarks/test_inserts.py +++ b/benchmarks/test_inserts.py @@ -3,8 +3,6 @@ import timeit def main(): setup = """ -from builtins import range - from pymongo import MongoClient connection = MongoClient() @@ -59,8 +57,6 @@ myNoddys = noddy.find() print("{}s".format(t.timeit(1))) setup = """ -from builtins import range - from pymongo import MongoClient connection = MongoClient() diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 9f78fec0..f2d64c33 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,7 +1,6 @@ import weakref from bson import DBRef -from future.utils import listitems import six from six import iteritems @@ -181,19 +180,6 @@ class BaseList(list): __iadd__ = mark_as_changed_wrapper(list.__iadd__) __imul__ = mark_as_changed_wrapper(list.__imul__) - if six.PY2: - # Under py3 __setslice__, __delslice__ and __getslice__ - # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter - # so we mimic this under python 2 - def __setslice__(self, i, j, sequence): - return self.__setitem__(slice(i, j), sequence) - - def __delslice__(self, i, j): - return self.__delitem__(slice(i, j)) - - def __getslice__(self, i, j): - return self.__getitem__(slice(i, j)) - def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): if key: @@ -426,7 +412,7 @@ class StrictDict(object): return len(list(iteritems(self))) def __eq__(self, other): - return listitems(self) == listitems(other) + return list(self.items()) == list(other.items()) def __ne__(self, other): return not (self == other) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 4e4df92f..73c88774 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -1,9 +1,9 @@ import copy + import numbers from functools import partial from bson import DBRef, ObjectId, SON, json_util -from future.utils import listitems import pymongo import six from six import iteritems @@ -26,7 +26,6 @@ from mongoengine.errors import ( OperationError, ValidationError, ) -from mongoengine.python_support import Hashable __all__ = ("BaseDocument", "NON_FIELD_ERRORS") @@ -294,10 +293,7 @@ class BaseDocument(object): def __str__(self): # TODO this could be simpler? if hasattr(self, "__unicode__"): - if six.PY3: - return self.__unicode__() - else: - return six.text_type(self).encode("utf-8") + return self.__unicode__() return six.text_type("%s object" % self.__class__.__name__) def __eq__(self, other): @@ -671,7 +667,7 @@ class BaseDocument(object): del set_data["_id"] # Determine if any changed items were actually unset. 
- for path, value in listitems(set_data): + for path, value in list(set_data.items()): if value or isinstance( value, (numbers.Number, bool) ): # Account for 0 and True that are truthy diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9d7d1d04..5cbf3ae0 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -5,12 +5,12 @@ import re import socket import time import uuid +from io import BytesIO from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON from bson.int64 import Int64 import gridfs -from past.builtins import long import pymongo from pymongo import ReturnDocument import six @@ -39,7 +39,6 @@ from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version -from mongoengine.python_support import StringIO from mongoengine.queryset import DO_NOTHING from mongoengine.queryset.base import BaseQuerySet from mongoengine.queryset.transform import STRING_OPERATORS @@ -338,7 +337,7 @@ class IntField(BaseField): class LongField(BaseField): - """64-bit integer field.""" + """64-bit integer field. (Equivalent to IntField since the support to Python2 was dropped)""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value @@ -346,7 +345,7 @@ class LongField(BaseField): def to_python(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): pass return value @@ -356,7 +355,7 @@ class LongField(BaseField): def validate(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): self.error("%s could not be converted to long" % value) @@ -370,7 +369,7 @@ class LongField(BaseField): if value is None: return value - return super(LongField, self).prepare_query_value(op, long(value)) + return super(LongField, self).prepare_query_value(op, int(value)) class FloatField(BaseField): @@ -1679,8 +1678,6 @@ class GridFSProxy(object): def __bool__(self): return bool(self.grid_id) - __nonzero__ = __bool__ # For Py2 support - def __getstate__(self): self_dict = self.__dict__ self_dict["_fs"] = None @@ -1952,7 +1949,7 @@ class ImageGridFsProxy(GridFSProxy): w, h = img.size - io = StringIO() + io = BytesIO() img.save(io, img_format, progressive=progressive) io.seek(0) @@ -1971,7 +1968,7 @@ class ImageGridFsProxy(GridFSProxy): def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): w, h = thumbnail.size - io = StringIO() + io = BytesIO() thumbnail.save(io, format, progressive=progressive) io.seek(0) diff --git a/mongoengine/python_support.py b/mongoengine/python_support.py deleted file mode 100644 index 57e467db..00000000 --- a/mongoengine/python_support.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Helper functions, constants, and types to aid with Python v2.7 - v3.x support -""" -import six - -# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. 
-StringIO = six.BytesIO - -# Additionally for Py2, try to use the faster cStringIO, if available -if not six.PY3: - try: - import cStringIO - except ImportError: - pass - else: - StringIO = cStringIO.StringIO - - -if six.PY3: - from collections.abc import Hashable -else: - # raises DeprecationWarnings in Python >=3.7 - from collections import Hashable diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 9f76d92d..b7d0e0ab 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import copy import itertools import re @@ -204,8 +202,6 @@ class BaseQuerySet(object): """Avoid to open all records in an if stmt in Py3.""" return self._has_data() - __nonzero__ = __bool__ # For Py2 support - # Core functions def all(self): diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 5c3ff222..c2618ebd 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -69,8 +69,6 @@ class QueryFieldList(object): def __bool__(self): return bool(self.fields) - __nonzero__ = __bool__ # For Py2 support - def as_dict(self): field_list = {field: self.value for field in self.fields} if self.slice: diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 659a97e2..c179f541 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -10,7 +10,7 @@ from mongoengine.base import UPDATE_OPERATORS from mongoengine.common import _import_class from mongoengine.errors import InvalidQueryError -__all__ = ("query", "update") +__all__ = ("query", "update", "STRING_OPERATORS") COMPARISON_OPERATORS = ( "ne", diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 470839c1..72e36ac0 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -143,8 +143,6 @@ class QCombination(QNode): def __bool__(self): return bool(self.children) - __nonzero__ = __bool__ # For Py2 support - def accept(self, visitor): for i in range(len(self.children)): if isinstance(self.children[i], QNode): @@ -180,8 +178,6 @@ class Q(QNode): def __bool__(self): return bool(self.query) - __nonzero__ = __bool__ # For Py2 support - def __eq__(self, other): return self.__class__ == other.__class__ and self.query == other.query diff --git a/setup.py b/setup.py index 418ff7ad..6873284a 100644 --- a/setup.py +++ b/setup.py @@ -110,7 +110,6 @@ CLASSIFIERS = [ PYTHON_VERSION = sys.version_info[0] PY3 = PYTHON_VERSION == 3 -PY2 = PYTHON_VERSION == 2 extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), @@ -124,14 +123,11 @@ extra_opts = { ], } -if PY3: - if "test" in sys.argv: - extra_opts["packages"] = find_packages() - extra_opts["package_data"] = { - "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] - } -else: - extra_opts["tests_require"] += ["python-dateutil"] +if "test" in sys.argv: + extra_opts["packages"] = find_packages() + extra_opts["package_data"] = { + "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] + } setup( name="mongoengine", @@ -148,7 +144,8 @@ setup( long_description=LONG_DESCRIPTION, platforms=["any"], classifiers=CLASSIFIERS, - install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0", "future"], + python_requires=">=3.5", + install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0"], cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index e2a1b8d6..a653b961 
100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -123,10 +123,7 @@ class TestBinaryField(MongoDBTestCase): upsert=True, new=True, set__bin_field=BIN_VALUE ) assert doc.some_field == "test" - if six.PY3: - assert doc.bin_field == BIN_VALUE - else: - assert doc.bin_field == Binary(BIN_VALUE) + assert doc.bin_field == BIN_VALUE def test_update_one(self): """Ensures no regression of bug #1127""" @@ -144,7 +141,4 @@ class TestBinaryField(MongoDBTestCase): ) assert n_updated == 1 fetched = MyDocument.objects.with_id(doc.id) - if six.PY3: - assert fetched.bin_field == BIN_VALUE - else: - assert fetched.bin_field == Binary(BIN_VALUE) + assert fetched.bin_field == BIN_VALUE diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index b80f4d8c..13ca9c0b 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from builtins import str - import pytest from mongoengine import ( diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index b8ece1a9..5ab6c93f 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -3,6 +3,7 @@ import copy import os import tempfile import unittest +from io import BytesIO import gridfs import pytest @@ -10,7 +11,6 @@ import six from mongoengine import * from mongoengine.connection import get_db -from mongoengine.python_support import StringIO try: from PIL import Image @@ -30,7 +30,7 @@ TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") def get_file(path): """Use a BytesIO instead of a file to allow to have a one-liner and avoid that the file remains opened""" - bytes_io = StringIO() + bytes_io = BytesIO() with open(path, "rb") as f: bytes_io.write(f.read()) bytes_io.seek(0) @@ -80,7 +80,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() putfile = PutFile() - putstring = StringIO() + putstring = BytesIO() putstring.write(text) putstring.seek(0) putfile.the_file.put(putstring, content_type=content_type) diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index 477bced7..c449e467 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -from builtins import str - import pytest from mongoengine import * diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 17bf7405..7dea6dd5 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4470,10 +4470,7 @@ class TestQueryset(unittest.TestCase): pks = self.Person.objects.order_by("age").scalar("pk")[1:3] names = self.Person.objects.scalar("name").in_bulk(list(pks)).values() - if six.PY3: - expected = "['A1', 'A2']" - else: - expected = "[u'A1', u'A2']" + expected = "['A1', 'A2']" assert expected == "%s" % sorted(names) def test_elem_match(self): diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 7b5d7d11..734061ed 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -287,7 +287,7 @@ class TestBaseList: base_list[:] = [ 0, 1, - ] # Will use __setslice__ under py2 and __setitem__ under py3 + ] assert base_list._instance._changed_fields == ["my_name"] assert base_list == [0, 1] @@ -296,13 +296,13 @@ class TestBaseList: base_list[0:2] = [ 1, 0, - ] # Will use __setslice__ under py2 and __setitem__ under py3 + ] assert base_list._instance._changed_fields == 
["my_name"] assert base_list == [1, 0, 2] def test___setitem___calls_with_step_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) - base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3 + base_list[0:3:2] = [-1, -2] # uses __setitem__ assert base_list._instance._changed_fields == ["my_name"] assert base_list == [-1, 1, -2] diff --git a/tox.ini b/tox.ini index 396817ca..675b6d9a 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,pypy,pypy3}-{mg34,mg36,mg39,mg310} +envlist = {py35,pypy3}-{mg34,mg36,mg39,mg310} [testenv] commands = From 03e34299f0d760834386b0b68dc71672b27649bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 11 Mar 2020 21:50:50 +0100 Subject: [PATCH 172/216] clean code related to Py2 + six.text_type & six.string_types --- CONTRIBUTING.rst | 2 +- README.rst | 6 +++ mongoengine/base/datastructures.py | 3 +- mongoengine/base/document.py | 11 ++--- mongoengine/base/fields.py | 26 +++++------ mongoengine/base/metaclasses.py | 18 ++++---- mongoengine/connection.py | 9 ++-- mongoengine/dereference.py | 6 +-- mongoengine/document.py | 10 ++--- mongoengine/errors.py | 5 +-- mongoengine/fields.py | 69 +++++++++++++---------------- mongoengine/queryset/base.py | 30 ++++++------- mongoengine/queryset/transform.py | 4 +- tests/fields/test_date_field.py | 11 ----- tests/fields/test_datetime_field.py | 13 +----- tests/queryset/test_queryset.py | 26 ++++------- 16 files changed, 101 insertions(+), 148 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 0deda0fc..d939e2ee 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -28,7 +28,7 @@ Python3 codebase Since 0.20, the codebase is exclusively Python 3. -Earlier versions were exclusively Python2, and was relying on 2to3 to support Python3 installs. +Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs. Travis runs the tests against the main Python 3.x versions. diff --git a/README.rst b/README.rst index b5c95888..1ef1363f 100644 --- a/README.rst +++ b/README.rst @@ -42,6 +42,8 @@ to both create the virtual environment and install the package. Otherwise, you c download the source from `GitHub `_ and run ``python setup.py install``. +The support for Python2 was dropped with MongoEngine 0.20.0 + Dependencies ============ All of the dependencies can easily be installed via `pip `_. @@ -58,6 +60,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``: - Pillow>=2.0.0 +If you need to use signals: + +- blinker>=1.3 + Examples ======== Some simple examples of what MongoEngine code looks like: diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index f2d64c33..2a2c7e7d 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,7 +1,6 @@ import weakref from bson import DBRef -import six from six import iteritems from mongoengine.common import _import_class @@ -200,7 +199,7 @@ class EmbeddedDocumentList(BaseList): """ for key, expected_value in kwargs.items(): doc_val = getattr(embedded_doc, key) - if doc_val != expected_value and six.text_type(doc_val) != expected_value: + if doc_val != expected_value and str(doc_val) != expected_value: return False return True diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 73c88774..1254d042 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -294,7 +294,7 @@ class BaseDocument(object): # TODO this could be simpler? 
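The __str__ hunk around this point follows the commit's general recipe: six.text_type(x) and str(x) behave identically on Python 3. A tiny self-contained illustration (the class name is made up)::

    class Example:
        def __str__(self):
            # before: six.text_type("%s object" % self.__class__.__name__)
            # after: plain formatting; Python 3 str is already unicode
            return "%s object" % self.__class__.__name__

    assert str(Example()) == "Example object"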
if hasattr(self, "__unicode__"): return self.__unicode__() - return six.text_type("%s object" % self.__class__.__name__) + return "%s object" % self.__class__.__name__ def __eq__(self, other): if ( @@ -828,7 +828,7 @@ class BaseDocument(object): @classmethod def _build_index_spec(cls, spec): """Build a PyMongo index spec from a MongoEngine index spec.""" - if isinstance(spec, six.string_types): + if isinstance(spec, str): spec = {"fields": [spec]} elif isinstance(spec, (list, tuple)): spec = {"fields": list(spec)} @@ -925,7 +925,7 @@ class BaseDocument(object): # Add any unique_with fields to the back of the index spec if field.unique_with: - if isinstance(field.unique_with, six.string_types): + if isinstance(field.unique_with, str): field.unique_with = [field.unique_with] # Convert unique_with field names to real field names @@ -1172,9 +1172,6 @@ class BaseDocument(object): else [value] ) return sep.join( - [ - six.text_type(dict(field.choices).get(val, val)) - for val in values or [] - ] + [str(dict(field.choices).get(val, val)) for val in values or []] ) return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index cd1039cb..3c88149b 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -4,7 +4,6 @@ import weakref from bson import DBRef, ObjectId, SON import pymongo -import six from six import iteritems from mongoengine.base.common import UPDATE_OPERATORS @@ -92,13 +91,11 @@ class BaseField(object): self._owner_document = None # Make sure db_field is a string (if it's explicitly defined). - if self.db_field is not None and not isinstance( - self.db_field, six.string_types - ): + if self.db_field is not None and not isinstance(self.db_field, str): raise TypeError("db_field should be a string.") # Make sure db_field doesn't contain any forbidden characters. - if isinstance(self.db_field, six.string_types) and ( + if isinstance(self.db_field, str) and ( "." 
in self.db_field or "\0" in self.db_field or self.db_field.startswith("$") @@ -221,14 +218,12 @@ class BaseField(object): # Choices which are other types of Documents if isinstance(value, (Document, EmbeddedDocument)): if not any(isinstance(value, c) for c in choice_list): - self.error( - "Value must be an instance of %s" % (six.text_type(choice_list)) - ) + self.error("Value must be an instance of %s" % (choice_list)) # Choices which are types other than Documents else: values = value if isinstance(value, (list, tuple)) else [value] if len(set(values) - set(choice_list)): - self.error("Value must be one of %s" % six.text_type(choice_list)) + self.error("Value must be one of %s" % str(choice_list)) def _validate(self, value, **kwargs): # Check the Choices Constraint @@ -345,7 +340,7 @@ class ComplexBaseField(BaseField): def to_python(self, value): """Convert a MongoDB-compatible type to a Python type.""" - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_python"): @@ -399,7 +394,7 @@ class ComplexBaseField(BaseField): EmbeddedDocument = _import_class("EmbeddedDocument") GenericReferenceField = _import_class("GenericReferenceField") - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_mongo"): @@ -513,10 +508,9 @@ class ObjectIdField(BaseField): def to_mongo(self, value): if not isinstance(value, ObjectId): try: - return ObjectId(six.text_type(value)) + return ObjectId(str(value)) except Exception as e: - # e.message attribute has been deprecated since Python 2.6 - self.error(six.text_type(e)) + self.error(str(e)) return value def prepare_query_value(self, op, value): @@ -524,9 +518,9 @@ class ObjectIdField(BaseField): def validate(self, value): try: - ObjectId(six.text_type(value)) + ObjectId(str(value)) except Exception: - self.error("Invalid Object ID") + self.error("Invalid ObjectID") class GeoJsonBaseField(BaseField): diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index e4d26811..0d6e08be 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -1,7 +1,6 @@ import itertools import warnings -import six from six import iteritems, itervalues from mongoengine.base.common import _document_registry @@ -180,14 +179,15 @@ class DocumentMetaclass(type): # module continues to use im_func and im_self, so the code below # copies __func__ into im_func and __self__ into im_self for # classmethod objects in Document derived classes. 
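The string checks rewritten in base/fields.py above are a mechanical substitution: isinstance(x, six.string_types) becomes isinstance(x, str), which accepts exactly the same values once Python 2 is gone. Illustrative sketch (the helper name is hypothetical)::

    def looks_like_field_name(value):
        # six.string_types == (str,) on Python 3, so this is equivalent
        # to the old check; bytes are still rejected.
        return isinstance(value, str)

    assert looks_like_field_name("comments.by")
    assert not looks_like_field_name(b"comments.by")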
- if six.PY3: - for val in new_class.__dict__.values(): - if isinstance(val, classmethod): - f = val.__get__(new_class) - if hasattr(f, "__func__") and not hasattr(f, "im_func"): - f.__dict__.update({"im_func": getattr(f, "__func__")}) - if hasattr(f, "__self__") and not hasattr(f, "im_self"): - f.__dict__.update({"im_self": getattr(f, "__self__")}) + # + # Relates to https://github.com/MongoEngine/mongoengine/issues/1107 + # for val in new_class.__dict__.values(): + # if isinstance(val, classmethod): + # f = val.__get__(new_class) + # if hasattr(f, "__func__") and not hasattr(f, "im_func"): + # f.__dict__.update({"im_func": getattr(f, "__func__")}) + # if hasattr(f, "__self__") and not hasattr(f, "im_self"): + # f.__dict__.update({"im_self": getattr(f, "__self__")}) # Handle delete rules for field in itervalues(new_class._fields): diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 4e0c60b0..3f754619 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,6 +1,5 @@ from pymongo import MongoClient, ReadPreference, uri_parser from pymongo.database import _check_name -import six __all__ = [ "DEFAULT_CONNECTION_NAME", @@ -39,8 +38,8 @@ def _check_db_name(name): """Check if a database name is valid. This functionality is copied from pymongo Database class constructor. """ - if not isinstance(name, six.string_types): - raise TypeError("name must be an instance of %s" % six.string_types) + if not isinstance(name, str): + raise TypeError("name must be an instance of %s" % str) elif name != "$external": _check_name(name) @@ -93,7 +92,7 @@ def _get_connection_settings( conn_host = conn_settings["host"] # Host can be a list or a string, so if string, force to a list. - if isinstance(conn_host, six.string_types): + if isinstance(conn_host, str): conn_host = [conn_host] resolved_hosts = [] @@ -148,7 +147,7 @@ def _get_connection_settings( # TODO simplify the code below once we drop support for # PyMongo v3.4. 
read_pf_mode = uri_options["readpreference"] - if isinstance(read_pf_mode, six.string_types): + if isinstance(read_pf_mode, str): read_pf_mode = read_pf_mode.lower() for preference in read_preferences: if ( diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 9e75f353..01cd5f36 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -30,7 +30,7 @@ class DeReference(object): :class:`~mongoengine.base.ComplexBaseField` :param get: A boolean determining if being called by __get__ """ - if items is None or isinstance(items, six.string_types): + if items is None or isinstance(items, str): return items # cheapest way to convert a queryset to a list @@ -274,9 +274,7 @@ class DeReference(object): (v["_ref"].collection, v["_ref"].id), v ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = six.text_type("{0}.{1}.{2}").format( - name, k, field_name - ) + item_name = "{0}.{1}.{2}".format(name, k, field_name) data[k]._data[field_name] = self._attach_objects( v, depth, instance=instance, name=item_name ) diff --git a/mongoengine/document.py b/mongoengine/document.py index 6a7ced46..13541da8 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -44,7 +44,7 @@ def includes_cls(fields): """Helper function used for ensuring and comparing indexes.""" first_field = None if len(fields): - if isinstance(fields[0], six.string_types): + if isinstance(fields[0], str): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] @@ -430,15 +430,15 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): except pymongo.errors.DuplicateKeyError as err: message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) + raise NotUniqueError(message % err) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" - if re.match("^E1100[01] duplicate key", six.text_type(err)): + if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + raise NotUniqueError(message % err) + raise OperationError(message % err) # Make sure we store the PK on this document now that it's saved id_field = self._meta["id_field"] diff --git a/mongoengine/errors.py b/mongoengine/errors.py index b76243d3..1ac01257 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -1,6 +1,5 @@ from collections import defaultdict -import six from six import iteritems __all__ = ( @@ -93,7 +92,7 @@ class ValidationError(AssertionError): self.message = message def __str__(self): - return six.text_type(self.message) + return str(self.message) def __repr__(self): return "%s(%s,)" % (self.__class__.__name__, self.message) @@ -131,7 +130,7 @@ class ValidationError(AssertionError): elif isinstance(source, ValidationError) and source.errors: return build_dict(source.errors) else: - return six.text_type(source) + return str(source) return errors_dict diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 5cbf3ae0..b6ddd566 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -108,7 +108,7 @@ class StringField(BaseField): super(StringField, self).__init__(**kwargs) def to_python(self, value): - if isinstance(value, six.text_type): + if isinstance(value, str): return value try: value = 
value.decode("utf-8") @@ -117,7 +117,7 @@ class StringField(BaseField): return value def validate(self, value): - if not isinstance(value, six.string_types): + if not isinstance(value, str): self.error("StringField only accepts string values") if self.max_length is not None and len(value) > self.max_length: @@ -133,7 +133,7 @@ class StringField(BaseField): return None def prepare_query_value(self, op, value): - if not isinstance(op, six.string_types): + if not isinstance(op, str): return value if op in STRING_OPERATORS: @@ -472,13 +472,13 @@ class DecimalField(BaseField): if value is None: return value if self.force_string: - return six.text_type(self.to_python(value)) + return str(self.to_python(value)) return float(self.to_python(value)) def validate(self, value): if not isinstance(value, decimal.Decimal): - if not isinstance(value, six.string_types): - value = six.text_type(value) + if not isinstance(value, str): + value = str(value) try: value = decimal.Decimal(value) except (TypeError, ValueError, decimal.InvalidOperation) as exc: @@ -543,7 +543,7 @@ class DateTimeField(BaseField): if callable(value): return value() - if not isinstance(value, six.string_types): + if not isinstance(value, str): return None return self._parse_datetime(value) @@ -707,7 +707,7 @@ class EmbeddedDocumentField(BaseField): def __init__(self, document_type, **kwargs): # XXX ValidationError raised outside of the "validate" method. if not ( - isinstance(document_type, six.string_types) + isinstance(document_type, str) or issubclass(document_type, EmbeddedDocument) ): self.error( @@ -720,7 +720,7 @@ class EmbeddedDocumentField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: resolved_document_type = self.owner_document else: @@ -846,7 +846,7 @@ class DynamicField(BaseField): """Convert a Python type to a MongoDB compatible type. """ - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_mongo"): @@ -889,7 +889,7 @@ class DynamicField(BaseField): return member_name def prepare_query_value(self, op, value): - if isinstance(value, six.string_types): + if isinstance(value, str): return StringField().prepare_query_value(op, value) return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) @@ -954,7 +954,7 @@ class ListField(ComplexBaseField): if ( op in ("set", "unset", None) and hasattr(value, "__iter__") - and not isinstance(value, six.string_types) + and not isinstance(value, str) and not isinstance(value, BaseDocument) ): return [self.field.prepare_query_value(op, v) for v in value] @@ -1026,9 +1026,7 @@ def key_not_string(d): dictionary is not a string. """ for k, v in d.items(): - if not isinstance(k, six.string_types) or ( - isinstance(v, dict) and key_not_string(v) - ): + if not isinstance(k, str) or (isinstance(v, dict) and key_not_string(v)): return True @@ -1107,7 +1105,7 @@ class DictField(ComplexBaseField): "iexact", ] - if op in match_operators and isinstance(value, six.string_types): + if op in match_operators and isinstance(value, str): return StringField().prepare_query_value(op, value) if hasattr( @@ -1194,7 +1192,7 @@ class ReferenceField(BaseField): :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. """ # XXX ValidationError raised outside of the "validate" method. 
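The StringField.to_python hunk earlier in this file's diff only swaps six.text_type for str; a rough standalone restatement of the resulting behaviour (simplified, not the field's actual code)::

    def to_python(value):
        # Native str passes through; raw bytes are decoded as UTF-8 when possible.
        if isinstance(value, str):
            return value
        try:
            value = value.decode("utf-8")
        except (AttributeError, UnicodeDecodeError):
            pass
        return value

    assert to_python("café") == "café"
    assert to_python(b"caf\xc3\xa9") == "café"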
- if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -1209,7 +1207,7 @@ class ReferenceField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1325,7 +1323,7 @@ class CachedReferenceField(BaseField): fields = [] # XXX ValidationError raised outside of the "validate" method. - if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -1370,7 +1368,7 @@ class CachedReferenceField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1498,7 +1496,7 @@ class GenericReferenceField(BaseField): # Keep the choices as a list of allowed Document class names if choices: for choice in choices: - if isinstance(choice, six.string_types): + if isinstance(choice, str): self.choices.append(choice) elif isinstance(choice, type) and issubclass(choice, Document): self.choices.append(choice._class_name) @@ -1507,7 +1505,7 @@ class GenericReferenceField(BaseField): # method. self.error( "Invalid choices provided: must be a list of" - "Document subclasses and/or six.string_typess" + "Document subclasses and/or str" ) def _validate_choices(self, value): @@ -1601,8 +1599,8 @@ class BinaryField(BaseField): def __set__(self, instance, value): """Handle bytearrays in python 3.1""" - if six.PY3 and isinstance(value, bytearray): - value = six.binary_type(value) + if isinstance(value, bytearray): + value = bytes(value) return super(BinaryField, self).__set__(instance, value) def to_mongo(self, value): @@ -1831,7 +1829,7 @@ class FileField(BaseField): key = self.name if ( hasattr(value, "read") and not isinstance(value, GridFSProxy) - ) or isinstance(value, (six.binary_type, six.string_types)): + ) or isinstance(value, (six.binary_type, str)): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it @@ -2038,12 +2036,7 @@ class ImageField(FileField): for att_name, att in extra_args.items(): value = None if isinstance(att, (tuple, list)): - if six.PY3: - value = dict( - itertools.zip_longest(params_size, att, fillvalue=None) - ) - else: - value = dict(map(None, params_size, att)) + value = dict(itertools.zip_longest(params_size, att, fillvalue=None)) setattr(self, att_name, value) @@ -2213,8 +2206,8 @@ class UUIDField(BaseField): if not self._binary: original_value = value try: - if not isinstance(value, six.string_types): - value = six.text_type(value) + if not isinstance(value, str): + value = str(value) return uuid.UUID(value) except (ValueError, TypeError, AttributeError): return original_value @@ -2222,8 +2215,8 @@ class UUIDField(BaseField): def to_mongo(self, value): if not self._binary: - return six.text_type(value) - elif isinstance(value, six.string_types): + return str(value) + elif isinstance(value, str): return uuid.UUID(value) return value @@ -2234,7 +2227,7 @@ class UUIDField(BaseField): def validate(self, value): if not isinstance(value, uuid.UUID): - if not isinstance(value, 
six.string_types): + if not isinstance(value, str): value = str(value) try: uuid.UUID(value) @@ -2433,7 +2426,7 @@ class LazyReferenceField(BaseField): document. Note this only work getting field (not setting or deleting). """ # XXX ValidationError raised outside of the "validate" method. - if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -2449,7 +2442,7 @@ class LazyReferenceField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index b7d0e0ab..8a068e2e 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -349,20 +349,20 @@ class BaseQuerySet(object): ) except pymongo.errors.DuplicateKeyError as err: message = "Could not save document (%s)" - raise NotUniqueError(message % six.text_type(err)) + raise NotUniqueError(message % err) except pymongo.errors.BulkWriteError as err: # inserting documents that already have an _id field will # give huge performance debt or raise message = u"Bulk write error: (%s)" - raise BulkWriteError(message % six.text_type(err.details)) + raise BulkWriteError(message % err.details) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" - if re.match("^E1100[01] duplicate key", six.text_type(err)): + if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + raise NotUniqueError(message % err) + raise OperationError(message % err) # Apply inserted_ids to documents for doc, doc_id in zip(docs, ids): @@ -534,12 +534,12 @@ class BaseQuerySet(object): elif result.raw_result: return result.raw_result["n"] except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u"Update failed (%s)" % six.text_type(err)) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - if six.text_type(err) == u"multi not coded yet": - message = u"update() method requires MongoDB 1.1.3+" + if str(err) == "multi not coded yet": + message = "update() method requires MongoDB 1.1.3+" raise OperationError(message) - raise OperationError(u"Update failed (%s)" % six.text_type(err)) + raise OperationError("Update failed (%s)" % err) def upsert_one(self, write_concern=None, **update): """Overwrite or add the first document matched by the query. 
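The UUIDField hunks above keep the same round-trip for the non-binary storage path, only spelled with builtins: str() to serialise, uuid.UUID() to parse back. Quick illustration::

    import uuid

    value = uuid.uuid4()
    stored = str(value)  # what to_mongo() emits when _binary is False
    assert uuid.UUID(stored) == value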
@@ -1348,13 +1348,13 @@ class BaseQuerySet(object): map_f_scope = {} if isinstance(map_f, Code): map_f_scope = map_f.scope - map_f = six.text_type(map_f) + map_f = str(map_f) map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) reduce_f_scope = {} if isinstance(reduce_f, Code): reduce_f_scope = reduce_f.scope - reduce_f = six.text_type(reduce_f) + reduce_f = str(reduce_f) reduce_f_code = queryset._sub_js_fields(reduce_f) reduce_f = Code(reduce_f_code, reduce_f_scope) @@ -1364,7 +1364,7 @@ class BaseQuerySet(object): finalize_f_scope = {} if isinstance(finalize_f, Code): finalize_f_scope = finalize_f.scope - finalize_f = six.text_type(finalize_f) + finalize_f = str(finalize_f) finalize_f_code = queryset._sub_js_fields(finalize_f) finalize_f = Code(finalize_f_code, finalize_f_scope) mr_args["finalize"] = finalize_f @@ -1380,7 +1380,7 @@ class BaseQuerySet(object): else: map_reduce_function = "map_reduce" - if isinstance(output, six.string_types): + if isinstance(output, str): mr_args["out"] = output elif isinstance(output, dict): @@ -1838,7 +1838,7 @@ class BaseQuerySet(object): field_parts = field.split(".") try: field = ".".join( - f if isinstance(f, six.string_types) else f.db_field + f if isinstance(f, str) else f.db_field for f in self._document._lookup_field(field_parts) ) db_field_paths.append(field) @@ -1850,7 +1850,7 @@ class BaseQuerySet(object): for subdoc in subclasses: try: subfield = ".".join( - f if isinstance(f, six.string_types) else f.db_field + f if isinstance(f, str) else f.db_field for f in subdoc._lookup_field(field_parts) ) db_field_paths.append(subfield) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index c179f541..efbdae4e 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -101,7 +101,7 @@ def query(_doc_cls=None, **kwargs): cleaned_fields = [] for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): parts.append(field) append_field = False # is last and CachedReferenceField @@ -281,7 +281,7 @@ def update(_doc_cls=None, **update): appended_sub_field = False for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): # Convert the S operator to $ if field == "S": field = "$" diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index e94ed0ce..5e1bfaa4 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -89,17 +89,6 @@ class TestDateField(MongoDBTestCase): assert log.date == d1.date() assert log.date == d2.date() - if not six.PY3: - # Pre UTC dates microseconds below 1000 are dropped - # This does not seem to be true in PY3 - d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 - log.save() - log.reload() - assert log.date == d1.date() - assert log.date == d2.date() - def test_regular_usage(self): """Tests for regular datetime fields""" diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 70debac5..21ab523b 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -98,17 +98,6 @@ class TestDateTimeField(MongoDBTestCase): assert log.date != d1 assert log.date == d2 - if not six.PY3: - # Pre UTC dates microseconds below 1000 are dropped - # This does not seem to be true in PY3 - d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = dt.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 
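The map_reduce() changes earlier in this commit rely on bson.code.Code being a str subclass, so str(code) returns the JavaScript source exactly as six.text_type(code) did. A small sketch::

    from bson.code import Code

    map_f = Code("function () { emit(this.tag, 1); }", {})
    assert str(map_f) == "function () { emit(this.tag, 1); }"
    assert isinstance(map_f.scope, dict)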
- log.save() - log.reload() - assert log.date != d1 - assert log.date == d2 - def test_regular_usage(self): """Tests for regular datetime fields""" @@ -213,7 +202,7 @@ class TestDateTimeField(MongoDBTestCase): # make sure that passing a parsable datetime works dtd = DTDoc() dtd.date = date_str - assert isinstance(dtd.date, six.string_types) + assert isinstance(dtd.date, str) dtd.save() dtd.reload() diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 7dea6dd5..cf4cd0dc 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4445,24 +4445,14 @@ class TestQueryset(unittest.TestCase): "A0" == "%s" % self.Person.objects.order_by("name").scalar("name").first() ) assert "A0" == "%s" % self.Person.objects.scalar("name").order_by("name")[0] - if six.PY3: - assert ( - "['A1', 'A2']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] - ) - assert ( - "['A51', 'A52']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] - ) - else: - assert ( - "[u'A1', u'A2']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] - ) - assert ( - "[u'A51', u'A52']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] - ) + assert ( + "['A1', 'A2']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] + ) + assert ( + "['A51', 'A52']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] + ) # with_id and in_bulk person = self.Person.objects.order_by("name").first() From 8086576677d21bb1508bcf1c4c78efb0091d8f3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 11 Mar 2020 23:07:03 +0100 Subject: [PATCH 173/216] get rid of six --- README.rst | 1 - docs/upgrade.rst | 2 +- mongoengine/base/datastructures.py | 5 ++-- mongoengine/base/document.py | 18 ++++++------- mongoengine/base/fields.py | 9 +++---- mongoengine/base/metaclasses.py | 16 +++++------ mongoengine/context_managers.py | 3 +-- mongoengine/dereference.py | 16 +++++------ mongoengine/document.py | 12 ++++----- mongoengine/errors.py | 9 +++---- mongoengine/fields.py | 16 +++++------ mongoengine/queryset/base.py | 12 ++++----- mongoengine/queryset/queryset.py | 10 +++---- mongoengine/queryset/transform.py | 4 +-- requirements.txt | 1 - setup.py | 2 +- tests/document/test_indexes.py | 25 +++++++++--------- tests/document/test_inheritance.py | 3 +-- tests/document/test_instance.py | 5 ++-- tests/fields/test_binary_field.py | 17 ++++++------ tests/fields/test_date_field.py | 1 - tests/fields/test_datetime_field.py | 1 - tests/fields/test_file_field.py | 41 +++++++++++++++-------------- tests/fields/test_float_field.py | 6 ++--- tests/fields/test_long_field.py | 6 ++--- tests/queryset/test_queryset.py | 4 +-- tests/test_datastructures.py | 3 +-- tests/test_dereference.py | 26 +++++++++--------- 28 files changed, 118 insertions(+), 156 deletions(-) diff --git a/README.rst b/README.rst index 1ef1363f..619970af 100644 --- a/README.rst +++ b/README.rst @@ -50,7 +50,6 @@ All of the dependencies can easily be installed via `pip ` At the very least, you'll need these two packages to use MongoEngine: - pymongo>=3.4 -- six>=1.10.0 If you utilize a ``DateTimeField``, you might also use a more flexible date parser: diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 250347bf..f25bab8f 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -153,7 +153,7 @@ inherited classes like so: :: # 4. 
Remove indexes info = collection.index_information() - indexes_to_drop = [key for key, value in info.iteritems() + indexes_to_drop = [key for key, value in info.items() if '_types' in dict(value['key'])] for index in indexes_to_drop: collection.drop_index(index) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 2a2c7e7d..bb70089e 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,7 +1,6 @@ import weakref from bson import DBRef -from six import iteritems from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned @@ -360,7 +359,7 @@ class StrictDict(object): _classes = {} def __init__(self, **kwargs): - for k, v in iteritems(kwargs): + for k, v in kwargs.items(): setattr(self, k, v) def __getitem__(self, key): @@ -408,7 +407,7 @@ class StrictDict(object): return (key for key in self.__slots__ if hasattr(self, key)) def __len__(self): - return len(list(iteritems(self))) + return len(list(self.items())) def __eq__(self, other): return list(self.items()) == list(other.items()) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 1254d042..dff759a7 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -5,8 +5,6 @@ from functools import partial from bson import DBRef, ObjectId, SON, json_util import pymongo -import six -from six import iteritems from mongoengine import signals from mongoengine.base.common import get_document @@ -110,7 +108,7 @@ class BaseDocument(object): # Assign default values to the instance. # We set default values only for fields loaded from DB. See # https://github.com/mongoengine/mongoengine/issues/399 for more info. - for key, field in iteritems(self._fields): + for key, field in self._fields.items(): if self._db_field_map.get(key, key) in __only_fields: continue value = getattr(self, key, None) @@ -122,14 +120,14 @@ class BaseDocument(object): # Set passed values after initialisation if self._dynamic: dynamic_data = {} - for key, value in iteritems(values): + for key, value in values.items(): if key in self._fields or key == "_id": setattr(self, key, value) else: dynamic_data[key] = value else: FileField = _import_class("FileField") - for key, value in iteritems(values): + for key, value in values.items(): key = self._reverse_db_field_map.get(key, key) if key in self._fields or key in ("id", "pk", "_cls"): if __auto_convert and value is not None: @@ -145,7 +143,7 @@ class BaseDocument(object): if self._dynamic: self._dynamic_lock = False - for key, value in iteritems(dynamic_data): + for key, value in dynamic_data.items(): setattr(self, key, value) # Flag initialised @@ -575,7 +573,7 @@ class BaseDocument(object): if not hasattr(data, "items"): iterator = enumerate(data) else: - iterator = iteritems(data) + iterator = data.items() for index_or_key, value in iterator: item_key = "%s%s." % (base_key, index_or_key) @@ -741,7 +739,7 @@ class BaseDocument(object): # Convert SON to a data dict, making sure each key is a string and # corresponds to the right db field. 
data = {} - for key, value in iteritems(son): + for key, value in son.items(): key = str(key) key = cls._db_field_map.get(key, key) data[key] = value @@ -756,7 +754,7 @@ class BaseDocument(object): if not _auto_dereference: fields = copy.deepcopy(fields) - for field_name, field in iteritems(fields): + for field_name, field in fields.items(): field._auto_dereference = _auto_dereference if field.db_field in data: value = data[field.db_field] @@ -781,7 +779,7 @@ class BaseDocument(object): # In STRICT documents, remove any keys that aren't in cls._fields if cls.STRICT: - data = {k: v for k, v in iteritems(data) if k in cls._fields} + data = {k: v for k, v in data.items() if k in cls._fields} obj = cls( __auto_convert=False, _created=created, __only_fields=only_fields, **data diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 3c88149b..ac894d91 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -4,7 +4,6 @@ import weakref from bson import DBRef, ObjectId, SON import pymongo -from six import iteritems from mongoengine.base.common import UPDATE_OPERATORS from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList @@ -418,11 +417,11 @@ class ComplexBaseField(BaseField): if self.field: value_dict = { key: self.field._to_mongo_safe_call(item, use_db_field, fields) - for key, item in iteritems(value) + for key, item in value.items() } else: value_dict = {} - for k, v in iteritems(value): + for k, v in value.items(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: @@ -461,8 +460,8 @@ class ComplexBaseField(BaseField): """If field is provided ensure the value is valid.""" errors = {} if self.field: - if hasattr(value, "iteritems") or hasattr(value, "items"): - sequence = iteritems(value) + if hasattr(value, "items"): + sequence = value.items() else: sequence = enumerate(value) for k, v in sequence: diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 0d6e08be..30a6fbab 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -1,8 +1,6 @@ import itertools import warnings -from six import iteritems, itervalues - from mongoengine.base.common import _document_registry from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField from mongoengine.common import _import_class @@ -68,7 +66,7 @@ class DocumentMetaclass(type): # Standard object mixin - merge in any Fields if not hasattr(base, "_meta"): base_fields = {} - for attr_name, attr_value in iteritems(base.__dict__): + for attr_name, attr_value in base.__dict__.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -80,7 +78,7 @@ class DocumentMetaclass(type): # Discover any document fields field_names = {} - for attr_name, attr_value in iteritems(attrs): + for attr_name, attr_value in attrs.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -110,9 +108,7 @@ class DocumentMetaclass(type): attrs["_fields_ordered"] = tuple( i[1] - for i in sorted( - (v.creation_counter, v.name) for v in itervalues(doc_fields) - ) + for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) ) # @@ -190,7 +186,7 @@ class DocumentMetaclass(type): # f.__dict__.update({"im_self": getattr(f, "__self__")}) # Handle delete rules - for field in itervalues(new_class._fields): + for field in new_class._fields.values(): f = field if f.owner_document is None: f.owner_document = new_class @@ 
-399,7 +395,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class.objects = QuerySetManager() # Validate the fields and set primary key if needed - for field_name, field in iteritems(new_class._fields): + for field_name, field in new_class._fields.items(): if field.primary_key: # Ensure only one primary key is set current_pk = new_class._meta.get("id_field") @@ -476,7 +472,7 @@ class MetaDict(dict): _merge_options = ("indexes",) def merge(self, new_options): - for k, v in iteritems(new_options): + for k, v in new_options.items(): if k in self._merge_options: self[k] = self.get(k, []) + v else: diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 1592ceef..5e9a6e8b 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,7 +1,6 @@ from contextlib import contextmanager from pymongo.write_concern import WriteConcern -from six import iteritems from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db @@ -123,7 +122,7 @@ class no_dereference(object): self.deref_fields = [ k - for k, v in iteritems(self.cls._fields) + for k, v in self.cls._fields.items() if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) ] diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 01cd5f36..d0e6c527 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -1,6 +1,4 @@ from bson import DBRef, SON -import six -from six import iteritems from mongoengine.base import ( BaseDict, @@ -79,7 +77,7 @@ class DeReference(object): def _get_items_from_dict(items): new_items = {} - for k, v in iteritems(items): + for k, v in items.items(): value = v if isinstance(v, list): value = _get_items_from_list(v) @@ -120,7 +118,7 @@ class DeReference(object): depth += 1 for item in iterator: if isinstance(item, (Document, EmbeddedDocument)): - for field_name, field in iteritems(item._fields): + for field_name, field in item._fields.items(): v = item._data.get(field_name, None) if isinstance(v, LazyReference): # LazyReference inherits DBRef but should not be dereferenced here ! 
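Most hunks in this commit are the same iteritems()/itervalues() cleanup; on Python 3 the plain dict methods cover what six provided. Illustrative only::

    data = {"a": 1, "b": 2}

    # before: from six import iteritems; for k, v in iteritems(data): ...
    for key, value in data.items():
        pass

    assert ("a", 1) in data.items()
    assert 1 in data.values()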
@@ -136,7 +134,7 @@ class DeReference(object): getattr(field, "field", None), "document_type", None ) references = self._find_references(v, depth) - for key, refs in iteritems(references): + for key, refs in references.items(): if isinstance( field_cls, (Document, TopLevelDocumentMetaclass) ): @@ -153,7 +151,7 @@ class DeReference(object): ) elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: references = self._find_references(item, depth - 1) - for key, refs in iteritems(references): + for key, refs in references.items(): reference_map.setdefault(key, set()).update(refs) return reference_map @@ -162,7 +160,7 @@ class DeReference(object): """Fetch all references and convert to their document objects """ object_map = {} - for collection, dbrefs in iteritems(self.reference_map): + for collection, dbrefs in self.reference_map.items(): # we use getattr instead of hasattr because hasattr swallows any exception under python2 # so it could hide nasty things without raising exceptions (cfr bug #1688)) @@ -174,7 +172,7 @@ class DeReference(object): dbref for dbref in dbrefs if (col_name, dbref) not in object_map ] references = collection.objects.in_bulk(refs) - for key, doc in iteritems(references): + for key, doc in references.items(): object_map[(col_name, key)] = doc else: # Generic reference: use the refs data to convert to document if isinstance(doc_type, (ListField, DictField, MapField)): @@ -250,7 +248,7 @@ class DeReference(object): data = [] else: is_list = False - iterator = iteritems(items) + iterator = items.items() data = {} depth += 1 diff --git a/mongoengine/document.py b/mongoengine/document.py index 13541da8..4ac979d1 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -4,8 +4,6 @@ import warnings from bson.dbref import DBRef import pymongo from pymongo.read_preferences import ReadPreference -import six -from six import iteritems from mongoengine import signals from mongoengine.base import ( @@ -55,7 +53,7 @@ class InvalidCollectionError(Exception): pass -class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): +class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): """A :class:`~mongoengine.Document` that isn't stored in its own collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as fields on :class:`~mongoengine.Document`\ s through the @@ -103,7 +101,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): return data -class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): +class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): """The base class used for defining the structure and properties of collections of documents stored in MongoDB. Inherit from this class, and add fields as class attributes to define a document's structure. @@ -632,7 +630,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # Delete FileFields separately FileField = _import_class("FileField") - for name, field in iteritems(self._fields): + for name, field in self._fields.items(): if isinstance(field, FileField): getattr(self, name).delete() @@ -1029,7 +1027,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): return {"missing": missing, "extra": extra} -class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): +class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): """A Dynamic Document class allowing flexible, expandable and uncontrolled schemas. 
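The class declarations changed in document.py (Document, EmbeddedDocument, DynamicDocument, DynamicEmbeddedDocument) switch from six.with_metaclass() to the native Python 3 keyword form; both yield a class whose type is the given metaclass. A minimal self-contained sketch with made-up names::

    class Meta(type):
        pass

    class Base:
        pass

    # before: class Doc(six.with_metaclass(Meta, Base)): ...
    class Doc(Base, metaclass=Meta):
        pass

    assert type(Doc) is Meta
    assert issubclass(Doc, Base)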
As a :class:`~mongoengine.Document` subclass, acts in the same way as an ordinary document but has expanded style properties. Any data @@ -1060,7 +1058,7 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): super(DynamicDocument, self).__delattr__(*args, **kwargs) -class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): +class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): """A Dynamic Embedded Document class allowing flexible, expandable and uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more information about dynamic documents. diff --git a/mongoengine/errors.py b/mongoengine/errors.py index 1ac01257..7045145c 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -1,6 +1,5 @@ from collections import defaultdict -from six import iteritems __all__ = ( "NotRegistered", @@ -125,7 +124,7 @@ class ValidationError(AssertionError): def build_dict(source): errors_dict = {} if isinstance(source, dict): - for field_name, error in iteritems(source): + for field_name, error in source.items(): errors_dict[field_name] = build_dict(error) elif isinstance(source, ValidationError) and source.errors: return build_dict(source.errors) @@ -146,15 +145,15 @@ class ValidationError(AssertionError): if isinstance(value, list): value = " ".join([generate_key(k) for k in value]) elif isinstance(value, dict): - value = " ".join([generate_key(v, k) for k, v in iteritems(value)]) + value = " ".join([generate_key(v, k) for k, v in value.items()]) results = "%s.%s" % (prefix, value) if prefix else value return results error_dict = defaultdict(list) - for k, v in iteritems(self.to_dict()): + for k, v in self.to_dict().items(): error_dict[generate_key(v)].append(k) - return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)]) + return " ".join(["%s: %s" % (k, v) for k, v in error_dict.items()]) class DeprecatedError(Exception): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index b6ddd566..391ad37b 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -13,8 +13,6 @@ from bson.int64 import Int64 import gridfs import pymongo from pymongo import ReturnDocument -import six -from six import iteritems try: import dateutil @@ -205,7 +203,7 @@ class EmailField(StringField): ) UTF8_USER_REGEX = LazyRegexCompiler( - six.u( + ( # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to # include `UTF8-non-ascii`. 
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+)*\Z" @@ -387,7 +385,7 @@ class FloatField(BaseField): return value def validate(self, value): - if isinstance(value, six.integer_types): + if isinstance(value, int): try: value = float(value) except OverflowError: @@ -868,12 +866,12 @@ class DynamicField(BaseField): value = {k: v for k, v in enumerate(value)} data = {} - for k, v in iteritems(value): + for k, v in value.items(): data[k] = self.to_mongo(v, use_db_field, fields) value = data if is_list: # Convert back to a list - value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))] + value = [v for k, v in sorted(data.items(), key=itemgetter(0))] return value def to_python(self, value): @@ -1607,10 +1605,10 @@ class BinaryField(BaseField): return Binary(value) def validate(self, value): - if not isinstance(value, (six.binary_type, Binary)): + if not isinstance(value, (bytes, Binary)): self.error( "BinaryField only accepts instances of " - "(%s, %s, Binary)" % (six.binary_type.__name__, Binary.__name__) + "(%s, %s, Binary)" % (bytes.__name__, Binary.__name__) ) if self.max_bytes is not None and len(value) > self.max_bytes: @@ -1829,7 +1827,7 @@ class FileField(BaseField): key = self.name if ( hasattr(value, "read") and not isinstance(value, GridFSProxy) - ) or isinstance(value, (six.binary_type, str)): + ) or isinstance(value, (bytes, str)): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 8a068e2e..e6901100 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -9,8 +9,6 @@ import pymongo import pymongo.errors from pymongo.collection import ReturnDocument from pymongo.common import validate_read_preference -import six -from six import iteritems from mongoengine import signals from mongoengine.base import get_document @@ -252,12 +250,12 @@ class BaseQuerySet(object): queryset = queryset.filter(*q_objs, **query) try: - result = six.next(queryset) + result = next(queryset) except StopIteration: msg = "%s matching query does not exist." % queryset._document._class_name raise queryset._document.DoesNotExist(msg) try: - six.next(queryset) + next(queryset) except StopIteration: return result @@ -1567,7 +1565,7 @@ class BaseQuerySet(object): if self._limit == 0 or self._none: raise StopIteration - raw_doc = six.next(self._cursor) + raw_doc = next(self._cursor) if self._as_pymongo: return raw_doc @@ -1812,13 +1810,13 @@ class BaseQuerySet(object): } """ total, data, types = self.exec_js(freq_func, field) - values = {types.get(k): int(v) for k, v in iteritems(data)} + values = {types.get(k): int(v) for k, v in data.items()} if normalize: values = {k: float(v) / total for k, v in values.items()} frequencies = {} - for k, v in iteritems(values): + for k, v in values.items(): if isinstance(k, float): if int(k) == k: k = int(k) diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4ba62d46..39b09c9d 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,5 +1,3 @@ -import six - from mongoengine.errors import OperationError from mongoengine.queryset.base import ( BaseQuerySet, @@ -127,8 +125,8 @@ class QuerySet(BaseQuerySet): # Pull in ITER_CHUNK_SIZE docs from the database and store them in # the result cache. 
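The BinaryField.__set__ hunk earlier in this commit's fields.py diff normalises bytearray input with the builtin bytes() instead of six.binary_type(); the stored value is byte-for-byte the same. Illustrative check::

    payload = bytearray(b"\xe6\x00\xc4\xff\x07")
    normalised = bytes(payload)

    assert normalised == b"\xe6\x00\xc4\xff\x07"
    assert isinstance(normalised, bytes)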
try: - for _ in six.moves.range(ITER_CHUNK_SIZE): - self._result_cache.append(six.next(self)) + for _ in range(ITER_CHUNK_SIZE): + self._result_cache.append(next(self)) except StopIteration: # Getting this exception means there are no more docs in the # db cursor. Set _has_more to False so that we can use that @@ -180,9 +178,9 @@ class QuerySetNoCache(BaseQuerySet): return ".. queryset mid-iteration .." data = [] - for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): + for _ in range(REPR_OUTPUT_SIZE + 1): try: - data.append(six.next(self)) + data.append(next(self)) except StopIteration: break diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index efbdae4e..1202ec45 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -3,8 +3,6 @@ from collections import defaultdict from bson import ObjectId, SON from bson.dbref import DBRef import pymongo -import six -from six import iteritems from mongoengine.base import UPDATE_OPERATORS from mongoengine.common import _import_class @@ -180,7 +178,7 @@ def query(_doc_cls=None, **kwargs): "$near" in value_dict or "$nearSphere" in value_dict ): value_son = SON() - for k, v in iteritems(value_dict): + for k, v in value_dict.items(): if k == "$maxDistance" or k == "$minDistance": continue value_son[k] = v diff --git a/requirements.txt b/requirements.txt index 43e5261b..0ce39f74 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ pymongo>=3.4 -six==1.10.0 Sphinx==1.5.5 sphinx-rtd-theme==0.2.4 diff --git a/setup.py b/setup.py index 6873284a..b60188c7 100644 --- a/setup.py +++ b/setup.py @@ -145,7 +145,7 @@ setup( platforms=["any"], classifiers=CLASSIFIERS, python_requires=">=3.5", - install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0"], + install_requires=["pymongo>=3.4, <4.0"], cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index be857b59..4179a026 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -5,7 +5,6 @@ from datetime import datetime from pymongo.collation import Collation from pymongo.errors import OperationFailure import pytest -from six import iteritems from mongoengine import * from mongoengine.connection import get_db @@ -59,7 +58,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') assert len(info) == 4 - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -87,7 +86,7 @@ class TestIndexes(unittest.TestCase): # the indices on -date and tags will both contain # _cls as first element in the key assert len(info) == 4 - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -102,7 +101,7 @@ class TestIndexes(unittest.TestCase): ExtendedBlogPost.ensure_indexes() info = ExtendedBlogPost.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -192,7 +191,7 @@ class TestIndexes(unittest.TestCase): # Indexes are lazy so use list() to perform query list(Person.objects) info = Person.objects._collection.index_information() - info = [value["key"] for key, value in 
iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("rank.title", 1)] in info def test_explicit_geo2d_index(self): @@ -207,7 +206,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "2d")] in info def test_explicit_geo2d_index_embedded(self): @@ -227,7 +226,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("current.location.point", "2d")] in info def test_explicit_geosphere_index(self): @@ -244,7 +243,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "2dsphere")] in info def test_explicit_geohaystack_index(self): @@ -266,7 +265,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "geoHaystack")] in info def test_create_geohaystack_index(self): @@ -279,7 +278,7 @@ class TestIndexes(unittest.TestCase): Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "geoHaystack"), ("name", 1)] in info def test_dictionary_indexes(self): @@ -308,7 +307,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() info = [ (value["key"], value.get("unique", False), value.get("sparse", False)) - for key, value in iteritems(info) + for key, value in info.items() ] assert ([("addDate", -1)], True, True) in info @@ -901,7 +900,7 @@ class TestIndexes(unittest.TestCase): self.fail("Unbound local error at index + pk definition") info = BlogPost.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] index_item = [("_id", 1), ("comments.comment_id", 1)] assert index_item in info @@ -942,7 +941,7 @@ class TestIndexes(unittest.TestCase): meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]} info = MyDoc.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("provider_ids.foo", 1)] in info assert [("provider_ids.bar", 1)] in info diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index 5072f841..0107c0a6 100644 --- a/tests/document/test_inheritance.py +++ b/tests/document/test_inheritance.py @@ -3,7 +3,6 @@ import unittest import warnings import pytest -from six import iteritems from mongoengine import ( BooleanField, @@ -550,7 +549,7 @@ class TestInheritance(MongoDBTestCase): class Human(Mammal): pass - for k, v in iteritems(defaults): + for k, v in defaults.items(): for cls in [Animal, Fish, Guppy]: assert cls._meta[k] == v diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 
a5c21323..920bf392 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -10,7 +10,6 @@ import bson from bson import DBRef, ObjectId from pymongo.errors import DuplicateKeyError import pytest -from six import iteritems from mongoengine import * from mongoengine import signals @@ -3274,7 +3273,7 @@ class TestDocumentInstance(MongoDBTestCase): def expand(self): self.flattened_parameter = {} - for parameter_name, parameter in iteritems(self.parameters): + for parameter_name, parameter in self.parameters.items(): parameter.expand() class NodesSystem(Document): @@ -3282,7 +3281,7 @@ class TestDocumentInstance(MongoDBTestCase): nodes = MapField(ReferenceField(Node, dbref=False)) def save(self, *args, **kwargs): - for node_name, node in iteritems(self.nodes): + for node_name, node in self.nodes.items(): node.expand() node.save(*args, **kwargs) super(NodesSystem, self).save(*args, **kwargs) diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index a653b961..a9c0c7e5 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -3,13 +3,12 @@ import uuid from bson import Binary import pytest -import six from mongoengine import * from tests.utils import MongoDBTestCase -BIN_VALUE = six.b( - "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" +BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode( + "latin-1" ) @@ -22,7 +21,7 @@ class TestBinaryField(MongoDBTestCase): content_type = StringField() blob = BinaryField() - BLOB = six.b("\xe6\x00\xc4\xff\x07") + BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1") MIME_TYPE = "application/octet-stream" Attachment.drop_collection() @@ -32,7 +31,7 @@ class TestBinaryField(MongoDBTestCase): attachment_1 = Attachment.objects().first() assert MIME_TYPE == attachment_1.content_type - assert BLOB == six.binary_type(attachment_1.blob) + assert BLOB == bytes(attachment_1.blob) def test_validation_succeeds(self): """Ensure that valid values can be assigned to binary fields. 
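For context on the six.b() removals in these binary-field tests: on Python 3, six defines b(s) as s.encode("latin-1"), so encoding the same text literal with latin-1 reproduces the exact byte values (latin-1 maps code points 0-255 straight onto the corresponding bytes). A minimal standalone check, not part of the patch itself:

    assert "\xe6\x00\xc4\xff\x07".encode("latin-1") == b"\xe6\x00\xc4\xff\x07"

The same literals could therefore also be written directly as bytes literals (b"...") with identical behaviour.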
@@ -47,11 +46,11 @@ class TestBinaryField(MongoDBTestCase): attachment_required = AttachmentRequired() with pytest.raises(ValidationError): attachment_required.validate() - attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07")) + attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1")) attachment_required.validate() - _5_BYTES = six.b("\xe6\x00\xc4\xff\x07") - _4_BYTES = six.b("\xe6\x00\xc4\xff") + _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1") + _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1") with pytest.raises(ValidationError): AttachmentSizeLimit(blob=_5_BYTES).validate() AttachmentSizeLimit(blob=_4_BYTES).validate() @@ -133,7 +132,7 @@ class TestBinaryField(MongoDBTestCase): MyDocument.drop_collection() - bin_data = six.b("\xe6\x00\xc4\xff\x07") + bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1") doc = MyDocument(bin_field=bin_data).save() n_updated = MyDocument.objects(bin_field=bin_data).update_one( diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index 5e1bfaa4..42a4b7f1 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -2,7 +2,6 @@ import datetime import pytest -import six try: import dateutil diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 21ab523b..48936af7 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -2,7 +2,6 @@ import datetime as dt import pytest -import six try: import dateutil diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index 5ab6c93f..cbac9b69 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -7,7 +7,6 @@ from io import BytesIO import gridfs import pytest -import six from mongoengine import * from mongoengine.connection import get_db @@ -58,7 +57,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" putfile = PutFile() @@ -101,8 +100,8 @@ class TestFileField(MongoDBTestCase): StreamFile.drop_collection() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") content_type = "text/plain" streamfile = StreamFile() @@ -137,8 +136,8 @@ class TestFileField(MongoDBTestCase): StreamFile.drop_collection() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") streamfile = StreamFile() streamfile.save() @@ -167,8 +166,8 @@ class TestFileField(MongoDBTestCase): class SetFile(Document): the_file = FileField() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") SetFile.drop_collection() @@ -196,7 +195,7 @@ class TestFileField(MongoDBTestCase): GridDocument.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() # Test without default @@ -213,7 +212,7 @@ class TestFileField(MongoDBTestCase): assert doc_b.the_file.grid_id == doc_c.the_file.grid_id # Test with default - doc_d = GridDocument(the_file=six.b("")) + doc_d = GridDocument(the_file="".encode("latin-1")) doc_d.save() doc_e = GridDocument.objects.with_id(doc_d.id) @@ -240,7 +239,7 @@ class TestFileField(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" 
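The FileField/GridFS tests in this module now pass bytes objects directly. As an illustration of the resulting Python 3 usage only, here is a minimal sketch; it assumes a reachable MongoDB instance, and the class and database names are illustrative, not taken from this patch:

    from mongoengine import Document, FileField, connect

    class PutFile(Document):
        the_file = FileField()

    connect("mongoenginetest")  # illustrative database name
    putfile = PutFile()
    putfile.the_file.put(b"Hello, World!", content_type="text/plain")
    putfile.save()
    assert PutFile.objects.first().the_file.read() == b"Hello, World!"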
- test_file.the_file.put(six.b("Hello, World!")) + test_file.the_file.put("Hello, World!".encode("latin-1")) test_file.save() # Second instance @@ -297,7 +296,9 @@ class TestFileField(MongoDBTestCase): test_file = TestFile() assert not bool(test_file.the_file) - test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain") + test_file.the_file.put( + "Hello, World!".encode("latin-1"), content_type="text/plain" + ) test_file.save() assert bool(test_file.the_file) @@ -319,7 +320,7 @@ class TestFileField(MongoDBTestCase): class TestFile(Document): the_file = FileField() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" testfile = TestFile() @@ -363,7 +364,7 @@ class TestFileField(MongoDBTestCase): testfile.the_file.put(text, content_type=content_type, filename="hello") testfile.save() - text = six.b("Bonjour, World!") + text = "Bonjour, World!".encode("latin-1") testfile.the_file.replace(text, content_type=content_type, filename="hello") testfile.save() @@ -387,7 +388,7 @@ class TestFileField(MongoDBTestCase): TestImage.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() t = TestImage() @@ -503,21 +504,21 @@ class TestFileField(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" - test_file.the_file.put(six.b("Hello, World!"), name="hello.txt") + test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt") test_file.save() data = get_db("test_files").macumba.files.find_one() assert data.get("name") == "hello.txt" test_file = TestFile.objects.first() - assert test_file.the_file.read() == six.b("Hello, World!") + assert test_file.the_file.read() == "Hello, World!".encode("latin-1") test_file = TestFile.objects.first() - test_file.the_file = six.b("HELLO, WORLD!") + test_file.the_file = "Hello, World!".encode("latin-1") test_file.save() test_file = TestFile.objects.first() - assert test_file.the_file.read() == six.b("HELLO, WORLD!") + assert test_file.the_file.read() == "Hello, World!".encode("latin-1") def test_copyable(self): class PutFile(Document): @@ -525,7 +526,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" putfile = PutFile() diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py index a1cd7a0a..839494a9 100644 --- a/tests/fields/test_float_field.py +++ b/tests/fields/test_float_field.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import pytest -import six from mongoengine import * @@ -52,9 +51,8 @@ class TestFloatField(MongoDBTestCase): big_person = BigPerson() - for value, value_type in enumerate(six.integer_types): - big_person.height = value_type(value) - big_person.validate() + big_person.height = int(0) + big_person.validate() big_person.height = 2 ** 500 big_person.validate() diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py index b39a714c..330051c3 100644 --- a/tests/fields/test_long_field.py +++ b/tests/fields/test_long_field.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -import pytest from bson.int64 import Int64 -import six +import pytest from mongoengine import * from mongoengine.connection import get_db @@ -24,7 +22,7 @@ class TestLongField(MongoDBTestCase): assert isinstance( db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64 ) - assert isinstance(doc.some_long, six.integer_types) + 
assert isinstance(doc.some_long, int) def test_long_validation(self): """Ensure that invalid values cannot be assigned to long fields. diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index cf4cd0dc..fac6dfeb 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -10,8 +10,6 @@ import pymongo from pymongo.read_preferences import ReadPreference from pymongo.results import UpdateResult import pytest -import six -from six import iteritems from mongoengine import * from mongoengine.connection import get_db @@ -4093,7 +4091,7 @@ class TestQueryset(unittest.TestCase): info = Comment.objects._collection.index_information() info = [ (value["key"], value.get("unique", False), value.get("sparse", False)) - for key, value in iteritems(info) + for key, value in info.items() ] assert ([("_cls", 1), ("message", 1)], False, False) in info diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 734061ed..6d432e32 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,7 +1,6 @@ import unittest import pytest -from six import iterkeys from mongoengine import Document from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict @@ -372,7 +371,7 @@ class TestStrictDict(unittest.TestCase): def test_iterkeys(self): d = self.dtype(a=1) - assert list(iterkeys(d)) == ["a"] + assert list(d.keys()) == ["a"] def test_len(self): d = self.dtype(a=1) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index b9d92883..0f9f412c 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -2,10 +2,8 @@ import unittest from bson import DBRef, ObjectId -from six import iteritems from mongoengine import * -from mongoengine.connection import get_db from mongoengine.context_managers import query_counter @@ -739,7 +737,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, User) # Document select_related @@ -752,7 +750,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, User) # Queryset select_related @@ -766,7 +764,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, User) User.drop_collection() @@ -820,7 +818,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Document select_related @@ -836,7 +834,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Queryset select_related @@ -853,7 +851,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ Group.objects.delete() @@ -910,7 +908,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, UserA) # Document select_related @@ 
-926,7 +924,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, UserA) # Queryset select_related @@ -943,7 +941,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, UserA) UserA.drop_collection() @@ -997,7 +995,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Document select_related @@ -1013,7 +1011,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Queryset select_related @@ -1030,7 +1028,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ Group.objects.delete() From b234aa48e4c17ac056afde75e43f06ef8f8200a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 11 Mar 2020 23:21:38 +0100 Subject: [PATCH 174/216] run pyupgrade --- mongoengine/base/datastructures.py | 18 ++++++++++-------- mongoengine/base/document.py | 26 +++++++++++++------------- mongoengine/base/fields.py | 6 ++++-- mongoengine/base/metaclasses.py | 21 ++------------------- mongoengine/connection.py | 2 +- mongoengine/dereference.py | 4 ++-- mongoengine/document.py | 2 +- mongoengine/errors.py | 8 ++++---- mongoengine/fields.py | 20 ++++++++++++-------- mongoengine/queryset/field_list.py | 2 +- mongoengine/queryset/transform.py | 4 +++- setup.py | 6 +----- 12 files changed, 54 insertions(+), 65 deletions(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index bb70089e..86fde15e 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -67,11 +67,11 @@ class BaseDict(dict): if isinstance(value, EmbeddedDocument) and value._instance is None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): - value = BaseDict(value, None, "%s.%s" % (self._name, key)) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) super(BaseDict, self).__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): - value = BaseList(value, None, "%s.%s" % (self._name, key)) + value = BaseList(value, None, "{}.{}".format(self._name, key)) super(BaseDict, self).__setitem__(key, value) value._instance = self._instance return value @@ -97,7 +97,7 @@ class BaseDict(dict): def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed("%s.%s" % (self._name, key)) + self._instance._mark_as_changed("{}.{}".format(self._name, key)) else: self._instance._mark_as_changed(self._name) @@ -133,12 +133,12 @@ class BaseList(list): value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): # Replace dict by BaseDict - value = BaseDict(value, None, "%s.%s" % (self._name, key)) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) super(BaseList, self).__setitem__(key, value) value._instance = 
self._instance elif isinstance(value, list) and not isinstance(value, BaseList): # Replace list by BaseList - value = BaseList(value, None, "%s.%s" % (self._name, key)) + value = BaseList(value, None, "{}.{}".format(self._name, key)) super(BaseList, self).__setitem__(key, value) value._instance = self._instance return value @@ -181,7 +181,9 @@ class BaseList(list): def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self))) + self._instance._mark_as_changed( + "{}.{}".format(self._name, key % len(self)) + ) else: self._instance._mark_as_changed(self._name) @@ -428,7 +430,7 @@ class StrictDict(object): def __repr__(self): return "{%s}" % ", ".join( - '"{0!s}": {1!r}'.format(k, v) for k, v in self.items() + '"{!s}": {!r}'.format(k, v) for k, v in self.items() ) cls._classes[allowed_keys] = SpecificStrictDict @@ -473,4 +475,4 @@ class LazyReference(DBRef): raise AttributeError() def __repr__(self): - return "" % (self.document_type, self.pk) + return "".format(self.document_type, self.pk) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index dff759a7..83c12a96 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -93,7 +93,7 @@ class BaseDocument(object): list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"] ) if _undefined_fields: - msg = ('The fields "{0}" do not exist on the document "{1}"').format( + msg = ('The fields "{}" do not exist on the document "{}"').format( _undefined_fields, self._class_name ) raise FieldDoesNotExist(msg) @@ -286,7 +286,7 @@ class BaseDocument(object): except (UnicodeEncodeError, UnicodeDecodeError): u = "[Bad Unicode data]" repr_type = str if u is None else type(u) - return repr_type("<%s: %s>" % (self.__class__.__name__, u)) + return repr_type("<{}: {}>".format(self.__class__.__name__, u)) def __str__(self): # TODO this could be simpler? @@ -441,7 +441,7 @@ class BaseDocument(object): pk = self.pk elif self._instance and hasattr(self._instance, "pk"): pk = self._instance.pk - message = "ValidationError (%s:%s) " % (self._class_name, pk) + message = "ValidationError ({}:{}) ".format(self._class_name, pk) raise ValidationError(message, errors=errors) def to_json(self, *args, **kwargs): @@ -514,7 +514,7 @@ class BaseDocument(object): if "." in key: key, rest = key.split(".", 1) key = self._db_field_map.get(key, key) - key = "%s.%s" % (key, rest) + key = "{}.{}".format(key, rest) else: key = self._db_field_map.get(key, key) @@ -576,7 +576,7 @@ class BaseDocument(object): iterator = data.items() for index_or_key, value in iterator: - item_key = "%s%s." % (base_key, index_or_key) + item_key = "{}{}.".format(base_key, index_or_key) # don't check anything lower if this key is already marked # as changed. 
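The %-formatting to str.format() rewrites in this commit are mechanical and behaviour-preserving; the dotted change-tracking keys built in this module come out identical either way, as a quick standalone check shows:

    name, key = "tags", 0
    assert "%s.%s" % (name, key) == "{}.{}".format(name, key) == "tags.0"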
if item_key[:-1] in changed_fields: @@ -584,7 +584,7 @@ class BaseDocument(object): if hasattr(value, "_get_changed_fields"): changed = value._get_changed_fields() - changed_fields += ["%s%s" % (item_key, k) for k in changed if k] + changed_fields += ["{}{}".format(item_key, k) for k in changed if k] elif isinstance(value, (list, tuple, dict)): self._nestable_types_changed_fields(changed_fields, item_key, value) @@ -615,7 +615,7 @@ class BaseDocument(object): if isinstance(data, EmbeddedDocument): # Find all embedded fields that have been changed changed = data._get_changed_fields() - changed_fields += ["%s%s" % (key, k) for k in changed if k] + changed_fields += ["{}{}".format(key, k) for k in changed if k] elif isinstance(data, (list, tuple, dict)): if hasattr(field, "field") and isinstance( field.field, (ReferenceField, GenericReferenceField) @@ -769,11 +769,10 @@ class BaseDocument(object): if errors_dict: errors = "\n".join( - ["Field '%s' - %s" % (k, v) for k, v in errors_dict.items()] + ["Field '{}' - {}".format(k, v) for k, v in errors_dict.items()] ) - msg = "Invalid data to create a `%s` instance.\n%s" % ( - cls._class_name, - errors, + msg = "Invalid data to create a `{}` instance.\n{}".format( + cls._class_name, errors, ) raise InvalidDocumentError(msg) @@ -944,7 +943,8 @@ class BaseDocument(object): # Add the new index to the list fields = [ - ("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields + ("{}{}".format(namespace, f), pymongo.ASCENDING) + for f in unique_fields ] index = {"fields": fields, "unique": True, "sparse": sparse} unique_indexes.append(index) @@ -1001,7 +1001,7 @@ class BaseDocument(object): elif field._geo_index: field_name = field.db_field if parent_field: - field_name = "%s.%s" % (parent_field, field_name) + field_name = "{}.{}".format(parent_field, field_name) geo_indices.append({"fields": [(field_name, field._geo_index)]}) return geo_indices diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index ac894d91..2fadcfe1 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -474,7 +474,9 @@ class ComplexBaseField(BaseField): if errors: field_class = self.field.__class__.__name__ - self.error("Invalid %s item (%s)" % (field_class, value), errors=errors) + self.error( + "Invalid {} item ({})".format(field_class, value), errors=errors + ) # Don't allow empty values if required if self.required and not value: self.error("Field is required and cannot be empty") @@ -546,7 +548,7 @@ class GeoJsonBaseField(BaseField): if isinstance(value, dict): if set(value.keys()) == {"type", "coordinates"}: if value["type"] != self._type: - self.error('%s type must be "%s"' % (self._name, self._type)) + self.error('{} type must be "{}"'.format(self._name, self._type)) return self.validate(value["coordinates"]) else: self.error( diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 30a6fbab..473e6b18 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -168,23 +168,6 @@ class DocumentMetaclass(type): # Add class to the _document_registry _document_registry[new_class._class_name] = new_class - # In Python 2, User-defined methods objects have special read-only - # attributes 'im_func' and 'im_self' which contain the function obj - # and class instance object respectively. With Python 3 these special - # attributes have been replaced by __func__ and __self__. 
The Blinker - # module continues to use im_func and im_self, so the code below - # copies __func__ into im_func and __self__ into im_self for - # classmethod objects in Document derived classes. - # - # Relates to https://github.com/MongoEngine/mongoengine/issues/1107 - # for val in new_class.__dict__.values(): - # if isinstance(val, classmethod): - # f = val.__get__(new_class) - # if hasattr(f, "__func__") and not hasattr(f, "im_func"): - # f.__dict__.update({"im_func": getattr(f, "__func__")}) - # if hasattr(f, "__self__") and not hasattr(f, "im_self"): - # f.__dict__.update({"im_self": getattr(f, "__self__")}) - # Handle delete rules for field in new_class._fields.values(): f = field @@ -458,8 +441,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) for i in itertools.count(): - id_name = "{0}_{1}".format(id_basename, i) - id_db_name = "{0}_{1}".format(id_db_basename, i) + id_name = "{}_{}".format(id_basename, i) + id_db_name = "{}_{}".format(id_db_basename, i) if id_name not in existing_fields and id_db_name not in existing_db_fields: return id_name, id_db_name diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 3f754619..b03e0b1d 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -317,7 +317,7 @@ def _create_connection(alias, connection_class, **connection_settings): try: return connection_class(**connection_settings) except Exception as e: - raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e)) + raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e)) def _find_existing_connection(connection_settings): diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index d0e6c527..3756d84a 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -272,12 +272,12 @@ class DeReference(object): (v["_ref"].collection, v["_ref"].id), v ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = "{0}.{1}.{2}".format(name, k, field_name) + item_name = "{}.{}.{}".format(name, k, field_name) data[k]._data[field_name] = self._attach_objects( v, depth, instance=instance, name=item_name ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = "%s.%s" % (name, k) if name else name + item_name = "{}.{}".format(name, k) if name else name data[k] = self._attach_objects( v, depth - 1, instance=instance, name=item_name ) diff --git a/mongoengine/document.py b/mongoengine/document.py index 4ac979d1..e13918c2 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -555,7 +555,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): if not getattr(ref, "_changed_fields", True): continue - ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) + ref_id = "{},{}".format(ref.__class__.__name__, str(ref._data)) if ref and ref_id not in _refs: _refs.append(ref_id) kwargs["_refs"] = _refs diff --git a/mongoengine/errors.py b/mongoengine/errors.py index 7045145c..76c25773 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -94,7 +94,7 @@ class ValidationError(AssertionError): return str(self.message) def __repr__(self): - return "%s(%s,)" % (self.__class__.__name__, self.message) + return "{}({},)".format(self.__class__.__name__, self.message) def __getattribute__(self, name): message = super(ValidationError, self).__getattribute__(name) @@ -102,7 +102,7 @@ class ValidationError(AssertionError): if self.field_name: message = "%s" % 
message if self.errors: - message = "%s(%s)" % (message, self._format_errors()) + message = "{}({})".format(message, self._format_errors()) return message def _get_message(self): @@ -147,13 +147,13 @@ class ValidationError(AssertionError): elif isinstance(value, dict): value = " ".join([generate_key(v, k) for k, v in value.items()]) - results = "%s.%s" % (prefix, value) if prefix else value + results = "{}.{}".format(prefix, value) if prefix else value return results error_dict = defaultdict(list) for k, v in self.to_dict().items(): error_dict[generate_key(v)].append(k) - return " ".join(["%s: %s" % (k, v) for k, v in error_dict.items()]) + return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()]) class DeprecatedError(Exception): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 391ad37b..91f85d26 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -292,12 +292,16 @@ class EmailField(StringField): domain_part = domain_part.encode("idna").decode("ascii") except UnicodeError: self.error( - "%s %s" % (self.error_msg % value, "(domain failed IDN encoding)") + "{} {}".format( + self.error_msg % value, "(domain failed IDN encoding)" + ) ) else: if not self.validate_domain_part(domain_part): self.error( - "%s %s" % (self.error_msg % value, "(domain validation failed)") + "{} {}".format( + self.error_msg % value, "(domain validation failed)" + ) ) @@ -1344,7 +1348,7 @@ class CachedReferenceField(BaseField): return None update_kwargs = { - "set__%s__%s" % (self.name, key): val + "set__{}__{}".format(self.name, key): val for key, val in document._delta()[0].items() if key in self.fields } @@ -1688,12 +1692,12 @@ class GridFSProxy(object): return self.__copy__() def __repr__(self): - return "<%s: %s>" % (self.__class__.__name__, self.grid_id) + return "<{}: {}>".format(self.__class__.__name__, self.grid_id) def __str__(self): gridout = self.get() filename = getattr(gridout, "filename") if gridout else "" - return "<%s: %s (%s)>" % (self.__class__.__name__, filename, self.grid_id) + return "<{}: {} ({})>".format(self.__class__.__name__, filename, self.grid_id) def __eq__(self, other): if isinstance(other, GridFSProxy): @@ -2097,7 +2101,7 @@ class SequenceField(BaseField): Generate and Increment the counter """ sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_one_and_update( @@ -2111,7 +2115,7 @@ class SequenceField(BaseField): def set_next_value(self, value): """Helper method to set the next sequence value""" sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_one_and_update( filter={"_id": sequence_id}, @@ -2128,7 +2132,7 @@ class SequenceField(BaseField): as it is only fixed on set. 
""" sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] data = collection.find_one({"_id": sequence_id}) diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index c2618ebd..e0d8e322 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -78,7 +78,7 @@ class QueryFieldList(object): return field_list def reset(self): - self.fields = set([]) + self.fields = set() self.slice = {} self.value = self.ONLY diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 1202ec45..3f1db8fa 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -433,7 +433,9 @@ def _geo_operator(field, op, value): value = {"$near": _infer_geometry(value)} else: raise NotImplementedError( - 'Geo method "%s" has not been implemented for a %s ' % (op, field._name) + 'Geo method "{}" has not been implemented for a {} '.format( + op, field._name + ) ) return value diff --git a/setup.py b/setup.py index b60188c7..fe3253ae 100644 --- a/setup.py +++ b/setup.py @@ -108,9 +108,6 @@ CLASSIFIERS = [ "Topic :: Software Development :: Libraries :: Python Modules", ] -PYTHON_VERSION = sys.version_info[0] -PY3 = PYTHON_VERSION == 3 - extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), "tests_require": [ @@ -118,8 +115,7 @@ extra_opts = { "pytest-cov", "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", - "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support - "zipp<2.0.0", # (dependency of pytest) dropped python2 support + "Pillow>=2.0.0", ], } From 1e110a2c41d2e4675d0195fafa00e49dcd331675 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 11 Mar 2020 23:26:37 +0100 Subject: [PATCH 175/216] run pyupgrade --py3-plus --- mongoengine/base/datastructures.py | 29 +++--- mongoengine/base/document.py | 8 +- mongoengine/base/fields.py | 6 +- mongoengine/base/metaclasses.py | 7 +- mongoengine/base/utils.py | 2 +- mongoengine/connection.py | 4 +- mongoengine/context_managers.py | 12 +-- mongoengine/dereference.py | 2 +- mongoengine/document.py | 26 +++--- mongoengine/errors.py | 4 +- mongoengine/fields.py | 144 ++++++++++++++--------------- mongoengine/queryset/base.py | 16 ++-- mongoengine/queryset/field_list.py | 2 +- mongoengine/queryset/manager.py | 2 +- mongoengine/queryset/queryset.py | 4 +- mongoengine/queryset/visitor.py | 4 +- mongoengine/signals.py | 4 +- setup.py | 2 +- 18 files changed, 134 insertions(+), 144 deletions(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 86fde15e..d3bff2b3 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -51,7 +51,7 @@ class BaseDict(dict): if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseDict, self).__init__(dict_items) + super().__init__(dict_items) def get(self, key, default=None): # get does not use __getitem__ by default so we must override it as well @@ -61,18 +61,18 @@ class BaseDict(dict): return default def __getitem__(self, key): - value = super(BaseDict, self).__getitem__(key) + value = super().__getitem__(key) EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument) and value._instance is 
None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): value = BaseDict(value, None, "{}.{}".format(self._name, key)) - super(BaseDict, self).__setitem__(key, value) + super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): value = BaseList(value, None, "{}.{}".format(self._name, key)) - super(BaseDict, self).__setitem__(key, value) + super().__setitem__(key, value) value._instance = self._instance return value @@ -115,13 +115,13 @@ class BaseList(list): if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseList, self).__init__(list_items) + super().__init__(list_items) def __getitem__(self, key): # change index to positive value because MongoDB does not support negative one if isinstance(key, int) and key < 0: key = len(self) + key - value = super(BaseList, self).__getitem__(key) + value = super().__getitem__(key) if isinstance(key, slice): # When receiving a slice operator, we don't convert the structure and bind @@ -134,18 +134,17 @@ class BaseList(list): elif isinstance(value, dict) and not isinstance(value, BaseDict): # Replace dict by BaseDict value = BaseDict(value, None, "{}.{}".format(self._name, key)) - super(BaseList, self).__setitem__(key, value) + super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): # Replace list by BaseList value = BaseList(value, None, "{}.{}".format(self._name, key)) - super(BaseList, self).__setitem__(key, value) + super().__setitem__(key, value) value._instance = self._instance return value def __iter__(self): - for v in super(BaseList, self).__iter__(): - yield v + yield from super().__iter__() def __getstate__(self): self.instance = None @@ -163,7 +162,7 @@ class BaseList(list): # instead, we simply marks the whole list as changed changed_key = None - result = super(BaseList, self).__setitem__(key, value) + result = super().__setitem__(key, value) self._mark_as_changed(changed_key) return result @@ -190,7 +189,7 @@ class BaseList(list): class EmbeddedDocumentList(BaseList): def __init__(self, list_items, instance, name): - super(EmbeddedDocumentList, self).__init__(list_items, instance, name) + super().__init__(list_items, instance, name) self._instance = instance @classmethod @@ -355,7 +354,7 @@ class EmbeddedDocumentList(BaseList): return len(values) -class StrictDict(object): +class StrictDict: __slots__ = () _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} _classes = {} @@ -455,9 +454,7 @@ class LazyReference(DBRef): self.document_type = document_type self._cached_doc = cached_doc self.passthrough = passthrough - super(LazyReference, self).__init__( - self.document_type._get_collection_name(), pk - ) + super().__init__(self.document_type._get_collection_name(), pk) def __getitem__(self, name): if not self.passthrough: diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 83c12a96..e697fe40 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -30,7 +30,7 @@ __all__ = ("BaseDocument", "NON_FIELD_ERRORS") NON_FIELD_ERRORS = "__all__" -class BaseDocument(object): +class BaseDocument: # TODO simplify how `_changed_fields` is used. # Currently, handling of `_changed_fields` seems unnecessarily convoluted: # 1. 
`BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's @@ -161,7 +161,7 @@ class BaseDocument(object): default = default() setattr(self, field_name, default) else: - super(BaseDocument, self).__delattr__(*args, **kwargs) + super().__delattr__(*args, **kwargs) def __setattr__(self, name, value): # Handle dynamic data only if an initialised dynamic document @@ -208,9 +208,9 @@ class BaseDocument(object): and self__created and name == self._meta.get("id_field") ): - super(BaseDocument, self).__setattr__("_created", False) + super().__setattr__("_created", False) - super(BaseDocument, self).__setattr__(name, value) + super().__setattr__(name, value) def __getstate__(self): data = {} diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 2fadcfe1..e44b5744 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -13,7 +13,7 @@ from mongoengine.errors import DeprecatedError, ValidationError __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") -class BaseField(object): +class BaseField: """A base class for fields in a MongoDB document. Instances of this class may be added to subclasses of `Document` to define a document's schema. @@ -310,7 +310,7 @@ class ComplexBaseField(BaseField): if hasattr(instance._data[self.name], "_dereferenced"): instance._data[self.name]._dereferenced = True - value = super(ComplexBaseField, self).__get__(instance, owner) + value = super().__get__(instance, owner) # Convert lists / values so we can watch for any changes on them if isinstance(value, (list, tuple)): @@ -541,7 +541,7 @@ class GeoJsonBaseField(BaseField): self._name = "%sField" % self._type if not auto_index: self._geo_index = False - super(GeoJsonBaseField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate(self, value): """Validate the GeoJson object based on its type.""" diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 473e6b18..ce24ff58 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -22,7 +22,7 @@ class DocumentMetaclass(type): # TODO lower complexity of this method def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(DocumentMetaclass, mcs).__new__ + super_new = super().__new__ # If a base class just call super metaclass = attrs.get("my_metaclass") @@ -231,8 +231,7 @@ class DocumentMetaclass(type): if base is object: continue yield base - for child_base in mcs.__get_bases(base.__bases__): - yield child_base + yield from mcs.__get_bases(base.__bases__) @classmethod def _import_classes(mcs): @@ -250,7 +249,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(TopLevelDocumentMetaclass, mcs).__new__ + super_new = super().__new__ # Set default _meta data if base class, otherwise get user defined meta if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: diff --git a/mongoengine/base/utils.py b/mongoengine/base/utils.py index 8f27ee14..7753ad50 100644 --- a/mongoengine/base/utils.py +++ b/mongoengine/base/utils.py @@ -1,7 +1,7 @@ import re -class LazyRegexCompiler(object): +class LazyRegexCompiler: """Descriptor to allow lazy compilation of regex""" def __init__(self, pattern, flags=0): diff --git a/mongoengine/connection.py b/mongoengine/connection.py index b03e0b1d..13d170ec 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -395,8 +395,8 @@ def 
connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): if new_conn_settings != prev_conn_setting: err_msg = ( - u"A different connection with alias `{}` was already " - u"registered. Use disconnect() first" + "A different connection with alias `{}` was already " + "registered. Use disconnect() first" ).format(alias) raise ConnectionFailure(err_msg) else: diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 5e9a6e8b..8bfb902b 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -16,7 +16,7 @@ __all__ = ( ) -class switch_db(object): +class switch_db: """switch_db alias context manager. Example :: @@ -57,7 +57,7 @@ class switch_db(object): self.cls._collection = self.collection -class switch_collection(object): +class switch_collection: """switch_collection alias context manager. Example :: @@ -99,7 +99,7 @@ class switch_collection(object): self.cls._get_collection_name = self.ori_get_collection_name -class no_dereference(object): +class no_dereference: """no_dereference context manager. Turns off all dereferencing in Documents for the duration of the context @@ -139,7 +139,7 @@ class no_dereference(object): return self.cls -class no_sub_classes(object): +class no_sub_classes: """no_sub_classes context manager. Only returns instances of this class and no sub (inherited) classes:: @@ -167,7 +167,7 @@ class no_sub_classes(object): self.cls._subclasses = self.cls_initial_subclasses -class query_counter(object): +class query_counter: """Query_counter context manager to get the number of queries. This works by updating the `profiling_level` of the database so that all queries get logged, resetting the db.system.profile collection at the beginning of the context and counting the new entries. @@ -234,7 +234,7 @@ class query_counter(object): def __repr__(self): """repr query_counter as the number of queries.""" - return u"%s" % self._get_count() + return "%s" % self._get_count() def _get_count(self): """Get the number of queries by counting the current number of entries in db.system.profile diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 3756d84a..ff608a3b 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -14,7 +14,7 @@ from mongoengine.fields import DictField, ListField, MapField, ReferenceField from mongoengine.queryset import QuerySet -class DeReference(object): +class DeReference: def __call__(self, items, max_depth=1, instance=None, name=None): """ Cheaply dereferences the items to a set depth. 
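The pyupgrade --py3-plus pass also rewrites every super(Class, self) call to the zero-argument form. On Python 3 the two spellings are equivalent; a small self-contained example (illustrative class names only):

    class Base:
        def greet(self):
            return "base"

    class Child(Base):
        def greet(self):
            # identical to super(Child, self).greet() on Python 3
            return super().greet() + "/child"

    assert Child().greet() == "base/child"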
diff --git a/mongoengine/document.py b/mongoengine/document.py index e13918c2..9166a959 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -79,7 +79,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): __hash__ = None def __init__(self, *args, **kwargs): - super(EmbeddedDocument, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._instance = None self._changed_fields = [] @@ -92,7 +92,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): return not self.__eq__(other) def to_mongo(self, *args, **kwargs): - data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # remove _id from the SON if it's in it and it's None if "_id" in data and data["_id"] is None: @@ -256,7 +256,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): return db.create_collection(collection_name, **opts) def to_mongo(self, *args, **kwargs): - data = super(Document, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # If '_id' is None, try and set it from self._data. If that # doesn't exist either, remove '_id' from the SON completely. @@ -427,14 +427,14 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): self.cascade_save(**kwargs) except pymongo.errors.DuplicateKeyError as err: - message = u"Tried to save duplicate unique keys (%s)" + message = "Tried to save duplicate unique keys (%s)" raise NotUniqueError(message % err) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u"Tried to save duplicate unique keys (%s)" + message = "Tried to save duplicate unique keys (%s)" raise NotUniqueError(message % err) raise OperationError(message % err) @@ -639,7 +639,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): write_concern=write_concern, _from_doc_delete=True ) except pymongo.errors.OperationFailure as err: - message = u"Could not delete document (%s)" % err.message + message = "Could not delete document (%s)" % err.message raise OperationError(message) signals.post_delete.send(self.__class__, document=self, **signal_kwargs) @@ -988,10 +988,10 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): indexes.append(index) # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed - if [(u"_id", 1)] not in indexes: - indexes.append([(u"_id", 1)]) + if [("_id", 1)] not in indexes: + indexes.append([("_id", 1)]) if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): - indexes.append([(u"_cls", 1)]) + indexes.append([("_cls", 1)]) return indexes @@ -1015,14 +1015,14 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): extra = [index for index in existing if index not in required] # if { _cls: 1 } is missing, make sure it's *really* necessary - if [(u"_cls", 1)] in missing: + if [("_cls", 1)] in missing: cls_obsolete = False for index in existing: if includes_cls(index) and index not in extra: cls_obsolete = True break if cls_obsolete: - missing.remove([(u"_cls", 1)]) + missing.remove([("_cls", 1)]) return {"missing": missing, "extra": extra} @@ -1055,7 +1055,7 @@ class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): setattr(self, field_name, None) self._dynamic_fields[field_name].null = False else: - super(DynamicDocument, self).__delattr__(*args, **kwargs) + 
super().__delattr__(*args, **kwargs) class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): @@ -1083,7 +1083,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): setattr(self, field_name, None) -class MapReduceDocument(object): +class MapReduceDocument: """A document returned from a map/reduce query. :param collection: An instance of :class:`~pymongo.Collection` diff --git a/mongoengine/errors.py b/mongoengine/errors.py index 76c25773..95564ff9 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -85,7 +85,7 @@ class ValidationError(AssertionError): _message = None def __init__(self, message="", **kwargs): - super(ValidationError, self).__init__(message) + super().__init__(message) self.errors = kwargs.get("errors", {}) self.field_name = kwargs.get("field_name") self.message = message @@ -97,7 +97,7 @@ class ValidationError(AssertionError): return "{}({},)".format(self.__class__.__name__, self.message) def __getattribute__(self, name): - message = super(ValidationError, self).__getattribute__(name) + message = super().__getattribute__(name) if name == "message": if self.field_name: message = "%s" % message diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 91f85d26..b4cf4d25 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -103,7 +103,7 @@ class StringField(BaseField): self.regex = re.compile(regex) if regex else None self.max_length = max_length self.min_length = min_length - super(StringField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if isinstance(value, str): @@ -151,7 +151,7 @@ class StringField(BaseField): # escape unsafe characters which could lead to a re.error value = re.escape(value) value = re.compile(regex % value, flags) - return super(StringField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class URLField(StringField): @@ -175,17 +175,17 @@ class URLField(StringField): def __init__(self, url_regex=None, schemes=None, **kwargs): self.url_regex = url_regex or self._URL_REGEX self.schemes = schemes or self._URL_SCHEMES - super(URLField, self).__init__(**kwargs) + super().__init__(**kwargs) def validate(self, value): # Check first if the scheme is valid scheme = value.split("://")[0].lower() if scheme not in self.schemes: - self.error(u"Invalid scheme {} in URL: {}".format(scheme, value)) + self.error("Invalid scheme {} in URL: {}".format(scheme, value)) # Then check full URL if not self.url_regex.match(value): - self.error(u"Invalid URL: {}".format(value)) + self.error("Invalid URL: {}".format(value)) class EmailField(StringField): @@ -218,7 +218,7 @@ class EmailField(StringField): re.IGNORECASE, ) - error_msg = u"Invalid email address: %s" + error_msg = "Invalid email address: %s" def __init__( self, @@ -242,7 +242,7 @@ class EmailField(StringField): self.domain_whitelist = domain_whitelist or [] self.allow_utf8_user = allow_utf8_user self.allow_ip_domain = allow_ip_domain - super(EmailField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate_user_part(self, user_part): """Validate the user part of the email address. 
Return True if @@ -269,13 +269,13 @@ class EmailField(StringField): try: socket.inet_pton(addr_family, domain_part[1:-1]) return True - except (socket.error, UnicodeEncodeError): + except (OSError, UnicodeEncodeError): pass return False def validate(self, value): - super(EmailField, self).validate(value) + super().validate(value) if "@" not in value: self.error(self.error_msg % value) @@ -310,7 +310,7 @@ class IntField(BaseField): def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(IntField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -335,7 +335,7 @@ class IntField(BaseField): if value is None: return value - return super(IntField, self).prepare_query_value(op, int(value)) + return super().prepare_query_value(op, int(value)) class LongField(BaseField): @@ -343,7 +343,7 @@ class LongField(BaseField): def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(LongField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -371,7 +371,7 @@ class LongField(BaseField): if value is None: return value - return super(LongField, self).prepare_query_value(op, int(value)) + return super().prepare_query_value(op, int(value)) class FloatField(BaseField): @@ -379,7 +379,7 @@ class FloatField(BaseField): def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(FloatField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -408,7 +408,7 @@ class FloatField(BaseField): if value is None: return value - return super(FloatField, self).prepare_query_value(op, float(value)) + return super().prepare_query_value(op, float(value)) class DecimalField(BaseField): @@ -455,7 +455,7 @@ class DecimalField(BaseField): self.precision = precision self.rounding = rounding - super(DecimalField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if value is None: @@ -493,7 +493,7 @@ class DecimalField(BaseField): self.error("Decimal value is too large") def prepare_query_value(self, op, value): - return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class BooleanField(BaseField): @@ -533,7 +533,7 @@ class DateTimeField(BaseField): def validate(self, value): new_value = self.to_mongo(value) if not isinstance(new_value, (datetime.datetime, datetime.date)): - self.error(u'cannot parse date "%s"' % value) + self.error('cannot parse date "%s"' % value) def to_mongo(self, value): if value is None: @@ -590,19 +590,19 @@ class DateTimeField(BaseField): return None def prepare_query_value(self, op, value): - return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class DateField(DateTimeField): def to_mongo(self, value): - value = super(DateField, self).to_mongo(value) + value = super().to_mongo(value) # drop hours, minutes, seconds if isinstance(value, datetime.datetime): value = datetime.datetime(value.year, value.month, value.day) return value def to_python(self, value): - value = super(DateField, self).to_python(value) + value = super().to_python(value) # convert datetime to date if isinstance(value, datetime.datetime): value = datetime.date(value.year, value.month, value.day) @@ -636,7 +636,7 @@ class 
ComplexDateTimeField(StringField): """ self.separator = separator self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"]) - super(ComplexDateTimeField, self).__init__(**kwargs) + super().__init__(**kwargs) def _convert_from_datetime(self, val): """ @@ -667,14 +667,14 @@ class ComplexDateTimeField(StringField): if instance is None: return self - data = super(ComplexDateTimeField, self).__get__(instance, owner) + data = super().__get__(instance, owner) if isinstance(data, datetime.datetime) or data is None: return data return self._convert_from_string(data) def __set__(self, instance, value): - super(ComplexDateTimeField, self).__set__(instance, value) + super().__set__(instance, value) value = instance._data[self.name] if value is not None: instance._data[self.name] = self._convert_from_datetime(value) @@ -696,9 +696,7 @@ class ComplexDateTimeField(StringField): return self._convert_from_datetime(value) def prepare_query_value(self, op, value): - return super(ComplexDateTimeField, self).prepare_query_value( - op, self._convert_from_datetime(value) - ) + return super().prepare_query_value(op, self._convert_from_datetime(value)) class EmbeddedDocumentField(BaseField): @@ -718,7 +716,7 @@ class EmbeddedDocumentField(BaseField): ) self.document_type_obj = document_type - super(EmbeddedDocumentField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): @@ -779,7 +777,7 @@ class EmbeddedDocumentField(BaseField): "Querying the embedded document '%s' failed, due to an invalid query value" % (self.document_type._class_name,) ) - super(EmbeddedDocumentField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) @@ -795,9 +793,7 @@ class GenericEmbeddedDocumentField(BaseField): """ def prepare_query_value(self, op, value): - return super(GenericEmbeddedDocumentField, self).prepare_query_value( - op, self.to_mongo(value) - ) + return super().prepare_query_value(op, self.to_mongo(value)) def to_python(self, value): if isinstance(value, dict): @@ -885,7 +881,7 @@ class DynamicField(BaseField): value = doc_cls._get_db().dereference(value["_ref"]) return doc_cls._from_son(value) - return super(DynamicField, self).to_python(value) + return super().to_python(value) def lookup_member(self, member_name): return member_name @@ -893,7 +889,7 @@ class DynamicField(BaseField): def prepare_query_value(self, op, value): if isinstance(value, str): return StringField().prepare_query_value(op, value) - return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) def validate(self, value, clean=True): if hasattr(value, "validate"): @@ -914,7 +910,7 @@ class ListField(ComplexBaseField): self.field = field self.max_length = max_length kwargs.setdefault("default", lambda: []) - super(ListField, self).__init__(**kwargs) + super().__init__(**kwargs) def __get__(self, instance, owner): if instance is None: @@ -928,7 +924,7 @@ class ListField(ComplexBaseField): and value ): instance._data[self.name] = [self.field.build_lazyref(x) for x in value] - return super(ListField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): """Make sure that a list of valid fields is being used.""" @@ -942,7 +938,7 @@ class ListField(ComplexBaseField): if self.max_length is not None and len(value) > self.max_length: self.error("List is too long") - super(ListField, self).validate(value) + super().validate(value) def 
prepare_query_value(self, op, value): # Validate that the `set` operator doesn't contain more items than `max_length`. @@ -963,7 +959,7 @@ class ListField(ComplexBaseField): return self.field.prepare_query_value(op, value) - return super(ListField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class EmbeddedDocumentListField(ListField): @@ -984,9 +980,7 @@ class EmbeddedDocumentListField(ListField): :param kwargs: Keyword arguments passed directly into the parent :class:`~mongoengine.ListField`. """ - super(EmbeddedDocumentListField, self).__init__( - field=EmbeddedDocumentField(document_type), **kwargs - ) + super().__init__(field=EmbeddedDocumentField(document_type), **kwargs) class SortedListField(ListField): @@ -1012,10 +1006,10 @@ class SortedListField(ListField): self._ordering = kwargs.pop("ordering") if "reverse" in kwargs.keys(): self._order_reverse = kwargs.pop("reverse") - super(SortedListField, self).__init__(field, **kwargs) + super().__init__(field, **kwargs) def to_mongo(self, value, use_db_field=True, fields=None): - value = super(SortedListField, self).to_mongo(value, use_db_field, fields) + value = super().to_mongo(value, use_db_field, fields) if self._ordering is not None: return sorted( value, key=itemgetter(self._ordering), reverse=self._order_reverse @@ -1068,7 +1062,7 @@ class DictField(ComplexBaseField): self._auto_dereference = False kwargs.setdefault("default", lambda: {}) - super(DictField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate(self, value): """Make sure that a list of valid fields is being used.""" @@ -1090,7 +1084,7 @@ class DictField(ComplexBaseField): self.error( 'Invalid dictionary key name - keys may not startswith "$" characters' ) - super(DictField, self).validate(value) + super().validate(value) def lookup_member(self, member_name): return DictField(db_field=member_name) @@ -1119,7 +1113,7 @@ class DictField(ComplexBaseField): } return self.field.prepare_query_value(op, value) - return super(DictField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class MapField(DictField): @@ -1134,7 +1128,7 @@ class MapField(DictField): # XXX ValidationError raised outside of the "validate" method. 
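For reference, MapField wraps another field type and applies that field's validation to every value in the mapping. A usage sketch with illustrative names, not taken from this patch:

    from mongoengine import Document, IntField, MapField

    class GameScores(Document):
        # e.g. {"level_1": 10, "level_2": 42}; each value is validated by IntField
        per_level = MapField(IntField(min_value=0))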
if not isinstance(field, BaseField): self.error("Argument to MapField constructor must be a valid field") - super(MapField, self).__init__(field=field, *args, **kwargs) + super().__init__(field=field, *args, **kwargs) class ReferenceField(BaseField): @@ -1205,7 +1199,7 @@ class ReferenceField(BaseField): self.dbref = dbref self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(ReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): @@ -1238,7 +1232,7 @@ class ReferenceField(BaseField): else: instance._data[self.name] = cls._from_son(dereferenced) - return super(ReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document): if isinstance(document, DBRef): @@ -1289,7 +1283,7 @@ class ReferenceField(BaseField): def prepare_query_value(self, op, value): if value is None: return None - super(ReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def validate(self, value): @@ -1336,7 +1330,7 @@ class CachedReferenceField(BaseField): self.auto_sync = auto_sync self.document_type_obj = document_type self.fields = fields - super(CachedReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) def start_listener(self): from mongoengine import signals @@ -1394,7 +1388,7 @@ class CachedReferenceField(BaseField): else: instance._data[self.name] = self.document_type._from_son(dereferenced) - return super(CachedReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document, use_db_field=True, fields=None): id_field_name = self.document_type._meta["id_field"] @@ -1493,7 +1487,7 @@ class GenericReferenceField(BaseField): def __init__(self, *args, **kwargs): choices = kwargs.pop("choices", None) - super(GenericReferenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.choices = [] # Keep the choices as a list of allowed Document class names if choices: @@ -1517,7 +1511,7 @@ class GenericReferenceField(BaseField): value = value.get("_cls") elif isinstance(value, Document): value = value._class_name - super(GenericReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def __get__(self, instance, owner): if instance is None: @@ -1533,7 +1527,7 @@ class GenericReferenceField(BaseField): else: instance._data[self.name] = dereferenced - return super(GenericReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if not isinstance(value, (Document, DBRef, dict, SON)): @@ -1597,13 +1591,13 @@ class BinaryField(BaseField): def __init__(self, max_bytes=None, **kwargs): self.max_bytes = max_bytes - super(BinaryField, self).__init__(**kwargs) + super().__init__(**kwargs) def __set__(self, instance, value): """Handle bytearrays in python 3.1""" if isinstance(value, bytearray): value = bytes(value) - return super(BinaryField, self).__set__(instance, value) + return super().__set__(instance, value) def to_mongo(self, value): return Binary(value) @@ -1621,14 +1615,14 @@ class BinaryField(BaseField): def prepare_query_value(self, op, value): if value is None: return value - return super(BinaryField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class GridFSError(Exception): pass -class GridFSProxy(object): +class GridFSProxy: """Proxy object to handle writing and reading 
of files to and from GridFS .. versionadded:: 0.4 @@ -1808,7 +1802,7 @@ class FileField(BaseField): def __init__( self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs ): - super(FileField, self).__init__(**kwargs) + super().__init__(**kwargs) self.collection_name = collection_name self.db_alias = db_alias @@ -1953,7 +1947,7 @@ class ImageGridFsProxy(GridFSProxy): img.save(io, img_format, progressive=progressive) io.seek(0) - return super(ImageGridFsProxy, self).put( + return super().put( io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs ) @@ -1963,7 +1957,7 @@ class ImageGridFsProxy(GridFSProxy): if out and out.thumbnail_id: self.fs.delete(out.thumbnail_id) - return super(ImageGridFsProxy, self).delete() + return super().delete() def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): w, h = thumbnail.size @@ -2042,7 +2036,7 @@ class ImageField(FileField): setattr(self, att_name, value) - super(ImageField, self).__init__(collection_name=collection_name, **kwargs) + super().__init__(collection_name=collection_name, **kwargs) class SequenceField(BaseField): @@ -2094,7 +2088,7 @@ class SequenceField(BaseField): self.value_decorator = ( value_decorator if callable(value_decorator) else self.VALUE_DECORATOR ) - super(SequenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def generate(self): """ @@ -2155,7 +2149,7 @@ class SequenceField(BaseField): ) def __get__(self, instance, owner): - value = super(SequenceField, self).__get__(instance, owner) + value = super().__get__(instance, owner) if value is None and instance._initialised: value = self.generate() instance._data[self.name] = value @@ -2168,7 +2162,7 @@ class SequenceField(BaseField): if value is None and instance._initialised: value = self.generate() - return super(SequenceField, self).__set__(instance, value) + return super().__set__(instance, value) def prepare_query_value(self, op, value): """ @@ -2202,7 +2196,7 @@ class UUIDField(BaseField): .. 
versionchanged:: 0.6.19 """ self._binary = binary - super(UUIDField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if not self._binary: @@ -2440,7 +2434,7 @@ class LazyReferenceField(BaseField): self.passthrough = passthrough self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(LazyReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): @@ -2483,7 +2477,7 @@ class LazyReferenceField(BaseField): if value: instance._data[self.name] = value - return super(LazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, value): if isinstance(value, LazyReference): @@ -2547,7 +2541,7 @@ class LazyReferenceField(BaseField): def prepare_query_value(self, op, value): if value is None: return None - super(LazyReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def lookup_member(self, member_name): @@ -2574,12 +2568,12 @@ class GenericLazyReferenceField(GenericReferenceField): def __init__(self, *args, **kwargs): self.passthrough = kwargs.pop("passthrough", False) - super(GenericLazyReferenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def _validate_choices(self, value): if isinstance(value, LazyReference): value = value.document_type._class_name - super(GenericLazyReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def build_lazyref(self, value): if isinstance(value, LazyReference): @@ -2608,7 +2602,7 @@ class GenericLazyReferenceField(GenericReferenceField): if value: instance._data[self.name] = value - return super(GenericLazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if isinstance(value, LazyReference) and value.pk is None: @@ -2616,7 +2610,7 @@ class GenericLazyReferenceField(GenericReferenceField): "You can only reference documents once they have been" " saved to the database" ) - return super(GenericLazyReferenceField, self).validate(value) + return super().validate(value) def to_mongo(self, document): if document is None: @@ -2635,4 +2629,4 @@ class GenericLazyReferenceField(GenericReferenceField): ) ) else: - return super(GenericLazyReferenceField, self).to_mongo(document) + return super().to_mongo(document) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index e6901100..4dbf7d47 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -37,7 +37,7 @@ DENY = 3 PULL = 4 -class BaseQuerySet(object): +class BaseQuerySet: """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. """ @@ -262,7 +262,7 @@ class BaseQuerySet(object): # If we were able to retrieve the 2nd doc, rewind the cursor and # raise the MultipleObjectsReturned exception. 
queryset.rewind() - message = u"%d items returned, instead of 1" % queryset.count() + message = "%d items returned, instead of 1" % queryset.count() raise queryset._document.MultipleObjectsReturned(message) def create(self, **kwargs): @@ -351,14 +351,14 @@ class BaseQuerySet(object): except pymongo.errors.BulkWriteError as err: # inserting documents that already have an _id field will # give huge performance debt or raise - message = u"Bulk write error: (%s)" + message = "Bulk write error: (%s)" raise BulkWriteError(message % err.details) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u"Tried to save duplicate unique keys (%s)" + message = "Tried to save duplicate unique keys (%s)" raise NotUniqueError(message % err) raise OperationError(message % err) @@ -655,9 +655,9 @@ class BaseQuerySet(object): **self._cursor_args ) except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u"Update failed (%s)" % err) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - raise OperationError(u"Update failed (%s)" % err) + raise OperationError("Update failed (%s)" % err) if full_response: if result["value"] is not None: @@ -686,7 +686,7 @@ class BaseQuerySet(object): return queryset.filter(pk=object_id).first() def in_bulk(self, object_ids): - """Retrieve a set of documents by their ids. + """"Retrieve a set of documents by their ids. :param object_ids: a list or tuple of ``ObjectId``\ s :rtype: dict of ObjectIds as keys and collection-specific @@ -1922,7 +1922,7 @@ class BaseQuerySet(object): field_name = match.group(1).split(".") fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript - return u'["%s"]' % fields[-1].db_field + return '["%s"]' % fields[-1].db_field def field_path_sub(match): # Extract just the field name, and look up the field objects diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index e0d8e322..443c895c 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -1,7 +1,7 @@ __all__ = ("QueryFieldList",) -class QueryFieldList(object): +class QueryFieldList: """Object that handles combinations of .only() and .exclude() calls""" ONLY = 1 diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index 5067ffbf..699526fd 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -4,7 +4,7 @@ from mongoengine.queryset.queryset import QuerySet __all__ = ("queryset_manager", "QuerySetManager") -class QuerySetManager(object): +class QuerySetManager: """ The default QuerySet Manager. 
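The mechanical change applied across these files is the Python 3 zero-argument form of ``super()``, which resolves the enclosing class and instance implicitly. A minimal sketch of the pattern, using a hypothetical field subclass rather than any class touched by this patch::

    from mongoengine import StringField

    class UpperStringField(StringField):
        """Hypothetical subclass, shown only to illustrate the super() style."""

        def prepare_query_value(self, op, value):
            # Python 2 spelling: super(UpperStringField, self).prepare_query_value(op, value)
            value = value.upper() if isinstance(value, str) else value
            return super().prepare_query_value(op, value)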
diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 39b09c9d..8b5872f8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -141,10 +141,10 @@ class QuerySet(BaseQuerySet): getting the count """ if with_limit_and_skip is False: - return super(QuerySet, self).count(with_limit_and_skip) + return super().count(with_limit_and_skip) if self._len is None: - self._len = super(QuerySet, self).count(with_limit_and_skip) + self._len = super().count(with_limit_and_skip) return self._len diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 72e36ac0..0eacc2ef 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -7,7 +7,7 @@ from mongoengine.queryset import transform __all__ = ("Q", "QNode") -class QNodeVisitor(object): +class QNodeVisitor: """Base visitor class for visiting Q-object nodes in a query tree. """ @@ -79,7 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): return transform.query(self.document, **query.query) -class QNode(object): +class QNode: """Base class for nodes in query trees.""" AND = 0 diff --git a/mongoengine/signals.py b/mongoengine/signals.py index 0db63604..582b533d 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -15,11 +15,11 @@ try: signals_available = True except ImportError: - class Namespace(object): + class Namespace: def signal(self, name, doc=None): return _FakeSignal(name, doc) - class _FakeSignal(object): + class _FakeSignal: """If blinker is unavailable, create a fake class with the same interface that allows sending of signals but will fail with an error on anything else. Instead of doing anything on send, it diff --git a/setup.py b/setup.py index fe3253ae..b6c8ea64 100644 --- a/setup.py +++ b/setup.py @@ -115,7 +115,7 @@ extra_opts = { "pytest-cov", "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", - "Pillow>=2.0.0", + "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support ], } From b57946ec989defa457b70a2b18bc0543a54bae40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 14 Mar 2020 21:39:47 +0100 Subject: [PATCH 176/216] remove virtualenv installation in travis --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index d34f8a36..5d04571a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -60,8 +60,7 @@ install: - pip install --upgrade pip - pip install coveralls - pip install flake8 flake8-import-order - - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0 - - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) + - pip install tox # tox dryrun to setup the tox venv (we run a mock test). - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" # Install black for Python v3.7 only. 
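For context on the ``QuerySet.count()`` override above: the cached ``_len`` is only consulted when ``with_limit_and_skip`` is True; the default call always goes back to the server. A hedged usage sketch, where ``BlogPost`` and its ``author`` field are hypothetical::

    qs = BlogPost.objects(author="alice")
    qs.count()                          # default: always queries the server
    qs.count(with_limit_and_skip=True)  # computed once and cached on the queryset
    qs.count(with_limit_and_skip=True)  # reuses the cached value, no extra round trip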
From aa4a6ae0234be23f237f68c3061f9db28c274741 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 15 Mar 2020 21:02:44 +0100 Subject: [PATCH 177/216] Fix invalid escape seq in codebase --- mongoengine/document.py | 2 +- mongoengine/queryset/base.py | 6 +++--- tests/fields/test_complex_datetime_field.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 5e812510..3cc0046e 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -56,7 +56,7 @@ class InvalidCollectionError(Exception): class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): - """A :class:`~mongoengine.Document` that isn't stored in its own + r"""A :class:`~mongoengine.Document` that isn't stored in its own collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as fields on :class:`~mongoengine.Document`\ s through the :class:`~mongoengine.EmbeddedDocumentField` field type. diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 50cb37ac..7941e970 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -694,8 +694,8 @@ class BaseQuerySet(object): def in_bulk(self, object_ids): """Retrieve a set of documents by their ids. - :param object_ids: a list or tuple of ``ObjectId``\ s - :rtype: dict of ObjectIds as keys and collection-specific + :param object_ids: a list or tuple of ObjectId's + :rtype: dict of ObjectId's as keys and collection-specific Document subclasses as values. .. versionadded:: 0.3 @@ -1140,7 +1140,7 @@ class BaseQuerySet(object): def explain(self): """Return an explain plan record for the - :class:`~mongoengine.queryset.QuerySet`\ 's cursor. + :class:`~mongoengine.queryset.QuerySet` cursor. """ return self._cursor.explain() diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index f0a6b96e..5bd6c56b 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -65,7 +65,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"] assert ( - re.match("^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored) + re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored) is not None ) @@ -74,7 +74,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): "date_with_dots" ] assert ( - re.match("^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None + re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None ) def test_complexdatetime_usage(self): From c0c0efce188b32f4e59da89ead845a17b824b236 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 15 Mar 2020 22:14:26 +0100 Subject: [PATCH 178/216] improve docstring related to #2267 and document the change in the changelog --- docs/changelog.rst | 1 + mongoengine/queryset/base.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8dcea62a..41ff8c85 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -8,6 +8,7 @@ Development - (Fill this out as you fix issues and develop your features). 
- Add Mongo 4.0 to Travis - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 +- Add support for the `elemMatch` projection operator in .fields (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 Changes in 0.19.1 ================= diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 9671a3dc..fbf0a1ba 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1026,9 +1026,11 @@ class BaseQuerySet(object): posts = BlogPost.objects(...).fields(comments=0) - To retrieve a subrange of array elements: + To retrieve a subrange or sublist of array elements, + support exist for both the `slice` and `elemMatch` projection operator: posts = BlogPost.objects(...).fields(slice__comments=5) + posts = BlogPost.objects(...).fields(elemMatch__comments="test") :param kwargs: A set of keyword arguments identifying what to include, exclude, or slice. From a3f9016ae938a91871657373ade2970a0fc157d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 15 Mar 2020 22:27:19 +0100 Subject: [PATCH 179/216] reformat import for flake8 --- mongoengine/context_managers.py | 1 + mongoengine/queryset/base.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 0c58b57c..6891e21d 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -14,6 +14,7 @@ __all__ = ( "no_sub_classes", "query_counter", "set_write_concern", + "set_read_write_concern", ) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 0788f563..d8a4b96f 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -20,8 +20,8 @@ from mongoengine.base import get_document from mongoengine.common import _import_class from mongoengine.connection import get_db from mongoengine.context_managers import ( - set_write_concern, set_read_write_concern, + set_write_concern, switch_db, ) from mongoengine.errors import ( From ad0669a32605bb7d9984b1830349d82c208b3c94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Mon, 16 Mar 2020 22:39:31 +0100 Subject: [PATCH 180/216] update changelog --- docs/changelog.rst | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7f68bbe5..1bfb190d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -8,13 +8,9 @@ Development - (Fill this out as you fix issues and develop your features). 
- Add Mongo 4.0 to Travis - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 -- Add support for the `elemMatch` projection operator in .fields (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - -Changes in 0.19.2 -================= +- Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 - Changes in 0.19.1 ================= - Requires Pillow < 7.0.0 as it dropped Python2 support From aadc6262edc5627bafbef41a7c814fedbe66fdb2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 17 Mar 2020 21:10:52 +0100 Subject: [PATCH 181/216] remove qs.slave_okay() that is deprecated since pymongo3 --- docs/changelog.rst | 1 + mongoengine/queryset/base.py | 16 ---------------- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1bfb190d..b2090645 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,6 +10,7 @@ Development - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 +- Remove method queryset.slave_okay() that was deprecated a while ago and disappeared since pymongo3 Changes in 0.19.1 ================= diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index fbf0a1ba..95606f2a 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -60,7 +60,6 @@ class BaseQuerySet(object): self._ordering = None self._snapshot = False self._timeout = True - self._slave_okay = False self._read_preference = None self._iter = False self._scalar = [] @@ -775,7 +774,6 @@ class BaseQuerySet(object): "_ordering", "_snapshot", "_timeout", - "_slave_okay", "_read_preference", "_iter", "_scalar", @@ -1172,20 +1170,6 @@ class BaseQuerySet(object): queryset._timeout = enabled return queryset - # DEPRECATED. Has no more impact on PyMongo 3+ - def slave_okay(self, enabled): - """Enable or disable the slave_okay when querying. - - :param enabled: whether or not the slave_okay is enabled - - .. deprecated:: Ignored with PyMongo 3+ - """ - msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+." - warnings.warn(msg, DeprecationWarning) - queryset = self.clone() - queryset._slave_okay = enabled - return queryset - def read_preference(self, read_preference): """Change the read_preference when querying. 
From 8eb51790b50858d351068c3f37316d16143d3b90 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 17 Mar 2020 21:26:41 +0100 Subject: [PATCH 182/216] Remove Field(name='...') which was deprecated when db_field was introduced a while ago --- docs/changelog.rst | 1 + mongoengine/base/fields.py | 7 +------ 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1bfb190d..32cfe647 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,6 +10,7 @@ Development - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 +- Remove name parameter in Field constructor e.g `StringField(name="...")`, it was deprecated a while ago in favor of db_field Changes in 0.19.1 ================= diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index cd1039cb..379098e5 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -36,7 +36,6 @@ class BaseField(object): def __init__( self, db_field=None, - name=None, required=False, default=None, unique=False, @@ -51,7 +50,6 @@ class BaseField(object): """ :param db_field: The database field to store this field in (defaults to the name of the field) - :param name: Deprecated - use db_field :param required: If the field is required. Whether it has to have a value or not. Defaults to False. :param default: (optional) The default value for this field if no value @@ -75,11 +73,8 @@ class BaseField(object): existing attributes. Common metadata includes `verbose_name` and `help_text`. 
""" - self.db_field = (db_field or name) if not primary_key else "_id" + self.db_field = db_field if not primary_key else "_id" - if name: - msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' - warnings.warn(msg, DeprecationWarning) self.required = required or primary_key self.default = default self.unique = bool(unique or unique_with) From ee2d50b2d1b3549887512bfaefdb1abff3b831fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 17 Mar 2020 21:38:50 +0100 Subject: [PATCH 183/216] remove drop_dups index option, deprecated with MongoDB3 --- docs/changelog.rst | 6 ++++-- docs/guide/defining-documents.rst | 6 ------ mongoengine/base/metaclasses.py | 1 - mongoengine/document.py | 15 +-------------- tests/document/test_indexes.py | 16 ---------------- tests/document/test_inheritance.py | 1 - 6 files changed, 5 insertions(+), 40 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index a0784050..1037ca13 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,8 +10,10 @@ Development - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 -- Remove name parameter in Field constructor e.g `StringField(name="...")`, it was deprecated a while ago in favor of db_field -- Remove method queryset.slave_okay() that was deprecated a while ago and disappeared since pymongo3 +- Remove methods deprecated years ago: + - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field + - Queryset.slave_okay() was deprecated since pymongo3 + - dropDups was dropped with MongoDB3 Changes in 0.19.1 ================= diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index bd2b43e2..6dc35c30 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -555,7 +555,6 @@ There are a few top level defaults for all indexes that can be set:: 'index_background': True, 'index_cls': False, 'auto_create_index': True, - 'index_drop_dups': True, } @@ -574,11 +573,6 @@ There are a few top level defaults for all indexes that can be set:: in systems where indexes are managed separately. Disabling this will improve performance. -:attr:`index_drop_dups` (Optional) - Set the default value for if an index should drop duplicates - Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning - and has no effect - Compound Indexes and Indexing sub documents ------------------------------------------- diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index e4d26811..3bba796b 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -284,7 +284,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): "indexes": [], # indexes to be ensured at runtime "id_field": None, "index_background": False, - "index_drop_dups": False, "index_opts": None, "delete_rules": None, # allow_inheritance can be True, False, and None. 
True means diff --git a/mongoengine/document.py b/mongoengine/document.py index 3cc0046e..c8710fb5 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -851,17 +851,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): index_spec = cls._build_index_spec(keys) index_spec = index_spec.copy() fields = index_spec.pop("fields") - drop_dups = kwargs.get("drop_dups", False) - if drop_dups: - msg = "drop_dups is deprecated and is removed when using PyMongo 3+." - warnings.warn(msg, DeprecationWarning) index_spec["background"] = background index_spec.update(kwargs) return cls._get_collection().create_index(fields, **index_spec) @classmethod - def ensure_index(cls, key_or_list, drop_dups=False, background=False, **kwargs): + def ensure_index(cls, key_or_list, background=False, **kwargs): """Ensure that the given indexes are in place. Deprecated in favour of create_index. @@ -869,12 +865,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering :param background: Allows index creation in the background - :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value - will be removed if PyMongo3+ is used """ - if drop_dups: - msg = "drop_dups is deprecated and is removed when using PyMongo 3+." - warnings.warn(msg, DeprecationWarning) return cls.create_index(key_or_list, background=background, **kwargs) @classmethod @@ -887,12 +878,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): `auto_create_index` to False in the documents meta data """ background = cls._meta.get("index_background", False) - drop_dups = cls._meta.get("index_drop_dups", False) index_opts = cls._meta.get("index_opts") or {} index_cls = cls._meta.get("index_cls", True) - if drop_dups: - msg = "drop_dups is deprecated and is removed when using PyMongo 3+." - warnings.warn(msg, DeprecationWarning) collection = cls._get_collection() # 746: when connection is via mongos, the read preference is not necessarily an indication that diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index be857b59..b08306a0 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -806,18 +806,6 @@ class TestIndexes(unittest.TestCase): info = Log.objects._collection.index_information() assert 3600 == info["created_1"]["expireAfterSeconds"] - def test_index_drop_dups_silently_ignored(self): - class Customer(Document): - cust_id = IntField(unique=True, required=True) - meta = { - "indexes": ["cust_id"], - "index_drop_dups": True, - "allow_inheritance": False, - } - - Customer.drop_collection() - Customer.objects.first() - def test_unique_and_indexes(self): """Ensure that 'unique' constraints aren't overridden by meta.indexes. 
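With ``dropDups`` gone, uniqueness and index behaviour are declared only through the remaining ``meta`` options. A minimal sketch of the equivalent declaration after this change, modelled on the removed ``Customer`` test document::

    from mongoengine import Document, IntField

    class Customer(Document):
        cust_id = IntField(unique=True, required=True)
        meta = {
            "indexes": ["cust_id"],
            "index_background": True,  # still honoured
            # "index_drop_dups" is no longer accepted; MongoDB 3+ ignores dropDups
        }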
@@ -1058,10 +1046,6 @@ class TestIndexes(unittest.TestCase): del index_info[key][ "ns" ] # drop the index namespace - we don't care about that here, MongoDB 3+ - if "dropDups" in index_info[key]: - del index_info[key][ - "dropDups" - ] # drop the index dropDups - it is deprecated in MongoDB 3+ assert index_info == { "txt_1": {"key": [("txt", 1)], "background": False}, diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index 5072f841..d7bd0632 100644 --- a/tests/document/test_inheritance.py +++ b/tests/document/test_inheritance.py @@ -523,7 +523,6 @@ class TestInheritance(MongoDBTestCase): defaults = { "index_background": True, - "index_drop_dups": True, "index_opts": {"hello": "world"}, "allow_inheritance": True, "queryset_class": "QuerySet", From 3d80637fa414354b2d05b0ca6ac3afb71222d8f1 Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Tue, 17 Mar 2020 19:08:43 -0300 Subject: [PATCH 184/216] Refactor set_read_write_concern so read_conern is consistent with the write_concerns argument. --- mongoengine/context_managers.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 6891e21d..a87c9bb3 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -261,10 +261,14 @@ def set_write_concern(collection, write_concerns): @contextmanager -def set_read_write_concern(collection, write_concerns, read_concern): +def set_read_write_concern(collection, write_concerns, read_concerns): combined_write_concerns = dict(collection.write_concern.document.items()) combined_write_concerns.update(write_concerns) + combined_read_concerns = dict(collection.read_concern.document.items()) + combined_read_concerns.update(read_concerns) + yield collection.with_options( - write_concern=WriteConcern(**combined_write_concerns), read_concern=read_concern + write_concern=WriteConcern(**combined_write_concerns), + read_concern=ReadConcern(**combined_read_concerns) ) From 4c62a060f0c460b1d68ebba861eb7a3fd9f9e14c Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Tue, 17 Mar 2020 19:37:21 -0300 Subject: [PATCH 185/216] Add tests for set_write_conern and set_read_write_concern --- tests/test_context_managers.py | 48 ++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index fa3f5960..f445cf57 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -10,11 +10,59 @@ from mongoengine.context_managers import ( query_counter, switch_collection, switch_db, + set_write_concern, + set_read_write_concern, ) from mongoengine.pymongo_support import count_documents class TestContextManagers: + def test_set_write_concern(self): + connect("mongoenginetest") + + class User(Document): + name = StringField() + + collection = User._get_collection() + original_write_concern = collection.write_concern + + with set_write_concern( + collection, {"w": "majority", "j": True, "wtimeout": 1234} + ) as updated_collection: + assert updated_collection.write_concern.document == { + "w": "majority", + "j": True, + "wtimeout": 1234, + } + + assert original_write_concern.document == collection.write_concern.document + + def test_set_read_write_concern(self): + connect("mongoenginetest") + + class User(Document): + name = StringField() + + collection = User._get_collection() + + original_read_concern = collection.read_concern + original_write_concern = collection.write_concern + + with 
set_read_write_concern( + collection, + {"w": "majority", "j": True, "wtimeout": 1234}, + {"level": "local"}, + ) as update_collection: + assert update_collection.read_concern.document == {"level": "local"} + assert update_collection.write_concern.document == { + "w": "majority", + "j": True, + "wtimeout": 1234, + } + + assert original_read_concern.document == collection.read_concern.document + assert original_write_concern.document == collection.write_concern.document + def test_switch_db_context_manager(self): connect("mongoenginetest") register_connection("testdb-1", "mongoenginetest2") From 476b07af6ebe26e0418d1ab8da9e8179794ddc4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 17 Mar 2020 23:59:54 +0100 Subject: [PATCH 186/216] reformat changelog --- docs/changelog.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9a844761..9b864b02 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,8 +7,6 @@ Development =========== - (Fill this out as you fix issues and develop your features). - Add Mongo 4.0 to Travis -- BREAKING CHANGE: Removed ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes`` that were deprecated in 2013. - ``Document.ensure_indexes`` still exists and is the right method to use - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 @@ -16,6 +14,7 @@ Development - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field - Queryset.slave_okay() was deprecated since pymongo3 - dropDups was dropped with MongoDB3 + - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes`` Changes in 0.19.1 ================= From af35b25d155e050cba3e4925bf76529f7ae52137 Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Tue, 17 Mar 2020 21:28:08 -0300 Subject: [PATCH 187/216] Refactor read_preference to accept a dictionary instead of a ReadPreference instance. --- mongoengine/queryset/base.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index d8a4b96f..f8671f8f 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -5,6 +5,8 @@ import itertools import re import warnings +from collections.abc import Mapping + from bson import SON, json_util from bson.code import Code import pymongo @@ -1221,11 +1223,11 @@ class BaseQuerySet(object): :param read_concern: override ReplicaSetConnection-level preference. """ - if read_concern is not None and not isinstance(read_concern, ReadConcern): - raise TypeError("%r is not a read concern." % (read_concern,)) + if read_concern is not None and not isinstance(read_concern, Mapping): + raise TypeError("%r is not a valid read concern." 
% (read_concern,)) queryset = self.clone() - queryset._read_concern = read_concern + queryset._read_concern = ReadConcern(**read_concern) queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_concern return queryset From 8913a74a86e01c3a96aaebf3372b1898c4efe060 Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Tue, 17 Mar 2020 21:31:05 -0300 Subject: [PATCH 188/216] Allow setting the read concern to None --- mongoengine/queryset/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index f8671f8f..76427c89 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1227,7 +1227,7 @@ class BaseQuerySet(object): raise TypeError("%r is not a valid read concern." % (read_concern,)) queryset = self.clone() - queryset._read_concern = ReadConcern(**read_concern) + queryset._read_concern = ReadConcern(**read_concern) if read_concern is not None else None queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_concern return queryset From bc77322c2f166e28390e1492a5a67440f90af021 Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Tue, 17 Mar 2020 21:49:17 -0300 Subject: [PATCH 189/216] Update unit tests --- tests/queryset/test_queryset.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 8098b137..a8954526 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4739,33 +4739,33 @@ class TestQueryset(unittest.TestCase): bars = list(Bar.objects.read_concern(None)) assert bars == [bar] - bars = Bar.objects.read_concern(ReadConcern(level="local")) - assert bars._read_concern == ReadConcern(level="local") - assert bars._cursor.collection.read_concern == ReadConcern(level="local") + bars = Bar.objects.read_concern({"level": "local"}) + assert bars._read_concern.document == {"level": "local"} + assert bars._cursor.collection.read_concern.document == {"level": "local"} - # Make sure that `.read_concern(...)` does accept string values. + # Make sure that `.read_concern(...)` does not accept string values. with pytest.raises(TypeError): Bar.objects.read_concern("local") def assert_read_concern(qs, expected_read_concern): - assert qs._read_concern == expected_read_concern - assert qs._cursor.collection.read_concern == expected_read_concern + assert qs._read_concern.document == expected_read_concern + assert qs._cursor.collection.read_concern.document == expected_read_concern # Make sure read concern is respected after a `.skip(...)`. - bars = Bar.objects.skip(1).read_concern(ReadConcern("majority")) - assert_read_concern(bars, ReadConcern("majority")) + bars = Bar.objects.skip(1).read_concern({"level": "local"}) + assert_read_concern(bars, {"level": "local"}) # Make sure read concern is respected after a `.limit(...)`. - bars = Bar.objects.limit(1).read_concern(ReadConcern("majority")) - assert_read_concern(bars, ReadConcern("majority")) + bars = Bar.objects.limit(1).read_concern({"level": "local"}) + assert_read_concern(bars, {"level": "local"}) # Make sure read concern is respected after an `.order_by(...)`. 
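        # Hedged aside (illustration only, not part of the original test):
        # read_concern() now takes a plain dict, e.g.
        #     Bar.objects.read_concern({"level": "majority"})
        # and anything that is not a Mapping (a bare string, a pymongo
        # ReadConcern instance, ...) raises TypeError.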
- bars = Bar.objects.order_by("txt").read_concern(ReadConcern("majority")) - assert_read_concern(bars, ReadConcern("majority")) + bars = Bar.objects.order_by("txt").read_concern({"level": "local"}) + assert_read_concern(bars, {"level": "local"}) # Make sure read concern is respected after a `.hint(...)`. - bars = Bar.objects.hint([("txt", 1)]).read_concern(ReadConcern("majority")) - assert_read_concern(bars, ReadConcern("majority")) + bars = Bar.objects.hint([("txt", 1)]).read_concern({"level": "majority"}) + assert_read_concern(bars, {"level": "majority"}) def test_json_simple(self): class Embedded(EmbeddedDocument): From 7cc964c7d86d9e6dccf4f819ad291a4150b1d1ee Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Tue, 17 Mar 2020 21:58:36 -0300 Subject: [PATCH 190/216] Add missing import --- mongoengine/context_managers.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index a87c9bb3..9319abae 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,6 +1,7 @@ from contextlib import contextmanager from pymongo.write_concern import WriteConcern +from pymongo.read_concern import ReadConcern from six import iteritems from mongoengine.common import _import_class @@ -263,10 +264,14 @@ def set_write_concern(collection, write_concerns): @contextmanager def set_read_write_concern(collection, write_concerns, read_concerns): combined_write_concerns = dict(collection.write_concern.document.items()) - combined_write_concerns.update(write_concerns) + + if write_concerns is not None: + combined_write_concerns.update(write_concerns) combined_read_concerns = dict(collection.read_concern.document.items()) - combined_read_concerns.update(read_concerns) + + if read_concerns is not None: + combined_read_concerns.update(read_concerns) yield collection.with_options( write_concern=WriteConcern(**combined_write_concerns), From 2b0157aecddb4a77d25a7fbb724578be3c1e097c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 22 Mar 2020 14:05:04 +0100 Subject: [PATCH 191/216] Improve Queryset.get to avoid confusing message in case multiple match are found --- docs/changelog.rst | 1 + mongoengine/queryset/base.py | 8 +++++--- tests/queryset/test_queryset.py | 33 ++++++++++++++++++++++++--------- 3 files changed, 30 insertions(+), 12 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9b864b02..ec0209f2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,6 +7,7 @@ Development =========== - (Fill this out as you fix issues and develop your features). - Add Mongo 4.0 to Travis +- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630 - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index eed30413..15c58481 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -259,16 +259,18 @@ class BaseQuerySet(object): except StopIteration: msg = "%s matching query does not exist." 
% queryset._document._class_name raise queryset._document.DoesNotExist(msg) + try: + # Check if there is another match six.next(queryset) except StopIteration: return result # If we were able to retrieve the 2nd doc, rewind the cursor and # raise the MultipleObjectsReturned exception. - queryset.rewind() - message = u"%d items returned, instead of 1" % queryset.count() - raise queryset._document.MultipleObjectsReturned(message) + raise queryset._document.MultipleObjectsReturned( + u"2 or more items returned, instead of 1" + ) def create(self, **kwargs): """Create new object. Returns the saved object instance. diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index f6d1a916..f15b9748 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -274,32 +274,47 @@ class TestQueryset(unittest.TestCase): with pytest.raises(InvalidQueryError): self.Person.objects(name="User A").with_id(person1.id) - def test_find_only_one(self): - """Ensure that a query using ``get`` returns at most one result. - """ + def test_get_no_document_exists_raises_doesnotexist(self): + assert self.Person.objects.count() == 0 # Try retrieving when no objects exists with pytest.raises(DoesNotExist): self.Person.objects.get() with pytest.raises(self.Person.DoesNotExist): self.Person.objects.get() + def test_get_multiple_match_raises_multipleobjectsreturned(self): + """Ensure that a query using ``get`` returns at most one result. + """ + assert self.Person.objects().count() == 0 + person1 = self.Person(name="User A", age=20) person1.save() - person2 = self.Person(name="User B", age=30) + + p = self.Person.objects.get() + assert p == person1 + + person2 = self.Person(name="User B", age=20) person2.save() - # Retrieve the first person from the database + person3 = self.Person(name="User C", age=30) + person3.save() + + # .get called without argument with pytest.raises(MultipleObjectsReturned): self.Person.objects.get() with pytest.raises(self.Person.MultipleObjectsReturned): self.Person.objects.get() + # check filtering + with pytest.raises(MultipleObjectsReturned): + self.Person.objects.get(age__lt=30) + with pytest.raises(MultipleObjectsReturned) as exc_info: + self.Person.objects(age__lt=30).get() + assert "2 or more items returned, instead of 1" == str(exc_info.value) + # Use a query to filter the people found to just person2 person = self.Person.objects.get(age=30) - assert person.name == "User B" - - person = self.Person.objects.get(age__lt=30) - assert person.name == "User A" + assert person == person3 def test_find_array_position(self): """Ensure that query by array position works. From ce74978b1ece68710b33295524b7563271cd44d4 Mon Sep 17 00:00:00 2001 From: Kes Date: Thu, 26 Mar 2020 17:23:16 +0100 Subject: [PATCH 192/216] Correct and improve docstring on EmbeddedDocumentList.create() --- mongoengine/base/datastructures.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index dcc1f092..0572e22e 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -303,11 +303,11 @@ class EmbeddedDocumentList(BaseList): def create(self, **values): """ - Creates a new embedded document and saves it to the database. + Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList. .. note:: - The embedded document changes are not automatically saved - to the database after calling this method. 
+ the instance of the EmbeddedDocument is not automatically saved to the database. + You still need to call save() o this EmbeddedDocumentList. :param values: A dictionary of values for the embedded document. :return: The new embedded document instance. From d73f0bb1af7fa63424137174da76f89dca762010 Mon Sep 17 00:00:00 2001 From: Kes Date: Thu, 26 Mar 2020 17:25:25 +0100 Subject: [PATCH 193/216] fix typo --- mongoengine/base/datastructures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 0572e22e..fd93c1db 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -307,7 +307,7 @@ class EmbeddedDocumentList(BaseList): .. note:: the instance of the EmbeddedDocument is not automatically saved to the database. - You still need to call save() o this EmbeddedDocumentList. + You still need to call save() on this EmbeddedDocumentList. :param values: A dictionary of values for the embedded document. :return: The new embedded document instance. From 707923e3f5d7682df58652a598edf2127a4ff95a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 8 Apr 2020 21:43:50 +0200 Subject: [PATCH 194/216] fix subdependencies that dropped Py2 support --- setup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.py b/setup.py index 5cba5d9e..11185fd2 100644 --- a/setup.py +++ b/setup.py @@ -121,6 +121,8 @@ extra_opts = { "blinker", "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support "zipp<2.0.0", # (dependency of pytest) dropped python2 support + "pyparsing<3", # sub-dependency that dropped py2 support + "configparser<5", # sub-dependency that dropped py2 support ], } if PY3: From 43724e40b2871d8ca97b437a97ff6d2db01d8001 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 25 Apr 2020 14:16:56 +0200 Subject: [PATCH 195/216] improve doc related to dropping Py2 support --- CONTRIBUTING.rst | 2 +- setup.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index d939e2ee..4afcd69e 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -20,7 +20,7 @@ post to the `user group ` Supported Interpreters ---------------------- -MongoEngine supports CPython 3.7 and newer as well as Pypy3. +MongoEngine supports CPython 3.5 and newer as well as Pypy3. Language features not supported by all interpreters can not be used. 
Python3 codebase diff --git a/setup.py b/setup.py index b6c8ea64..4d63b5b2 100644 --- a/setup.py +++ b/setup.py @@ -97,11 +97,11 @@ CLASSIFIERS = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database", From f4d7e72426f7fe26c913bc5872c6a12711d85123 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 25 Apr 2020 20:43:55 +0200 Subject: [PATCH 196/216] improve .gitignore --- .gitignore | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 048a2d19..16633bae 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,15 @@ -.* !.gitignore *~ *.py[co] .*.sw[po] +.cache/ +.coverage +.coveragerc +.env +.idea/ +.pytest_cache/ +.tox/ +.eggs/ *.egg docs/.build docs/_build @@ -13,8 +20,6 @@ env/ .settings .project .pydevproject -tests/test_bugfix.py htmlcov/ venv venv3 -scratchpad From ef7da36ac681e572461971c87549e3d5f7e108b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 25 Apr 2020 21:36:07 +0200 Subject: [PATCH 197/216] Update pre-commit and fix existing flake8 warnings once for all --- .pre-commit-config.yaml | 14 +++++++------- .travis.yml | 8 +++----- tests/document/test_instance.py | 18 +++++++++--------- tests/fields/test_dict_field.py | 2 +- tests/fields/test_lazy_reference_field.py | 6 +++--- tests/queryset/test_queryset.py | 20 ++++++++++---------- tests/test_connection.py | 6 +++--- tests/test_context_managers.py | 12 ++++++------ tests/test_signals.py | 2 +- 9 files changed, 43 insertions(+), 45 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cac25e41..8b794103 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,13 +1,13 @@ +fail_fast: false repos: - repo: https://github.com/ambv/black - rev: 19.3b0 + rev: 19.10b0 hooks: - id: black - language_version: python3 - - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.2.3 +# language_version: python3 + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.0a2 hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - id: flake8 + additional_dependencies: + - flake8-import-order diff --git a/.travis.yml b/.travis.yml index 5d04571a..a99f1117 100644 --- a/.travis.yml +++ b/.travis.yml @@ -59,18 +59,16 @@ install: # Install Python dependencies. - pip install --upgrade pip - pip install coveralls - - pip install flake8 flake8-import-order + - pip install pre-commit - pip install tox # tox dryrun to setup the tox venv (we run a mock test). - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" - # Install black for Python v3.7 only. 
- - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pip install black; fi before_script: - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork - - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then flake8 .; else echo "flake8 only runs on py37"; fi - - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then black --check .; else echo "black only runs on py37"; fi + # Run pre-commit hooks (black, flake8, etc) on entire codebase + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi - mongo --eval 'db.version();' # Make sure mongo is awake script: diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 920bf392..993cc161 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -1414,7 +1414,7 @@ class TestDocumentInstance(MongoDBTestCase): assert raw_doc["first_name"] == "John" def test_inserts_if_you_set_the_pk(self): - p1 = self.Person(name="p1", id=bson.ObjectId()).save() + _ = self.Person(name="p1", id=bson.ObjectId()).save() p2 = self.Person(name="p2") p2.id = bson.ObjectId() p2.save() @@ -2195,7 +2195,7 @@ class TestDocumentInstance(MongoDBTestCase): user = User(name="Mike").save() reviewer = User(name="John").save() - book = Book(author=user, reviewer=reviewer).save() + _ = Book(author=user, reviewer=reviewer).save() reviewer.delete() assert Book.objects.count() == 1 @@ -2221,7 +2221,7 @@ class TestDocumentInstance(MongoDBTestCase): user_1 = User(id=1).save() user_2 = User(id=2).save() - book_1 = Book(id=1, author=user_2).save() + _ = Book(id=1, author=user_2).save() book_2 = Book(id=2, author=user_1).save() user_2.delete() @@ -2230,7 +2230,7 @@ class TestDocumentInstance(MongoDBTestCase): assert Book.objects.get() == book_2 user_3 = User(id=3).save() - book_3 = Book(id=3, author=user_3).save() + _ = Book(id=3, author=user_3).save() user_3.delete() # Deleting user_3 should also delete book_3 @@ -3204,7 +3204,7 @@ class TestDocumentInstance(MongoDBTestCase): def test_positional_creation(self): """Document cannot be instantiated using positional arguments.""" with pytest.raises(TypeError) as exc_info: - person = self.Person("Test User", 42) + self.Person("Test User", 42) expected_msg = ( "Instantiating a document with positional arguments is not " @@ -3606,13 +3606,13 @@ class TestDocumentInstance(MongoDBTestCase): v = StringField() class A(Document): - l = ListField(EmbeddedDocumentField(B)) + array = ListField(EmbeddedDocumentField(B)) A.objects.delete() - A(l=[B(v="1"), B(v="2"), B(v="3")]).save() + A(array=[B(v="1"), B(v="2"), B(v="3")]).save() a = A.objects.get() - assert a.l._instance == a - for idx, b in enumerate(a.l): + assert a.array._instance == a + for idx, b in enumerate(a.array): assert b._instance == a assert idx == 2 diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index 6850cd58..f423bf8b 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -import pytest from bson import InvalidDocument +import pytest from mongoengine import * from mongoengine.base import BaseDict diff --git a/tests/fields/test_lazy_reference_field.py b/tests/fields/test_lazy_reference_field.py index b5b8690e..50e60262 100644 --- 
a/tests/fields/test_lazy_reference_field.py +++ b/tests/fields/test_lazy_reference_field.py @@ -152,7 +152,7 @@ class TestLazyReferenceField(MongoDBTestCase): LazyReference(BadDoc, animal.pk), ): with pytest.raises(ValidationError): - p = Ocurrence(person="test", animal=bad).save() + Ocurrence(person="test", animal=bad).save() def test_lazy_reference_query_conversion(self): """Ensure that LazyReferenceFields can be queried using objects and values @@ -386,7 +386,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): mineral = Mineral(name="Granite").save() occ_animal = Ocurrence(living_thing=animal, thing=animal).save() - occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save() + _ = Ocurrence(living_thing=vegetal, thing=vegetal).save() with pytest.raises(ValidationError): Ocurrence(living_thing=mineral).save() @@ -458,7 +458,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): baddoc = BadDoc().save() for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)): with pytest.raises(ValidationError): - p = Ocurrence(person="test", animal=bad).save() + Ocurrence(person="test", animal=bad).save() def test_generic_lazy_reference_query_conversion(self): class Member(Document): diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 984e2bd1..cb8e7bba 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -116,7 +116,7 @@ class TestQueryset(unittest.TestCase): def test_limit(self): """Ensure that QuerySet.limit works as expected.""" user_a = self.Person.objects.create(name="User A", age=20) - user_b = self.Person.objects.create(name="User B", age=30) + _ = self.Person.objects.create(name="User B", age=30) # Test limit on a new queryset people = list(self.Person.objects.limit(1)) @@ -148,6 +148,11 @@ class TestQueryset(unittest.TestCase): user_b = self.Person.objects.create(name="User B", age=30) # Test skip on a new queryset + people = list(self.Person.objects.skip(0)) + assert len(people) == 2 + assert people[0] == user_a + assert people[1] == user_b + people = list(self.Person.objects.skip(1)) assert len(people) == 1 assert people[0] == user_b @@ -2586,13 +2591,8 @@ class TestQueryset(unittest.TestCase): age = IntField() with db_ops_tracker() as q: - adult1 = ( - User.objects.filter(age__gte=18).comment("looking for an adult").first() - ) - - adult2 = ( - User.objects.comment("looking for an adult").filter(age__gte=18).first() - ) + User.objects.filter(age__gte=18).comment("looking for an adult").first() + User.objects.comment("looking for an adult").filter(age__gte=18).first() ops = q.get_ops() assert len(ops) == 2 @@ -4518,7 +4518,7 @@ class TestQueryset(unittest.TestCase): foos_without_y = list(Foo.objects.order_by("y").fields(y=0)) - assert all(o.y is None for o in foos_with_x) + assert all(o.y is None for o in foos_without_y) foos_with_sliced_items = list(Foo.objects.order_by("y").fields(slice__items=1)) @@ -5595,7 +5595,7 @@ class TestQueryset(unittest.TestCase): self.Person.objects.create(name="Baz") assert self.Person.objects.count(with_limit_and_skip=True) == 3 - newPerson = self.Person.objects.create(name="Foo_1") + self.Person.objects.create(name="Foo_1") assert self.Person.objects.count(with_limit_and_skip=True) == 4 def test_no_cursor_timeout(self): diff --git a/tests/test_connection.py b/tests/test_connection.py index e40a6994..56bc22cd 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -348,7 +348,7 @@ class ConnectionTest(unittest.TestCase): def 
test_disconnect_cleans_cached_collection_attribute_in_document(self): """Ensure that the disconnect() method works properly""" - conn1 = connect("mongoenginetest") + connect("mongoenginetest") class History(Document): pass @@ -518,7 +518,7 @@ class ConnectionTest(unittest.TestCase): """Ensure connect() uses the username & password params if the URI doesn't explicitly specify them. """ - c = connect( + connect( host="mongodb://localhost/mongoenginetest", username="user", password="pass" ) @@ -632,7 +632,7 @@ class ConnectionTest(unittest.TestCase): """Ensure connect() works when specifying a replicaSet via the MongoDB URI. """ - c = connect(host="mongodb://localhost/test?replicaSet=local-rs") + connect(host="mongodb://localhost/test?replicaSet=local-rs") db = get_db() assert isinstance(db, pymongo.database.Database) assert db.name == "test" diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index fa3f5960..4410fa90 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -216,7 +216,7 @@ class TestContextManagers: def test_query_counter_does_not_swallow_exception(self): with pytest.raises(TypeError): - with query_counter() as q: + with query_counter(): raise TypeError() def test_query_counter_temporarily_modifies_profiling_level(self): @@ -226,12 +226,12 @@ class TestContextManagers: initial_profiling_level = db.profiling_level() try: - NEW_LEVEL = 1 - db.set_profiling_level(NEW_LEVEL) - assert db.profiling_level() == NEW_LEVEL - with query_counter() as q: + new_level = 1 + db.set_profiling_level(new_level) + assert db.profiling_level() == new_level + with query_counter(): assert db.profiling_level() == 2 - assert db.profiling_level() == NEW_LEVEL + assert db.profiling_level() == new_level except Exception: db.set_profiling_level( initial_profiling_level diff --git a/tests/test_signals.py b/tests/test_signals.py index 451e01ff..64976e25 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -267,7 +267,7 @@ class TestSignal(unittest.TestCase): a = self.Author(name="Bill Shakespeare") a.save() self.get_signal_output(lambda: None) # eliminate signal output - a1 = self.Author.objects(name="Bill Shakespeare")[0] + _ = self.Author.objects(name="Bill Shakespeare")[0] assert self.get_signal_output(create_author) == [ "pre_init signal, Author", From 394da67cf12f2339452e3730cce36bec6b60f7e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 25 Apr 2020 21:47:37 +0200 Subject: [PATCH 198/216] fix travis env var --- .pre-commit-config.yaml | 1 - .travis.yml | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8b794103..e11640b8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,6 @@ repos: rev: 19.10b0 hooks: - id: black -# language_version: python3 - repo: https://gitlab.com/pycqa/flake8 rev: 3.8.0a2 hooks: diff --git a/.travis.yml b/.travis.yml index a99f1117..2316124a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,7 +33,7 @@ env: - PYMONGO_3_9=3.9 - PYMONGO_3_10=3.10 - - MAIN_PYTHON_VERSION = "3.7" + - MAIN_PYTHON_VERSION=3.7 matrix: - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} From fd0095b73f9130bd537a07dc5b13c329ef16a871 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 26 Apr 2020 22:28:14 +0200 Subject: [PATCH 199/216] improve recent docstring of EmbeddedDocumentList.create --- mongoengine/base/datastructures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 85f05c9b..d08d4930 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -293,7 +293,7 @@ class EmbeddedDocumentList(BaseList): .. note:: the instance of the EmbeddedDocument is not automatically saved to the database. - You still need to call save() on this EmbeddedDocumentList. + You still need to call .save() on the parent Document. :param values: A dictionary of values for the embedded document. :return: The new embedded document instance. From 78c9e9745d82dff40c0c4e0c5931c4ae557f6374 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 26 Apr 2020 22:51:56 +0200 Subject: [PATCH 200/216] fix linting + update changelog & contributors --- AUTHORS | 1 + docs/changelog.rst | 1 + mongoengine/context_managers.py | 6 +++--- mongoengine/queryset/base.py | 4 +++- tests/test_context_managers.py | 4 ++-- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/AUTHORS b/AUTHORS index 7d3000ce..02e43955 100644 --- a/AUTHORS +++ b/AUTHORS @@ -256,3 +256,4 @@ that much better: * Eric Timmons (https://github.com/daewok) * Matthew Simpson (https://github.com/mcsimps2) * Leonardo Domingues (https://github.com/leodmgs) + * Agustin Barto (https://github.com/abarto) diff --git a/docs/changelog.rst b/docs/changelog.rst index 76545559..625526a3 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -21,6 +21,7 @@ Development - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes`` - Added pre-commit #2212 - Renamed requirements-lint.txt to requirements-dev.txt #2212 +- Support for setting ReadConcern #2255 Changes in 0.19.1 ================= diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 77e6b55c..5f2b5229 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,7 +1,7 @@ from contextlib import contextmanager -from pymongo.write_concern import WriteConcern from pymongo.read_concern import ReadConcern +from pymongo.write_concern import WriteConcern from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db @@ -268,11 +268,11 @@ def set_read_write_concern(collection, write_concerns, read_concerns): combined_write_concerns.update(write_concerns) combined_read_concerns = dict(collection.read_concern.document.items()) - + if read_concerns is not None: combined_read_concerns.update(read_concerns) yield collection.with_options( write_concern=WriteConcern(**combined_write_concerns), - read_concern=ReadConcern(**combined_read_concerns) + read_concern=ReadConcern(**combined_read_concerns), ) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 317ec698..39c44b29 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1206,7 +1206,9 @@ class BaseQuerySet: raise TypeError("%r is not a valid read concern." 
% (read_concern,)) queryset = self.clone() - queryset._read_concern = ReadConcern(**read_concern) if read_concern is not None else None + queryset._read_concern = ( + ReadConcern(**read_concern) if read_concern is not None else None + ) queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_concern return queryset diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index 8f3dd555..a4864c40 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -8,10 +8,10 @@ from mongoengine.context_managers import ( no_dereference, no_sub_classes, query_counter, + set_read_write_concern, + set_write_concern, switch_collection, switch_db, - set_write_concern, - set_read_write_concern, ) from mongoengine.pymongo_support import count_documents From 3fbe9c3cdda00fc7307dfc971df25e4573344d6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Fri, 1 May 2020 13:13:30 +0200 Subject: [PATCH 201/216] Bump version to 0.20.0 and update CHANGELOG accordingly --- docs/changelog.rst | 7 +++++-- mongoengine/__init__.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 76545559..73f27efe 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,9 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). + +Changes in 0.20.0 +================= - ATTENTION: Drop support for Python2 - Add Mongo 4.0 to Travis - Fix error when setting a string as a ComplexDateTimeField #2253 @@ -14,12 +17,12 @@ Development - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 -- Remove methods deprecated years ago: +- Remove methods that were deprecated years ago: - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field - Queryset.slave_okay() was deprecated since pymongo3 - dropDups was dropped with MongoDB3 - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes`` -- Added pre-commit #2212 +- Added pre-commit for development/CI #2212 - Renamed requirements-lint.txt to requirements-dev.txt #2212 Changes in 0.19.1 diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index e45dfc2b..dbd88a68 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -28,7 +28,7 @@ __all__ = ( ) -VERSION = (0, 19, 1) +VERSION = (0, 20, 0) def get_version(): From d8657be320ca5fdebb1257b70c201d97f7ba8d14 Mon Sep 17 00:00:00 2001 From: Terence Honles Date: Tue, 19 May 2020 10:23:07 -0700 Subject: [PATCH 202/216] Fix requirement Pillow < 7 to mention it is for tests only --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2dd5bcd5..71f9ac5d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -28,7 +28,7 @@ Changes in 0.20.0 Changes in 0.19.1 ================= -- Requires Pillow < 7.0.0 as it dropped Python2 support +- Tests require Pillow < 7.0.0 as it dropped Python2 support - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of pipeline steps (*pipeline) but simply takes the pipeline list just 
like ``pymongo.Collection.aggregate`` does. #2079 From 22bff8566d35dc0c0d598f0ca391b0ac5f6ed50e Mon Sep 17 00:00:00 2001 From: "Terence D. Honles" Date: Tue, 19 May 2020 11:00:30 -0700 Subject: [PATCH 203/216] fix self inflicted deprecation warnings in QNode --- mongoengine/queryset/visitor.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 0eacc2ef..a2448f28 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -7,6 +7,11 @@ from mongoengine.queryset import transform __all__ = ("Q", "QNode") +def warn_empty_is_deprecated(): + msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" + warnings.warn(msg, DeprecationWarning, stacklevel=2) + + class QNodeVisitor: """Base visitor class for visiting Q-object nodes in a query tree. """ @@ -98,19 +103,18 @@ class QNode: object. """ # If the other Q() is empty, ignore it and just use `self`. - if getattr(other, "empty", True): + if not bool(other): return self # Or if this Q is empty, ignore it and just use `other`. - if self.empty: + if not bool(self): return other return QCombination(operation, [self, other]) @property def empty(self): - msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" - warnings.warn(msg, DeprecationWarning) + warn_empty_is_deprecated() return False def __or__(self, other): @@ -152,8 +156,7 @@ class QCombination(QNode): @property def empty(self): - msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" - warnings.warn(msg, DeprecationWarning) + warn_empty_is_deprecated() return not bool(self.children) def __eq__(self, other): @@ -186,4 +189,5 @@ class Q(QNode): @property def empty(self): + warn_empty_is_deprecated() return not bool(self.query) From 1698f398eb11a85633a1aa3def93386322d648f3 Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Wed, 20 May 2020 18:56:13 -0300 Subject: [PATCH 204/216] Add _read_concern to copied properties. Add read_concern to aggregate. Add test to check the read_concern and read_preference values are kept after cloning. 
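To illustrate the intent, the user-facing behaviour after this change looks roughly like the sketch below (illustrative only, not part of the patch; it reuses the ``Number`` model from the test added further down, and assumes a reachable MongoDB instance plus a hypothetical database name):

    from mongoengine import Document, IntField, connect
    from pymongo import ReadPreference

    connect("read_concern_example")  # hypothetical database name

    class Number(Document):
        n = IntField()

    qs = Number.objects.read_concern({"level": "majority"}).read_preference(
        ReadPreference.PRIMARY_PREFERRED
    )
    qs_clone = qs.clone()

    # both settings survive clone() once _read_concern is part of the copied properties
    assert qs_clone._read_concern.document == {"level": "majority"}
    assert qs_clone._read_preference == ReadPreference.PRIMARY_PREFERRED

    # aggregate() now runs against a collection opened with the same
    # read_concern / read_preference options
    list(qs.aggregate([{"$match": {"n": {"$gte": 0}}}]))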
--- mongoengine/queryset/base.py | 6 ++++-- tests/queryset/test_queryset.py | 26 ++++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 39c44b29..b70fae64 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -789,6 +789,7 @@ class BaseQuerySet: "_snapshot", "_timeout", "_read_preference", + "_read_concern", "_iter", "_scalar", "_as_pymongo", @@ -1311,10 +1312,11 @@ class BaseQuerySet: final_pipeline = initial_pipeline + user_pipeline collection = self._collection - if self._read_preference is not None: + if self._read_preference is not None or self._read_concern is not None: collection = self._collection.with_options( - read_preference=self._read_preference + read_preference=self._read_preference, read_concern=self._read_concern ) + return collection.aggregate(final_pipeline, cursor={}, **kwargs) # JS functionality diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 6b6000c9..88f51104 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4021,6 +4021,32 @@ class TestQueryset(unittest.TestCase): Number.drop_collection() + def test_clone_retains_settings(self): + """Ensure that cloning retains the read_preference and read_concern + """ + + class Number(Document): + n = IntField() + + Number.drop_collection() + + qs = Number.objects + qs_clone = qs.clone() + assert qs._read_preference == qs_clone._read_preference + assert qs._read_concern == qs_clone._read_concern + + qs = Number.objects.read_preference(ReadPreference.PRIMARY_PREFERRED) + qs_clone = qs.clone() + assert qs._read_preference == ReadPreference.PRIMARY_PREFERRED + assert qs._read_preference == qs_clone._read_preference + + qs = Number.objects.read_concern({'level': 'majority'}) + qs_clone = qs.clone() + assert qs._read_concern.document == {'level': 'majority'} + assert qs._read_concern == qs_clone._read_concern + + Number.drop_collection() + def test_using(self): """Ensure that switching databases for a queryset is possible """ From 31498bd7dd1f7316a766aae7f511cd5e4ac4accd Mon Sep 17 00:00:00 2001 From: Agustin Barto Date: Wed, 20 May 2020 18:58:18 -0300 Subject: [PATCH 205/216] Update changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2dd5bcd5..acf20195 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,7 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- Fixed a bug that made the queryset drop the read_preference after clone(). 
Changes in 0.20.0 ================= From 49f9bca23b081c69ba6a8e0c26102f8cd677d778 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 23 May 2020 23:08:56 +0200 Subject: [PATCH 206/216] fix black formatting --- tests/queryset/test_queryset.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 88f51104..4900f3a3 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -4040,9 +4040,9 @@ class TestQueryset(unittest.TestCase): assert qs._read_preference == ReadPreference.PRIMARY_PREFERRED assert qs._read_preference == qs_clone._read_preference - qs = Number.objects.read_concern({'level': 'majority'}) + qs = Number.objects.read_concern({"level": "majority"}) qs_clone = qs.clone() - assert qs._read_concern.document == {'level': 'majority'} + assert qs._read_concern.document == {"level": "majority"} assert qs._read_concern == qs_clone._read_concern Number.drop_collection() From adb5f74ddbad4478444af24431bf6b4cc8206670 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 26 May 2020 23:37:55 +0200 Subject: [PATCH 207/216] Fix a bug in limit0 #2311 --- mongoengine/queryset/base.py | 21 ++++++++++++++----- tests/queryset/test_queryset.py | 36 ++++++++++++++++++++++++++++++++- 2 files changed, 51 insertions(+), 6 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 39c44b29..f755a05c 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -83,6 +83,7 @@ class BaseQuerySet: self._cursor_obj = None self._limit = None self._skip = None + self._empty = False self._hint = -1 # Using -1 as None is a valid value for hint self._collation = None self._batch_size = None @@ -162,6 +163,7 @@ class BaseQuerySet: [, ] """ queryset = self.clone() + queryset._empty = False # Handle a slice if isinstance(key, slice): @@ -169,6 +171,8 @@ class BaseQuerySet: queryset._skip, queryset._limit = key.start, key.stop if key.start and key.stop: queryset._limit = key.stop - key.start + if queryset._limit == 0: + queryset._empty = True # Allow further QuerySet modifications to be performed return queryset @@ -394,7 +398,12 @@ class BaseQuerySet: :meth:`skip` that has been applied to this cursor into account when getting the count """ - if self._limit == 0 and with_limit_and_skip is False or self._none: + if ( + self._limit == 0 + and with_limit_and_skip is False + or self._none + or self._empty + ): return 0 count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) self._cursor_obj = None @@ -735,7 +744,9 @@ class BaseQuerySet: return doc_map def none(self): - """Helper that just returns a list""" + """Returns a queryset that never returns any objects and no query will be executed when accessing the results + inspired by django none() https://docs.djangoproject.com/en/dev/ref/models/querysets/#none + """ queryset = self.clone() queryset._none = True return queryset @@ -794,6 +805,7 @@ class BaseQuerySet: "_as_pymongo", "_limit", "_skip", + "_empty", "_hint", "_collation", "_auto_dereference", @@ -834,6 +846,7 @@ class BaseQuerySet: """ queryset = self.clone() queryset._limit = n + queryset._empty = False # cancels the effect of empty # If a cursor object has already been created, apply the limit to it. if queryset._cursor_obj: @@ -1584,7 +1597,7 @@ class BaseQuerySet: def __next__(self): """Wrap the result in a :class:`~mongoengine.Document` object. 
""" - if self._limit == 0 or self._none: + if self._none or self._empty: raise StopIteration raw_doc = next(self._cursor) @@ -1603,8 +1616,6 @@ class BaseQuerySet: return doc - next = __next__ # For Python2 support - def rewind(self): """Rewind the cursor to its unevaluated state. diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 6b6000c9..aece69bf 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -114,6 +114,38 @@ class TestQueryset(unittest.TestCase): assert person.name == "User A" assert person.age == 20 + def test_slicing_sets_empty_limit_skip(self): + self.Person.objects.insert( + [self.Person(name="User {}".format(i), age=i) for i in range(5)], + load_bulk=False, + ) + + self.Person.objects.create(name="User B", age=30) + self.Person.objects.create(name="User C", age=40) + + qs = self.Person.objects()[1:2] + assert (qs._empty, qs._skip, qs._limit) == (False, 1, 1) + assert len(list(qs)) == 1 + + # Test edge case of [1:1] which should return nothing + # and require a hack so that it doesn't clash with limit(0) + qs = self.Person.objects()[1:1] + assert (qs._empty, qs._skip, qs._limit) == (True, 1, 0) + assert len(list(qs)) == 0 + + qs2 = qs[1:5] # Make sure that further slicing resets _empty + assert (qs2._empty, qs2._skip, qs2._limit) == (False, 1, 4) + assert len(list(qs2)) == 4 + + def test_limit_0_returns_all_documents(self): + self.Person.objects.create(name="User A", age=20) + self.Person.objects.create(name="User B", age=30) + + n_docs = self.Person.objects().count() + + persons = list(self.Person.objects().limit(0)) + assert len(persons) == 2 == n_docs + def test_limit(self): """Ensure that QuerySet.limit works as expected.""" user_a = self.Person.objects.create(name="User A", age=20) @@ -4442,7 +4474,9 @@ class TestQueryset(unittest.TestCase): assert len(people) == 1 assert people[0] == "User B" - people = list(self.Person.objects[1:1].scalar("name")) + # people = list(self.Person.objects[1:1].scalar("name")) + people = self.Person.objects[1:1] + people = people.scalar("name") assert len(people) == 0 # Test slice out of range From 194b0cac88073a4118a9827b4fbd5ecf9b28dccd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 26 May 2020 23:45:35 +0200 Subject: [PATCH 208/216] improve doc + changelog --- docs/changelog.rst | 3 ++- mongoengine/queryset/base.py | 9 ++++++++- tests/queryset/test_queryset.py | 3 +++ 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index d543b169..819f4e0b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,7 +6,8 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). -- Fixed a bug that made the queryset drop the read_preference after clone(). +- Fix a bug that made the queryset drop the read_preference after clone(). 
+- Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311 Changes in 0.20.0 ================= diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 875cbfda..6ad08617 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -83,7 +83,7 @@ class BaseQuerySet: self._cursor_obj = None self._limit = None self._skip = None - self._empty = False + self._hint = -1 # Using -1 as None is a valid value for hint self._collation = None self._batch_size = None @@ -91,6 +91,13 @@ class BaseQuerySet: self._max_time_ms = None self._comment = None + # Hack - As people expect cursor[5:5] to return + # an empty result set. It's hard to do that right, though, because the + # server uses limit(0) to mean 'no limit'. So we set _empty + # in that case and check for it when iterating. We also unset + # it anytime we change _limit. Inspired by how it is done in pymongo.Cursor + self._empty = False + def __call__(self, q_obj=None, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 36da5d74..73c419b3 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -409,6 +409,9 @@ class TestQueryset(unittest.TestCase): assert list(A.objects.none()) == [] assert list(A.objects.none().all()) == [] + assert list(A.objects.none().limit(1)) == [] + assert list(A.objects.none().skip(1)) == [] + assert list(A.objects.none()[:5]) == [] def test_chaining(self): class A(Document): From e431e27cb27d06290e451b7fbc62e811b264eeac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 1 Aug 2020 15:09:10 +0200 Subject: [PATCH 209/216] #2360 fix py3 incompatible code --- mongoengine/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index db64054a..4a57d511 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -639,7 +639,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): write_concern=write_concern, _from_doc_delete=True ) except pymongo.errors.OperationFailure as err: - message = "Could not delete document (%s)" % err.message + message = "Could not delete document (%s)" % err.args raise OperationError(message) signals.post_delete.send(self.__class__, document=self, **signal_kwargs) From 3e1c83f8fab28c37ab2e25eab2d7dcc6121f50cc Mon Sep 17 00:00:00 2001 From: Johnny Chang Date: Tue, 4 Aug 2020 00:30:15 +0800 Subject: [PATCH 210/216] Fix query transformation regarding special operators --- mongoengine/fields.py | 3 +++ tests/queryset/test_transform.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index b05e726a..f50e6045 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -773,6 +773,9 @@ class EmbeddedDocumentField(BaseField): def prepare_query_value(self, op, value): if value is not None and not isinstance(value, self.document_type): + # Short circuit for special operators, returning them as is + if isinstance(value, dict) and all(k.startswith('$') for k in value.keys()): + return value try: value = self.document_type._from_son(value) except ValueError: diff --git a/tests/queryset/test_transform.py b/tests/queryset/test_transform.py index 8d6c2d06..0fba3975 100644 --- a/tests/queryset/test_transform.py +++ b/tests/queryset/test_transform.py @@ 
-344,6 +344,36 @@ class TestTransform(unittest.TestCase): ) assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}} + def test_transform_embedded_document_list_fields(self): + """ + Test added to check filtering + EmbeddedDocumentListField which is inside a EmbeddedDocumentField + """ + + class Drink(EmbeddedDocument): + id = StringField() + meta = { + 'strict': False + } + + class Shop(Document): + drinks = EmbeddedDocumentListField(Drink) + + Shop.drop_collection() + drinks = [Drink(id='drink_1'), Drink(id='drink_2')] + Shop.objects.create(drinks=drinks) + q_obj = transform.query( + Shop, + drinks__all=[{'$elemMatch': {'_id': x.id}} for x in drinks] + ) + assert q_obj == { + 'drinks': { + '$all': [{'$elemMatch': {'_id': x.id}} for x in drinks] + } + } + + Shop.drop_collection() + if __name__ == "__main__": unittest.main() From 7116dec74a394900d2ffd9bcca5b3318d03598aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Tue, 11 Aug 2020 21:55:22 +0200 Subject: [PATCH 211/216] run black to please ci --- mongoengine/fields.py | 2 +- tests/queryset/test_transform.py | 13 ++++--------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f50e6045..8fdeae15 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -774,7 +774,7 @@ class EmbeddedDocumentField(BaseField): def prepare_query_value(self, op, value): if value is not None and not isinstance(value, self.document_type): # Short circuit for special operators, returning them as is - if isinstance(value, dict) and all(k.startswith('$') for k in value.keys()): + if isinstance(value, dict) and all(k.startswith("$") for k in value.keys()): return value try: value = self.document_type._from_son(value) diff --git a/tests/queryset/test_transform.py b/tests/queryset/test_transform.py index 0fba3975..f5d248af 100644 --- a/tests/queryset/test_transform.py +++ b/tests/queryset/test_transform.py @@ -352,24 +352,19 @@ class TestTransform(unittest.TestCase): class Drink(EmbeddedDocument): id = StringField() - meta = { - 'strict': False - } + meta = {"strict": False} class Shop(Document): drinks = EmbeddedDocumentListField(Drink) Shop.drop_collection() - drinks = [Drink(id='drink_1'), Drink(id='drink_2')] + drinks = [Drink(id="drink_1"), Drink(id="drink_2")] Shop.objects.create(drinks=drinks) q_obj = transform.query( - Shop, - drinks__all=[{'$elemMatch': {'_id': x.id}} for x in drinks] + Shop, drinks__all=[{"$elemMatch": {"_id": x.id}} for x in drinks] ) assert q_obj == { - 'drinks': { - '$all': [{'$elemMatch': {'_id': x.id}} for x in drinks] - } + "drinks": {"$all": [{"$elemMatch": {"_id": x.id}} for x in drinks]} } Shop.drop_collection() From 7f77084e0ec12f1a3f299138e8369474f2c26b5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 12 Aug 2020 21:56:38 +0200 Subject: [PATCH 212/216] minor fixes in doc links --- docs/changelog.rst | 3 --- docs/guide/querying.rst | 2 +- mongoengine/fields.py | 2 +- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 819f4e0b..1556266b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -458,9 +458,6 @@ Changes in 0.8.3 - Document.select_related() now respects ``db_alias`` (#377) - Reload uses shard_key if applicable (#384) - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) - - **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 - - Fixed pickling 
dynamic documents ``_dynamic_fields`` (#387) - Fixed ListField setslice and delslice dirty tracking (#390) - Added Django 1.5 PY3 support (#392) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 07de0378..7307b003 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -609,7 +609,7 @@ to push values with index:: .. note:: Currently only top level lists are handled, future versions of mongodb / pymongo plan to support nested positional operators. See `The $ positional - operator `_. + operator `_. Server-side javascript execution ================================ diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 8fdeae15..bba05ea7 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -2045,7 +2045,7 @@ class ImageField(FileField): class SequenceField(BaseField): """Provides a sequential counter see: - http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers + https://docs.mongodb.com/manual/reference/method/ObjectId/#ObjectIDs-SequenceNumbers .. note:: From 1f2a5db016a8c3b42de91baf76696482415238ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 12 Aug 2020 22:30:52 +0200 Subject: [PATCH 213/216] fix deprecated use of .update in test suite --- tests/fields/test_fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index 652f6903..25ecb2e7 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -336,7 +336,7 @@ class TestField(MongoDBTestCase): doc.save() # Unset all the fields - HandleNoneFields._get_collection().update( + HandleNoneFields._get_collection().update_one( {"_id": doc.id}, {"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}}, ) From 0d289fd5a139d96a1b23966dd4df0053a077424f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Wed, 7 Oct 2020 20:36:50 +0200 Subject: [PATCH 214/216] upgrade pymongo and mongodb versions used in CI --- .travis.yml | 16 ++++++++-------- tox.ini | 4 ++-- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2316124a..c5b37b6f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,18 +24,18 @@ python: env: global: - - MONGODB_3_4=3.4.17 - - MONGODB_3_6=3.6.12 + - MONGODB_3_4=3.4.19 + - MONGODB_3_6=3.6.13 - MONGODB_4_0=4.0.13 - PYMONGO_3_4=3.4 - PYMONGO_3_6=3.6 - PYMONGO_3_9=3.9 - - PYMONGO_3_10=3.10 + - PYMONGO_3_11=3.11 - MAIN_PYTHON_VERSION=3.7 matrix: - - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} + - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11} matrix: # Finish the build as soon as one job fails @@ -47,9 +47,9 @@ matrix: - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} - python: 3.7 - env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_10} + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11} - python: 3.8 - env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_10} + env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11} install: # Install Mongo @@ -75,7 +75,7 @@ script: - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . 
| sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" after_success: -- - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi notifications: irc: irc.freenode.org#mongoengine @@ -103,5 +103,5 @@ deploy: on: tags: true repo: MongoEngine/mongoengine - condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4}) + condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4}) python: 3.7 diff --git a/tox.ini b/tox.ini index 675b6d9a..6f33772c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py35,pypy3}-{mg34,mg36,mg39,mg310} +envlist = {py35,pypy3}-{mg34,mg36,mg39,mg311} [testenv] commands = @@ -8,6 +8,6 @@ deps = mg34: pymongo>=3.4,<3.5 mg36: pymongo>=3.6,<3.7 mg39: pymongo>=3.9,<3.10 - mg310: pymongo>=3.10,<3.11 + mg311: pymongo>=3.11,<3.12 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs From d4350e7da425ea98a9122f2aa812730a0e4c704a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sat, 10 Oct 2020 23:32:22 +0200 Subject: [PATCH 215/216] Fix for ListField that isnt detecting properly that item 0 is changed --- mongoengine/base/datastructures.py | 2 +- tests/document/test_delta.py | 50 ++++++++++++++++++------------ tests/document/test_instance.py | 7 +++-- tests/fields/test_dict_field.py | 2 +- tests/fields/test_fields.py | 4 +-- tests/fields/test_geo_fields.py | 4 +-- tests/test_datastructures.py | 6 +++- 7 files changed, 45 insertions(+), 30 deletions(-) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index d08d4930..8c69cc73 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -179,7 +179,7 @@ class BaseList(list): def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): - if key: + if key is not None: self._instance._mark_as_changed( "{}.{}".format(self._name, key % len(self)) ) diff --git a/tests/document/test_delta.py b/tests/document/test_delta.py index 2324211b..e7baaa23 100644 --- a/tests/document/test_delta.py +++ b/tests/document/test_delta.py @@ -29,7 +29,8 @@ class TestDelta(MongoDBTestCase): self.delta(Document) self.delta(DynamicDocument) - def delta(self, DocClass): + @staticmethod + def delta(DocClass): class Doc(DocClass): string_field = StringField() int_field = IntField() @@ -428,13 +429,20 @@ class TestDelta(MongoDBTestCase): assert doc.dict_field == {"hello": "world"} assert doc.list_field == ["1", 2, {"hello": "world"}] - def test_delta_recursive_db_field(self): + def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self): self.delta_recursive_db_field(Document, EmbeddedDocument) + + def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self): self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) + + def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self): self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) + + def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self): self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) - def delta_recursive_db_field(self, DocClass, EmbeddedClass): + @staticmethod + def delta_recursive_db_field(DocClass, EmbeddedClass): class Embedded(EmbeddedClass): string_field = StringField(db_field="db_string_field") int_field = IntField(db_field="db_int_field") @@ -487,6 +495,7 @@ class 
TestDelta(MongoDBTestCase): doc = doc.reload(10) assert doc.embedded_field.dict_field == {} + assert doc._get_changed_fields() == [] doc.embedded_field.list_field = [] assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] assert doc.embedded_field._delta() == ({}, {"db_list_field": 1}) @@ -634,6 +643,7 @@ class TestDelta(MongoDBTestCase): doc.save() doc = doc.reload(10) + assert doc._delta() == ({}, {},) del doc.embedded_field.list_field[2].list_field assert doc._delta() == ( {}, @@ -732,12 +742,12 @@ class TestDelta(MongoDBTestCase): assert organization._get_changed_fields() == [] updates, removals = organization._delta() - assert {} == removals - assert {} == updates + assert removals == {} + assert updates == {} organization.employees.append(person) updates, removals = organization._delta() - assert {} == removals + assert removals == {} assert "employees" in updates def test_delta_with_dbref_false(self): @@ -749,12 +759,12 @@ class TestDelta(MongoDBTestCase): assert organization._get_changed_fields() == [] updates, removals = organization._delta() - assert {} == removals - assert {} == updates + assert removals == {} + assert updates == {} organization.employees.append(person) updates, removals = organization._delta() - assert {} == removals + assert removals == {} assert "employees" in updates def test_nested_nested_fields_mark_as_changed(self): @@ -775,11 +785,11 @@ class TestDelta(MongoDBTestCase): subdoc = mydoc.subs["a"]["b"] subdoc.name = "bar" - assert ["name"] == subdoc._get_changed_fields() - assert ["subs.a.b.name"] == mydoc._get_changed_fields() + assert subdoc._get_changed_fields() == ["name"] + assert mydoc._get_changed_fields() == ["subs.a.b.name"] mydoc._clear_changed_fields() - assert [] == mydoc._get_changed_fields() + assert mydoc._get_changed_fields() == [] def test_lower_level_mark_as_changed(self): class EmbeddedDoc(EmbeddedDocument): @@ -794,17 +804,17 @@ class TestDelta(MongoDBTestCase): mydoc = MyDoc.objects.first() mydoc.subs["a"] = EmbeddedDoc() - assert ["subs.a"] == mydoc._get_changed_fields() + assert mydoc._get_changed_fields() == ["subs.a"] subdoc = mydoc.subs["a"] subdoc.name = "bar" - assert ["name"] == subdoc._get_changed_fields() - assert ["subs.a"] == mydoc._get_changed_fields() + assert subdoc._get_changed_fields() == ["name"] + assert mydoc._get_changed_fields() == ["subs.a"] mydoc.save() mydoc._clear_changed_fields() - assert [] == mydoc._get_changed_fields() + assert mydoc._get_changed_fields() == [] def test_upper_level_mark_as_changed(self): class EmbeddedDoc(EmbeddedDocument): @@ -821,15 +831,15 @@ class TestDelta(MongoDBTestCase): subdoc = mydoc.subs["a"] subdoc.name = "bar" - assert ["name"] == subdoc._get_changed_fields() - assert ["subs.a.name"] == mydoc._get_changed_fields() + assert subdoc._get_changed_fields() == ["name"] + assert mydoc._get_changed_fields() == ["subs.a.name"] mydoc.subs["a"] = EmbeddedDoc() - assert ["subs.a"] == mydoc._get_changed_fields() + assert mydoc._get_changed_fields() == ["subs.a"] mydoc.save() mydoc._clear_changed_fields() - assert [] == mydoc._get_changed_fields() + assert mydoc._get_changed_fields() == [] def test_referenced_object_changed_attributes(self): """Ensures that when you save a new reference to a field, the referenced object isn't altered""" diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 993cc161..8d42d15b 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -188,7 +188,7 @@ class 
TestDocumentInstance(MongoDBTestCase): def test_queryset_resurrects_dropped_collection(self): self.Person.drop_collection() - assert [] == list(self.Person.objects()) + assert list(self.Person.objects()) == [] # Ensure works correctly with inhertited classes class Actor(self.Person): @@ -196,7 +196,7 @@ class TestDocumentInstance(MongoDBTestCase): Actor.objects() self.Person.drop_collection() - assert [] == list(Actor.objects()) + assert list(Actor.objects()) == [] def test_polymorphic_references(self): """Ensure that the correct subclasses are returned from a query @@ -578,7 +578,8 @@ class TestDocumentInstance(MongoDBTestCase): doc.embedded_field.list_field.append(1) doc.embedded_field.dict_field["woot"] = "woot" - assert doc._get_changed_fields() == [ + changed = doc._get_changed_fields() + assert changed == [ "list_field", "dict_field.woot", "embedded_field.list_field", diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index f423bf8b..12140916 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -113,7 +113,7 @@ class TestDictField(MongoDBTestCase): post.info.setdefault("authors", []) post.save() post.reload() - assert [] == post.info["authors"] + assert post.info["authors"] == [] def test_dictfield_dump_document(self): """Ensure a DictField can handle another document's dump.""" diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index 25ecb2e7..fe349d1e 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -1084,7 +1084,7 @@ class TestField(MongoDBTestCase): e = Simple().save() e.mapping = [] - assert [] == e._changed_fields + assert e._changed_fields == [] class Simple(Document): mapping = DictField() @@ -1093,7 +1093,7 @@ class TestField(MongoDBTestCase): e = Simple().save() e.mapping = {} - assert [] == e._changed_fields + assert e._changed_fields == [] def test_slice_marks_field_as_changed(self): class Simple(Document): diff --git a/tests/fields/test_geo_fields.py b/tests/fields/test_geo_fields.py index 1b912a4b..7618b3a0 100644 --- a/tests/fields/test_geo_fields.py +++ b/tests/fields/test_geo_fields.py @@ -381,7 +381,7 @@ class TestGeoField(MongoDBTestCase): meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} - assert [] == Log._geo_indices() + assert Log._geo_indices() == [] Log.drop_collection() Log.ensure_indexes() @@ -401,7 +401,7 @@ class TestGeoField(MongoDBTestCase): "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] } - assert [] == Log._geo_indices() + assert Log._geo_indices() == [] Log.drop_collection() Log.ensure_indexes() diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 6d432e32..f4b63f05 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -9,10 +9,14 @@ from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict class DocumentStub(object): def __init__(self): self._changed_fields = [] + self._unset_fields = [] def _mark_as_changed(self, key): self._changed_fields.append(key) + def _mark_as_unset(self, key): + self._unset_fields.append(key) + class TestBaseDict: @staticmethod @@ -314,7 +318,7 @@ class TestBaseList: def test___setitem___item_0_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list[0] = False - assert base_list._instance._changed_fields == ["my_name"] + assert base_list._instance._changed_fields == ["my_name.0"] assert base_list == [False] def test___setitem___item_1_calls_mark_as_changed(self): From 
3adb67901b64e1e87668247cfd8183b4090a922b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastien=20G=C3=A9rard?= Date: Sun, 11 Oct 2020 00:53:46 +0200 Subject: [PATCH 216/216] update changelog for #2392 --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 14dfb8d0..f616f4a6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -11,6 +11,8 @@ Development This should have a negative impact on performance of count see Issue #2219 - Fix a bug that made the queryset drop the read_preference after clone(). - Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311 +- Bug fix in ListField when updating the first item, it was saving the whole list, instead of + just replacing the first item (as it's usually done) #2392 Changes in 0.20.0 =================
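For readers skimming the series, the ListField fix in PATCH 215/216 above boils down to a truthiness check: ``if key:`` treats index 0 as "no key", so a change to the first list item was recorded against the whole list. A minimal standalone sketch of the old and new behaviour (plain Python, not MongoEngine code):

    changed_fields = []

    def mark_as_changed_buggy(name, key=None):
        # old behaviour: `if key:` is False when key == 0
        if key:
            changed_fields.append("{}.{}".format(name, key))
        else:
            changed_fields.append(name)      # whole list marked dirty

    def mark_as_changed_fixed(name, key=None):
        # new behaviour: only None means "no key"
        if key is not None:
            changed_fields.append("{}.{}".format(name, key))
        else:
            changed_fields.append(name)

    mark_as_changed_buggy("my_list", 0)      # records "my_list"   -> whole list re-saved
    mark_as_changed_fixed("my_list", 0)      # records "my_list.0" -> only the first item is updated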