From 7a749b88c7e9bcbaf083f520163f952317c713b2 Mon Sep 17 00:00:00 2001 From: mrigal Date: Wed, 10 Dec 2014 16:44:16 +0100 Subject: [PATCH 01/36] added new test like defined in issue #712 and changed ObjectIdField to_python() method to use a try except similar to other Field classes --- mongoengine/base/fields.py | 7 +++++-- tests/queryset/queryset.py | 7 +++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index aa16804e..bee02879 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -410,8 +410,11 @@ class ObjectIdField(BaseField): """ def to_python(self, value): - if not isinstance(value, ObjectId): - value = ObjectId(value) + try: + if not isinstance(value, ObjectId): + value = ObjectId(value) + except: + pass return value def to_mongo(self, value): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 6cbac495..5c721f0b 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -4578,6 +4578,13 @@ class QuerySetTest(unittest.TestCase): self.assertEquals(Animal.objects(folded_ears=True).count(), 1) self.assertEquals(Animal.objects(whiskers_length=5.1).count(), 1) + def test_loop_via_invalid_id_does_not_crash(self): + class Person(Document): + name = StringField() + Person.objects.delete() + Person._get_collection().update({"name": "a"}, {"$set": {"_id": ""}}, upsert=True) + for p in Person.objects(): + self.assertEqual(p.name, 'a') if __name__ == '__main__': unittest.main() From f730591f2cc6cb63ed8be69b1d814d9d3a5c1e2f Mon Sep 17 00:00:00 2001 From: Marcel van den Elst Date: Wed, 20 May 2015 13:01:41 +0200 Subject: [PATCH 02/36] added passing test for updates on related models ref #570: test would fail from v0.8.5 up, but fixed in master --- tests/queryset/queryset.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 65d84305..6e4d86d2 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -630,6 +630,40 @@ class QuerySetTest(unittest.TestCase): self.assertRaises(ValidationError, Doc.objects().update, dt_f="datetime", upsert=True) self.assertRaises(ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True) + def test_update_related_models( self ): + class TestPerson( Document ): + name = StringField() + + class TestOrganization( Document ): + name = StringField() + owner = ReferenceField( TestPerson ) + + TestPerson.drop_collection() + TestOrganization.drop_collection() + + p = TestPerson( name='p1' ) + p.save() + o = TestOrganization( name='o1' ) + o.save() + + o.owner = p + p.name = 'p2' + + self.assertListEqual( o._get_changed_fields(), [ 'owner' ] ) + self.assertListEqual( p._get_changed_fields(), [ 'name' ] ) + + o.save() + + self.assertListEqual( o._get_changed_fields(), [] ) + self.assertListEqual( p._get_changed_fields(), [ 'name' ] ) # Fails; it's empty + + # This will do NOTHING at all, even though we changed the name + p.save() + + p.reload() + + self.assertEqual( p.name, 'p2' ) # Fails; it's still `p1` + def test_upsert(self): self.Person.drop_collection() From 283e92d55d1acc219c1dd1f8f0e374300b6bfdbe Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Thu, 7 May 2015 15:11:33 +0200 Subject: [PATCH 03/36] Added hashed index, a bit more of geo-indexes, possibility to give _cls and docs --- docs/guide/defining-documents.rst | 22 ++++++--- mongoengine/base/document.py | 22 ++++++--- mongoengine/document.py | 7 +++ 
tests/document/indexes.py | 79 +++++++++++++++++++++++++++++++ 4 files changed, 118 insertions(+), 12 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 56370c14..13519a28 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -465,19 +465,26 @@ You can specify indexes on collections to make querying faster. This is done by creating a list of index specifications called :attr:`indexes` in the :attr:`~mongoengine.Document.meta` dictionary, where an index specification may either be a single field name, a tuple containing multiple field names, or a -dictionary containing a full index definition. A direction may be specified on -fields by prefixing the field name with a **+** (for ascending) or a **-** sign -(for descending). Note that direction only matters on multi-field indexes. -Text indexes may be specified by prefixing the field name with a **$**. :: +dictionary containing a full index definition. + +A direction may be specified on fields by prefixing the field name with a +**+** (for ascending) or a **-** sign (for descending). Note that direction +only matters on multi-field indexes. Text indexes may be specified by prefixing +the field name with a **$**. Hashed indexes may be specified by prefixing +the field name with a **#**:: class Page(Document): + category = IntField() title = StringField() rating = StringField() created = DateTimeField() meta = { 'indexes': [ 'title', + '$title', # text index + '#title', # hashed index ('title', '-rating'), + ('category', '_cls'), { 'fields': ['created'], 'expireAfterSeconds': 3600 @@ -532,11 +539,14 @@ There are a few top level defaults for all indexes that can be set:: :attr:`index_background` (Optional) Set the default value for if an index should be indexed in the background +:attr:`index_cls` (Optional) + A way to turn off a specific index for _cls. + :attr:`index_drop_dups` (Optional) Set the default value for if an index should drop duplicates -:attr:`index_cls` (Optional) - A way to turn off a specific index for _cls. +.. note:: Since MongoDB 3.0 drop_dups is not supported anymore. 
Raises a Warning + and has no effect Compound Indexes and Indexing sub documents diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 3eba16ca..5707992a 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -782,7 +782,7 @@ class BaseDocument(object): allow_inheritance = cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) include_cls = (allow_inheritance and not spec.get('sparse', False) and - spec.get('cls', True)) + spec.get('cls', True) and '_cls' not in spec['fields']) # 733: don't include cls if index_cls is False unless there is an explicit cls with the index include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True)) @@ -795,16 +795,25 @@ class BaseDocument(object): # ASCENDING from + # DESCENDING from - - # GEO2D from * # TEXT from $ + # HASHED from # + # GEOSPHERE from ( + # GEOHAYSTACK from ) + # GEO2D from * direction = pymongo.ASCENDING if key.startswith("-"): direction = pymongo.DESCENDING - elif key.startswith("*"): - direction = pymongo.GEO2D elif key.startswith("$"): direction = pymongo.TEXT - if key.startswith(("+", "-", "*", "$")): + elif key.startswith("#"): + direction = pymongo.HASHED + elif key.startswith("("): + direction = pymongo.GEOSPHERE + elif key.startswith(")"): + direction = pymongo.GEOHAYSTACK + elif key.startswith("*"): + direction = pymongo.GEO2D + if key.startswith(("+", "-", "*", "$", "#", "(", ")")): key = key[1:] # Use real field name, do it manually because we need field @@ -827,7 +836,8 @@ class BaseDocument(object): index_list.append((key, direction)) # Don't add cls to a geo index - if include_cls and direction is not pymongo.GEO2D: + if include_cls and direction not in ( + pymongo.GEO2D, pymongo.GEOHAYSTACK, pymongo.GEOSPHERE): index_list.insert(0, ('_cls', 1)) if index_list: diff --git a/mongoengine/document.py b/mongoengine/document.py index 838feb81..3d919e7d 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -637,14 +637,19 @@ class Document(BaseDocument): :param key_or_list: a single index key or a list of index keys (to construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering + :param background: Allows index creation in the background + :param drop_dups: Was removed with MongoDB 3. 
The value will be + removed if PyMongo3+ is used """ index_spec = cls._build_index_spec(key_or_list) index_spec = index_spec.copy() fields = index_spec.pop('fields') index_spec['drop_dups'] = drop_dups + # TODO: raise warning if dropdups given and remove with PyMongo3+ index_spec['background'] = background index_spec.update(kwargs) + # TODO: ensure_index is deprecated return cls._get_collection().ensure_index(fields, **index_spec) @classmethod @@ -688,6 +693,7 @@ class Document(BaseDocument): if 'cls' in opts: del opts['cls'] + # TODO: ensure_index is deprecated in PyMongo 3+ and drop_dups removed collection.ensure_index(fields, background=background, drop_dups=drop_dups, **opts) @@ -701,6 +707,7 @@ class Document(BaseDocument): if 'cls' in index_opts: del index_opts['cls'] + # TODO: ensure_index is deprecated in PyMongo 3+ collection.ensure_index('_cls', background=background, **index_opts) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index d43b22e5..cecb8bf2 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -275,6 +275,49 @@ class IndexesTest(unittest.TestCase): info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('current.location.point', '2d')] in info) + def test_explicit_geosphere_index(self): + """Ensure that geosphere indexes work when created via meta[indexes] + """ + class Place(Document): + location = DictField() + meta = { + 'allow_inheritance': True, + 'indexes': [ + '(location.point', + ] + } + + self.assertEqual([{'fields': [('location.point', '2dsphere')]}], + Place._meta['index_specs']) + + Place.ensure_indexes() + info = Place._get_collection().index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertTrue([('location.point', '2dsphere')] in info) + + def test_explicit_geohaystack_index(self): + """Ensure that geohaystack indexes work when created via meta[indexes] + """ + raise SkipTest('GeoHaystack index creation seems broken on PyMongo' + 'side, as it requires a bucketSize parameter.') + + class Place(Document): + location = DictField() + meta = { + 'allow_inheritance': True, + 'indexes': [ + ')location.point', + ] + } + + self.assertEqual([{'fields': [('location.point', 'geoHaystack')]}], + Place._meta['index_specs']) + + Place.ensure_indexes() + info = Place._get_collection().index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertTrue([('location.point', 'geoHaystack')] in info) + def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains dictionaries instead of lists. 
@@ -822,6 +865,18 @@ class IndexesTest(unittest.TestCase): key = indexes["title_text"]["key"] self.assertTrue(('_fts', 'text') in key) + def test_hashed_indexes(self): + + class Book(Document): + ref_id = StringField() + meta = { + "indexes": ["#ref_id"], + } + + indexes = Book.objects._collection.index_information() + self.assertTrue("ref_id_hashed" in indexes) + self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"]) + def test_indexes_after_database_drop(self): """ Test to ensure that indexes are re-created on a collection even @@ -909,6 +964,30 @@ class IndexesTest(unittest.TestCase): } }) + def test_compound_index_underscore_cls_not_overwritten(self): + """ + Test that the compound index doesn't get another _cls when it is specified + """ + class TestDoc(Document): + shard_1 = StringField() + txt_1 = StringField() + + meta = { + 'collection': 'test', + 'allow_inheritance': True, + 'sparse': True, + 'shard_key': 'shard_1', + 'indexes': [ + ('shard_1', '_cls', 'txt_1'), + ] + } + + TestDoc.drop_collection() + TestDoc.ensure_indexes() + + index_info = TestDoc._get_collection().index_information() + self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info) + if __name__ == '__main__': unittest.main() From f35d0b2b3778f249b7b49f42e9a372ecf6deb53a Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Mon, 1 Jun 2015 23:12:43 +0200 Subject: [PATCH 04/36] Added create_index method, warnings for drop_dups and a geohaystack test --- mongoengine/document.py | 72 ++++++++++++++++++++++++++++----------- tests/document/indexes.py | 23 +++++++++---- 2 files changed, 69 insertions(+), 26 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 3d919e7d..e4507c1c 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,4 +1,4 @@ - +import warnings import pymongo import re @@ -17,6 +17,7 @@ from mongoengine.base import ( get_document ) from mongoengine.errors import InvalidQueryError, InvalidDocumentError +from mongoengine.python_support import IS_PYMONGO_3 from mongoengine.queryset import (OperationError, NotUniqueError, QuerySet, transform) from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME @@ -629,28 +630,51 @@ class Document(BaseDocument): db = cls._get_db() db.drop_collection(cls._get_collection_name()) + @classmethod + def create_index(cls, keys, background=False, **kwargs): + """Creates the given indexes if required. + + :param keys: a single index key or a list of index keys (to + construct a multi-field index); keys may be prefixed with a **+** + or a **-** to determine the index ordering + :param background: Allows index creation in the background + """ + index_spec = cls._build_index_spec(keys) + index_spec = index_spec.copy() + fields = index_spec.pop('fields') + drop_dups = kwargs.get('drop_dups', False) + if IS_PYMONGO_3 and drop_dups: + msg = "drop_dups is deprecated and is removed when using PyMongo 3+." + warnings.warn(msg, DeprecationWarning) + elif not IS_PYMONGO_3: + index_spec['drop_dups'] = drop_dups + index_spec['background'] = background + index_spec.update(kwargs) + + if IS_PYMONGO_3: + return cls._get_collection().create_index(fields, **index_spec) + else: + return cls._get_collection().ensure_index(fields, **index_spec) + @classmethod def ensure_index(cls, key_or_list, drop_dups=False, background=False, **kwargs): - """Ensure that the given indexes are in place. + """Ensure that the given indexes are in place. Deprecated in favour + of create_index. 
:param key_or_list: a single index key or a list of index keys (to construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering :param background: Allows index creation in the background - :param drop_dups: Was removed with MongoDB 3. The value will be - removed if PyMongo3+ is used + :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value + will be removed if PyMongo3+ is used """ - index_spec = cls._build_index_spec(key_or_list) - index_spec = index_spec.copy() - fields = index_spec.pop('fields') - index_spec['drop_dups'] = drop_dups - # TODO: raise warning if dropdups given and remove with PyMongo3+ - index_spec['background'] = background - index_spec.update(kwargs) - - # TODO: ensure_index is deprecated - return cls._get_collection().ensure_index(fields, **index_spec) + if IS_PYMONGO_3 and drop_dups: + msg = "drop_dups is deprecated and is removed when using PyMongo 3+." + warnings.warn(msg, DeprecationWarning) + elif not IS_PYMONGO_3: + kwargs.update({'drop_dups': drop_dups}) + return cls.create_index(key_or_list, background=background, **kwargs) @classmethod def ensure_indexes(cls): @@ -665,6 +689,9 @@ class Document(BaseDocument): drop_dups = cls._meta.get('index_drop_dups', False) index_opts = cls._meta.get('index_opts') or {} index_cls = cls._meta.get('index_cls', True) + if IS_PYMONGO_3 and drop_dups: + msg = "drop_dups is deprecated and is removed when using PyMongo 3+." + warnings.warn(msg, DeprecationWarning) collection = cls._get_collection() # 746: when connection is via mongos, the read preference is not necessarily an indication that @@ -693,9 +720,11 @@ class Document(BaseDocument): if 'cls' in opts: del opts['cls'] - # TODO: ensure_index is deprecated in PyMongo 3+ and drop_dups removed - collection.ensure_index(fields, background=background, - drop_dups=drop_dups, **opts) + if IS_PYMONGO_3: + collection.create_index(fields, background=background, **opts) + else: + collection.ensure_index(fields, background=background, + drop_dups=drop_dups, **opts) # If _cls is being used (for polymorphism), it needs an index, # only if another index doesn't begin with _cls @@ -707,9 +736,12 @@ class Document(BaseDocument): if 'cls' in index_opts: del index_opts['cls'] - # TODO: ensure_index is deprecated in PyMongo 3+ - collection.ensure_index('_cls', background=background, - **index_opts) + if IS_PYMONGO_3: + collection.create_index('_cls', background=background, + **index_opts) + else: + collection.ensure_index('_cls', background=background, + **index_opts) @classmethod def list_indexes(cls, go_up=True, go_down=True): diff --git a/tests/document/indexes.py b/tests/document/indexes.py index cecb8bf2..f9f68e57 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -298,19 +298,18 @@ class IndexesTest(unittest.TestCase): def test_explicit_geohaystack_index(self): """Ensure that geohaystack indexes work when created via meta[indexes] """ - raise SkipTest('GeoHaystack index creation seems broken on PyMongo' - 'side, as it requires a bucketSize parameter.') + raise SkipTest('GeoHaystack index creation is not supported for now' + 'from meta, as it requires a bucketSize parameter.') class Place(Document): location = DictField() + name = StringField() meta = { - 'allow_inheritance': True, 'indexes': [ - ')location.point', + (')location.point', 'name') ] } - - self.assertEqual([{'fields': [('location.point', 'geoHaystack')]}], + self.assertEqual([{'fields': [('location.point', 'geoHaystack'), ('name', 1)]}], 
Place._meta['index_specs']) Place.ensure_indexes() @@ -318,6 +317,18 @@ class IndexesTest(unittest.TestCase): info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('location.point', 'geoHaystack')] in info) + def test_create_geohaystack_index(self): + """Ensure that geohaystack indexes can be created + """ + class Place(Document): + location = DictField() + name = StringField() + + Place.ensure_index({'fields': (')location.point', 'name')}, bucketSize=10) + info = Place._get_collection().index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info) + def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains dictionaries instead of lists. From a0257ed7e73383a9bd395877c83351bea8311cd3 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Tue, 2 Jun 2015 00:14:18 +0200 Subject: [PATCH 05/36] Updated test to use new create_index method --- tests/document/indexes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/document/indexes.py b/tests/document/indexes.py index f9f68e57..dcc71e5b 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -324,7 +324,7 @@ class IndexesTest(unittest.TestCase): location = DictField() name = StringField() - Place.ensure_index({'fields': (')location.point', 'name')}, bucketSize=10) + Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) info = Place._get_collection().index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info) From 65d6f8c01817ce7a7c104391db087f9a90f91269 Mon Sep 17 00:00:00 2001 From: Frederik Creemers Date: Tue, 2 Jun 2015 12:35:25 +0200 Subject: [PATCH 06/36] Solution for documentation issue #1003 Solution for documentation issue #1003. The explanation about reverse_delete_rule was a bit mixed up. --- docs/guide/defining-documents.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 79332493..62676983 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -315,9 +315,9 @@ reference with a delete rule specification. A delete rule is specified by supplying the :attr:`reverse_delete_rule` attributes on the :class:`ReferenceField` definition, like this:: - class Employee(Document): + class ProfilePage(Document): ... 
- profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) + profile_page = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE) The declaration in this example means that when an :class:`Employee` object is removed, the :class:`ProfilePage` that belongs to that employee is removed as From 621350515e60118d309767974d57d6a2b5729dd7 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Wed, 3 Jun 2015 01:02:19 +0200 Subject: [PATCH 07/36] Added test was still failing and implemented solution as described in #517 --- docs/changelog.rst | 1 + mongoengine/base/fields.py | 1 + mongoengine/fields.py | 2 +- tests/queryset/queryset.py | 37 ++++++++++++++++++++++++++++++++++++- 4 files changed, 39 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ffbc8c54..42d98597 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -19,6 +19,7 @@ Changes in 0.9.X - DEV - Added __ support to escape field name in fields lookup keywords that match operators names #949 - Support for PyMongo 3+ #946 - Fix for issue where FileField deletion did not free space in GridFS. +- No_dereference() not respected on embedded docs containing reference. #517 Changes in 0.9.0 ================ diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 91f95b4f..b3ec0763 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -290,6 +290,7 @@ class ComplexBaseField(BaseField): return value if self.field: + self.field._auto_dereference = self._auto_dereference value_dict = dict([(key, self.field.to_python(item)) for key, item in value.items()]) else: diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 53d4ac95..9176828c 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -546,7 +546,7 @@ class EmbeddedDocumentField(BaseField): def to_python(self, value): if not isinstance(value, self.document_type): - return self.document_type._from_son(value) + return self.document_type._from_son(value, _auto_dereference=self._auto_dereference) return value def to_mongo(self, value, use_db_field=True, fields=[]): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 65d84305..3dc3ef20 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -13,7 +13,7 @@ import pymongo from pymongo.errors import ConfigurationError from pymongo.read_preferences import ReadPreference -from bson import ObjectId +from bson import ObjectId, DBRef from mongoengine import * from mongoengine.connection import get_connection, get_db @@ -4185,6 +4185,41 @@ class QuerySetTest(unittest.TestCase): Organization)) self.assertTrue(isinstance(qs.first().organization, Organization)) + def test_no_dereference_embedded_doc(self): + + class User(Document): + name = StringField() + + class Member(EmbeddedDocument): + name = StringField() + user = ReferenceField(User) + + class Organization(Document): + name = StringField() + members = ListField(EmbeddedDocumentField(Member)) + ceo = ReferenceField(User) + member = EmbeddedDocumentField(Member) + admin = ListField(ReferenceField(User)) + + Organization.drop_collection() + User.drop_collection() + + user = User(name="Flash") + user.save() + + member = Member(name="Flash", user=user) + + company = Organization(name="Mongo Inc", ceo=user, member=member) + company.admin.append(user) + company.members.append(member) + company.save() + + result = Organization.objects().no_dereference().first() + + self.assertTrue(isinstance(result.admin[0], (DBRef, 
ObjectId))) + self.assertTrue(isinstance(result.member.user, (DBRef, ObjectId))) + self.assertTrue(isinstance(result.members[0].user, (DBRef, ObjectId))) + def test_cached_queryset(self): class Person(Document): name = StringField() From 0f63e26641a113feae8e21a81beb0c55ffc996a2 Mon Sep 17 00:00:00 2001 From: Marcel van den Elst Date: Thu, 4 Jun 2015 15:02:32 +0200 Subject: [PATCH 08/36] use AssertEqual instead of AssertListEqual for py2.6 compatibility --- tests/queryset/queryset.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 6e4d86d2..8daccfd0 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -649,13 +649,13 @@ class QuerySetTest(unittest.TestCase): o.owner = p p.name = 'p2' - self.assertListEqual( o._get_changed_fields(), [ 'owner' ] ) - self.assertListEqual( p._get_changed_fields(), [ 'name' ] ) + self.assertEqual( o._get_changed_fields(), [ 'owner' ] ) + self.assertEqual( p._get_changed_fields(), [ 'name' ] ) o.save() - self.assertListEqual( o._get_changed_fields(), [] ) - self.assertListEqual( p._get_changed_fields(), [ 'name' ] ) # Fails; it's empty + self.assertEqual( o._get_changed_fields(), [] ) + self.assertEqual( p._get_changed_fields(), [ 'name' ] ) # Fails; it's empty # This will do NOTHING at all, even though we changed the name p.save() From 91aa4586e2a7684ff3a73f21a032e1702c04dbc5 Mon Sep 17 00:00:00 2001 From: Frederik Creemers Date: Thu, 4 Jun 2015 22:38:11 +0200 Subject: [PATCH 09/36] Fixes after code review --- docs/guide/defining-documents.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 62676983..4d81b054 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -317,10 +317,10 @@ supplying the :attr:`reverse_delete_rule` attributes on the class ProfilePage(Document): ... - profile_page = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE) + employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE) The declaration in this example means that when an :class:`Employee` object is -removed, the :class:`ProfilePage` that belongs to that employee is removed as +removed, the :class:`ProfilePage` that references that employee is removed as well. If a whole batch of employees is removed, all profile pages that are linked are removed as well. From 3e000f9be1d7e4fdd93317b6da0ab4a0e07b5431 Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Thu, 21 May 2015 17:35:51 +0200 Subject: [PATCH 10/36] Raise error if save_condition fails #991 --- mongoengine/document.py | 6 +++++- tests/document/instance.py | 11 +++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 060919ca..2c4872e9 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -266,7 +266,8 @@ class Document(BaseDocument): to cascading saves. Implies ``cascade=True``. :param _refs: A list of processed references used in cascading saves :param save_condition: only perform save if matching record in db - satisfies condition(s) (e.g., version number) + satisfies condition(s) (e.g., version number). + Raises :class:`OperationError` if the conditions are not satisfied .. 
versionchanged:: 0.5 In existing documents it only saves changed fields using @@ -348,6 +349,9 @@ class Document(BaseDocument): upsert = save_condition is None last_error = collection.update(select_dict, update_query, upsert=upsert, **write_concern) + if save_condition is not None and last_error['nModified'] == 0: + raise OperationError('Race condition preventing' + ' document update detected') created = is_new_object(last_error) if cascade is None: diff --git a/tests/document/instance.py b/tests/document/instance.py index 2cfdef65..3ccabacd 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -954,11 +954,12 @@ class InstanceTest(unittest.TestCase): self.assertEqual(w1.save_id, UUID(1)) self.assertEqual(w1.count, 0) - # mismatch in save_condition prevents save + # mismatch in save_condition prevents save and raise exception flip(w1) self.assertTrue(w1.toggle) self.assertEqual(w1.count, 1) - w1.save(save_condition={'save_id': UUID(42)}) + self.assertRaises(OperationError, + w1.save, save_condition={'save_id': UUID(42)}) w1.reload() self.assertFalse(w1.toggle) self.assertEqual(w1.count, 0) @@ -986,7 +987,8 @@ class InstanceTest(unittest.TestCase): self.assertEqual(w1.count, 2) flip(w2) flip(w2) - w2.save(save_condition={'save_id': old_id}) + self.assertRaises(OperationError, + w2.save, save_condition={'save_id': old_id}) w2.reload() self.assertFalse(w2.toggle) self.assertEqual(w2.count, 2) @@ -998,7 +1000,8 @@ class InstanceTest(unittest.TestCase): self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) flip(w1) - w1.save(save_condition={'count__gte': w1.count}) + self.assertRaises(OperationError, + w1.save, save_condition={'count__gte': w1.count}) w1.reload() self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) From 682db9b81f24d9160bc5707296ac3a1b507f200a Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Tue, 2 Jun 2015 17:44:57 +0200 Subject: [PATCH 11/36] Add versionchanged to document save_condition --- mongoengine/document.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 2c4872e9..f798780e 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -266,7 +266,7 @@ class Document(BaseDocument): to cascading saves. Implies ``cascade=True``. :param _refs: A list of processed references used in cascading saves :param save_condition: only perform save if matching record in db - satisfies condition(s) (e.g., version number). + satisfies condition(s) (e.g. version number). Raises :class:`OperationError` if the conditions are not satisfied .. versionchanged:: 0.5 @@ -285,6 +285,8 @@ class Document(BaseDocument): .. versionchanged:: 0.8.5 Optional save_condition that only overwrites existing documents if the condition is satisfied in the current db record. + .. versionchanged:: 0.10 + :class:`OperationError` exception raised if save_condition fails. 
""" signals.pre_save.send(self.__class__, document=self) From cca0222e1d415346eb1ee439ddba21daaf530851 Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Tue, 2 Jun 2015 17:45:10 +0200 Subject: [PATCH 12/36] Update AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 37170ffa..f64093d8 100644 --- a/AUTHORS +++ b/AUTHORS @@ -223,3 +223,4 @@ that much better: * Kiryl Yermakou (https://github.com/rma4ok) * Matthieu Rigal (https://github.com/MRigal) * Charanpal Dhanjal (https://github.com/charanpald) + * Emmanuel Leblond (https://github.com/touilleMan) From 9c917c3bd389a222b3400dc61fb95de4c322cb75 Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Tue, 2 Jun 2015 17:48:31 +0200 Subject: [PATCH 13/36] Update changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 42d98597..52e76ffc 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -20,6 +20,7 @@ Changes in 0.9.X - DEV - Support for PyMongo 3+ #946 - Fix for issue where FileField deletion did not free space in GridFS. - No_dereference() not respected on embedded docs containing reference. #517 +- Document save raise an exception if save_condition fails Changes in 0.9.0 ================ From 4034ab41829ba7da18af5dc2e8cc1544dbff8614 Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Mon, 8 Jun 2015 18:40:28 +0200 Subject: [PATCH 14/36] Clean save_condition exception implementation and related tests --- mongoengine/document.py | 2 +- tests/document/instance.py | 15 +++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index f798780e..eedd01d2 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -351,7 +351,7 @@ class Document(BaseDocument): upsert = save_condition is None last_error = collection.update(select_dict, update_query, upsert=upsert, **write_concern) - if save_condition is not None and last_error['nModified'] == 0: + if not upsert and last_error['nModified'] == 0: raise OperationError('Race condition preventing' ' document update detected') created = is_new_object(last_error) diff --git a/tests/document/instance.py b/tests/document/instance.py index 3ccabacd..29692c20 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -958,8 +958,9 @@ class InstanceTest(unittest.TestCase): flip(w1) self.assertTrue(w1.toggle) self.assertEqual(w1.count, 1) - self.assertRaises(OperationError, - w1.save, save_condition={'save_id': UUID(42)}) + self.assertRaisesRegexp(OperationError, + "Race condition preventing document update detected", + w1.save, save_condition={'save_id': UUID(42)}) w1.reload() self.assertFalse(w1.toggle) self.assertEqual(w1.count, 0) @@ -987,8 +988,9 @@ class InstanceTest(unittest.TestCase): self.assertEqual(w1.count, 2) flip(w2) flip(w2) - self.assertRaises(OperationError, - w2.save, save_condition={'save_id': old_id}) + self.assertRaisesRegexp(OperationError, + "Race condition preventing document update detected", + w2.save, save_condition={'save_id': old_id}) w2.reload() self.assertFalse(w2.toggle) self.assertEqual(w2.count, 2) @@ -1000,8 +1002,9 @@ class InstanceTest(unittest.TestCase): self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) flip(w1) - self.assertRaises(OperationError, - w1.save, save_condition={'count__gte': w1.count}) + self.assertRaisesRegexp(OperationError, + "Race condition preventing document update detected", + w1.save, save_condition={'count__gte': w1.count}) 
w1.reload() self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) From 153c239c9bcb9acbc3f87c0d89995a128e4e934f Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Thu, 11 Jun 2015 14:36:51 +0200 Subject: [PATCH 15/36] Replace assertRaisesRegexp by assertRaises (python2.6 compatibility) --- tests/document/instance.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/document/instance.py b/tests/document/instance.py index 29692c20..e1710b9f 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -958,8 +958,7 @@ class InstanceTest(unittest.TestCase): flip(w1) self.assertTrue(w1.toggle) self.assertEqual(w1.count, 1) - self.assertRaisesRegexp(OperationError, - "Race condition preventing document update detected", + self.assertRaises(OperationError, w1.save, save_condition={'save_id': UUID(42)}) w1.reload() self.assertFalse(w1.toggle) @@ -988,8 +987,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(w1.count, 2) flip(w2) flip(w2) - self.assertRaisesRegexp(OperationError, - "Race condition preventing document update detected", + self.assertRaises(OperationError, w2.save, save_condition={'save_id': old_id}) w2.reload() self.assertFalse(w2.toggle) @@ -1002,8 +1000,7 @@ class InstanceTest(unittest.TestCase): self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) flip(w1) - self.assertRaisesRegexp(OperationError, - "Race condition preventing document update detected", + self.assertRaises(OperationError, w1.save, save_condition={'count__gte': w1.count}) w1.reload() self.assertTrue(w1.toggle) From cd7a9345ec624094e3811bf0c5ec2ed92d4ccc4d Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Thu, 11 Jun 2015 14:45:19 +0200 Subject: [PATCH 16/36] Add issue related in changelog.rst --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 52e76ffc..65e236ba 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -20,7 +20,7 @@ Changes in 0.9.X - DEV - Support for PyMongo 3+ #946 - Fix for issue where FileField deletion did not free space in GridFS. - No_dereference() not respected on embedded docs containing reference. 
#517 -- Document save raise an exception if save_condition fails +- Document save raise an exception if save_condition fails #1005 Changes in 0.9.0 ================ From 1951b52aa5b62839f000deacb5ef9a02592c03f4 Mon Sep 17 00:00:00 2001 From: Emmanuel Leblond Date: Tue, 9 Jun 2015 16:20:35 +0200 Subject: [PATCH 17/36] Fix #1017 (document clash between same ids but different collections) --- mongoengine/dereference.py | 25 ++++++++++++++++--------- tests/test_dereference.py | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 9 deletions(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 0428397c..8e8920d4 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -128,21 +128,25 @@ class DeReference(object): """ object_map = {} for collection, dbrefs in self.reference_map.iteritems(): - refs = [dbref for dbref in dbrefs - if unicode(dbref).encode('utf-8') not in object_map] if hasattr(collection, 'objects'): # We have a document class for the refs + col_name = collection._get_collection_name() + refs = [dbref for dbref in dbrefs + if (col_name, dbref) not in object_map] references = collection.objects.in_bulk(refs) for key, doc in references.iteritems(): - object_map[key] = doc + object_map[(col_name, key)] = doc else: # Generic reference: use the refs data to convert to document if isinstance(doc_type, (ListField, DictField, MapField,)): continue + refs = [dbref for dbref in dbrefs + if (collection, dbref) not in object_map] + if doc_type: references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) for ref in references: doc = doc_type._from_son(ref) - object_map[doc.id] = doc + object_map[(collection, doc.id)] = doc else: references = get_db()[collection].find({'_id': {'$in': refs}}) for ref in references: @@ -154,7 +158,7 @@ class DeReference(object): for x in collection.split('_')))._from_son(ref) else: doc = doc_type._from_son(ref) - object_map[doc.id] = doc + object_map[(collection, doc.id)] = doc return object_map def _attach_objects(self, items, depth=0, instance=None, name=None): @@ -180,7 +184,8 @@ class DeReference(object): if isinstance(items, (dict, SON)): if '_ref' in items: - return self.object_map.get(items['_ref'].id, items) + return self.object_map.get( + (items['_ref'].collection, items['_ref'].id), items) elif '_cls' in items: doc = get_document(items['_cls'])._from_son(items) _cls = doc._data.pop('_cls', None) @@ -216,9 +221,11 @@ class DeReference(object): for field_name, field in v._fields.iteritems(): v = data[k]._data.get(field_name, None) if isinstance(v, (DBRef)): - data[k]._data[field_name] = self.object_map.get(v.id, v) + data[k]._data[field_name] = self.object_map.get( + (v.collection, v.id), v) elif isinstance(v, (dict, SON)) and '_ref' in v: - data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) + data[k]._data[field_name] = self.object_map.get( + (v['_ref'].collection , v['_ref'].id), v) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: item_name = "{0}.{1}.{2}".format(name, k, field_name) data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) @@ -226,7 +233,7 @@ class DeReference(object): item_name = '%s.%s' % (name, k) if name else name data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) elif hasattr(v, 'id'): - data[k] = self.object_map.get(v.id, v) + data[k] = self.object_map.get((v.collection, v.id), v) if instance and name: if is_list: diff --git a/tests/test_dereference.py 
b/tests/test_dereference.py index 2115b45a..e1ae3740 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1026,6 +1026,43 @@ class FieldTest(unittest.TestCase): self.assertEqual(type(foo.bar), Bar) self.assertEqual(type(foo.baz), Baz) + + def test_document_reload_reference_integrity(self): + """ + Ensure reloading a document with multiple similar id + in different collections doesn't mix them. + """ + class Topic(Document): + id = IntField(primary_key=True) + class User(Document): + id = IntField(primary_key=True) + name = StringField() + class Message(Document): + id = IntField(primary_key=True) + topic = ReferenceField(Topic) + author = ReferenceField(User) + + Topic.drop_collection() + User.drop_collection() + Message.drop_collection() + + # All objects share the same id, but each in a different collection + topic = Topic(id=1).save() + user = User(id=1, name='user-name').save() + Message(id=1, topic=topic, author=user).save() + + concurrent_change_user = User.objects.get(id=1) + concurrent_change_user.name = 'new-name' + concurrent_change_user.save() + self.assertNotEqual(user.name, 'new-name') + + msg = Message.objects.get(id=1) + msg.reload() + self.assertEqual(msg.topic, topic) + self.assertEqual(msg.author, user) + self.assertEqual(msg.author.name, 'new-name') + + def test_list_lookup_not_checked_in_map(self): """Ensure we dereference list data correctly """ From a2f0f20284ed28c8633ebc6b01a6e9ca71b04f83 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Thu, 11 Jun 2015 17:48:34 +0200 Subject: [PATCH 18/36] Improve error message for invalid query --- mongoengine/base/document.py | 9 +++++++-- mongoengine/queryset/transform.py | 2 +- tests/queryset/transform.py | 9 +++++++++ 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 5707992a..1ab19e75 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -983,8 +983,13 @@ class BaseDocument(object): if hasattr(getattr(field, 'field', None), 'lookup_member'): new_field = field.field.lookup_member(field_name) else: - # Look up subfield on the previous field - new_field = field.lookup_member(field_name) + # Look up subfield on the previous field or raise + try: + new_field = field.lookup_member(field_name) + except AttributeError: + raise LookUpError('Cannot resolve subfield or operator {} ' + 'on the field {}'.format( + field_name, field.name)) if not new_field and isinstance(field, ComplexBaseField): if hasattr(field.field, 'document_type') and cls._dynamic \ and field.field.document_type._dynamic: diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index c43c4b40..f6cfa87e 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -44,7 +44,7 @@ def query(_doc_cls=None, _field_operation=False, **query): if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: op = parts.pop() - #if user escape field name by __ + # if user escape field name by __ if len(parts) > 1 and parts[-1] == "": parts.pop() diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index 77d3593c..a543317a 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -224,6 +224,15 @@ class TransformTest(unittest.TestCase): self.assertEqual(1, Doc.objects(item__type__="axe").count()) self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count()) + def test_understandable_error_raised(self): + class Event(Document): + title = StringField() + location = 
GeoPointField() + + box = [(35.0, -125.0), (40.0, -100.0)] + # I *meant* to execute location__within_box=box + events = Event.objects(location__within=box) + self.assertRaises(InvalidQueryError, lambda: events.count()) if __name__ == '__main__': unittest.main() From 1862bcf86709c7e58379ee5c5a0baa52b0347d78 Mon Sep 17 00:00:00 2001 From: mrigal Date: Thu, 16 Apr 2015 14:33:16 +0200 Subject: [PATCH 19/36] added test for abstract document without pk creation and adapted behaviour --- mongoengine/base/document.py | 2 +- mongoengine/document.py | 2 ++ tests/document/inheritance.py | 11 +++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 5707992a..7142a3de 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -184,7 +184,7 @@ class BaseDocument(object): self__initialised = False # Check if the user has created a new instance of a class if (self._is_document and self__initialised - and self__created and name == self._meta['id_field']): + and self__created and name == self._meta.get('id_field')): super(BaseDocument, self).__setattr__('_created', False) super(BaseDocument, self).__setattr__(name, value) diff --git a/mongoengine/document.py b/mongoengine/document.py index eedd01d2..01b69fb2 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -152,6 +152,8 @@ class Document(BaseDocument): """ def fget(self): + if not 'id_field' in self._meta: + return None return getattr(self, self._meta['id_field']) def fset(self, value): diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index e8347054..afa3ccea 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -307,6 +307,17 @@ class InheritanceTest(unittest.TestCase): doc = Animal(name='dog') self.assertFalse('_cls' in doc.to_mongo()) + def test_abstract_document_creation_does_not_fail(self): + + class City(Document): + continent = StringField() + meta = {'abstract': True, + 'allow_inheritance': False} + bkk = City(continent='asia') + self.assertEqual(None, bkk.pk) + # TODO: expected error? 
Shouldn'twe created a new error type + self.assertRaises(KeyError, lambda: setattr(bkk, 'pk', 1)) + def test_allow_inheritance_embedded_document(self): """Ensure embedded documents respect inheritance """ From 53fbc165ba84b0314d2c10f16bf9425bbba44fa2 Mon Sep 17 00:00:00 2001 From: mrigal Date: Thu, 16 Apr 2015 14:37:46 +0200 Subject: [PATCH 20/36] added content of PR #688 with a test to proove it is a bit right --- mongoengine/base/metaclasses.py | 14 ++++++++++---- tests/document/inheritance.py | 17 +++++++++++++++++ 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 8a25ff3d..6fdbe19c 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -390,10 +390,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class._fields['id'] = ObjectIdField(db_field='_id') new_class._fields['id'].name = 'id' new_class.id = new_class._fields['id'] - - # Prepend id field to _fields_ordered - if 'id' in new_class._fields and 'id' not in new_class._fields_ordered: - new_class._fields_ordered = ('id', ) + new_class._fields_ordered + new_class._db_field_map['id'] = '_id' + new_class._reverse_db_field_map['_id'] = 'id' + if 'id' in new_class._fields_ordered: + # An existing id field will be overwritten anyway, so remove it + loc = new_class._fields_ordered.index('id') + new_class._fields_ordered = new_class._fields_ordered[:loc] + \ + new_class._fields_ordered[loc+1:] + else: + # Prepend id field to _fields_ordered + new_class._fields_ordered = ('id', ) + new_class._fields_ordered # Merge in exceptions with parent hierarchy exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index afa3ccea..e408e611 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -307,6 +307,23 @@ class InheritanceTest(unittest.TestCase): doc = Animal(name='dog') self.assertFalse('_cls' in doc.to_mongo()) + def test_abstract_handle_ids_in_metaclass_properly(self): + + class City(Document): + continent = StringField() + meta = {'abstract': True, + 'allow_inheritance': False} + + class EuropeanCity(City): + name = StringField() + country = StringField() + + berlin = EuropeanCity(name='Berlin', continent='Europe') + self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) + self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) + self.assertEqual(len(berlin._fields_ordered), 4) + self.assertEqual(berlin._fields_ordered[0], 'id') + def test_abstract_document_creation_does_not_fail(self): class City(Document): From 051cd744adbe9a00ee3e3630167bfd80bc431589 Mon Sep 17 00:00:00 2001 From: mrigal Date: Thu, 16 Apr 2015 14:38:30 +0200 Subject: [PATCH 21/36] added another test to proove we still do not handle all cases well --- tests/document/inheritance.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index e408e611..cbc0d0b7 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -324,6 +324,24 @@ class InheritanceTest(unittest.TestCase): self.assertEqual(len(berlin._fields_ordered), 4) self.assertEqual(berlin._fields_ordered[0], 'id') + def test_auto_id_vs_non_pk_id_field(self): + + class City(Document): + continent = StringField() + id = IntField() + meta = {'abstract': True, + 'allow_inheritance': False} + + class EuropeanCity(City): + name = StringField() + 
country = StringField() + + berlin = EuropeanCity(name='Berlin', continent='Europe') + self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) + self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) + self.assertEqual(len(berlin._fields_ordered), 5) + self.assertEqual(berlin._fields_ordered[0], 'id') + def test_abstract_document_creation_does_not_fail(self): class City(Document): From 2e963023368372bb975bd671124686b20db822a6 Mon Sep 17 00:00:00 2001 From: mrigal Date: Thu, 16 Apr 2015 14:53:28 +0200 Subject: [PATCH 22/36] not in fix --- mongoengine/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 01b69fb2..7b05ef8c 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -152,7 +152,7 @@ class Document(BaseDocument): """ def fget(self): - if not 'id_field' in self._meta: + if 'id_field' not in self._meta: return None return getattr(self, self._meta['id_field']) From 915849b2ce7706dbf091207f92ff7d9e37291823 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Thu, 11 Jun 2015 16:39:20 +0200 Subject: [PATCH 23/36] Implemented method to auto-generate non-collisioning auto_id names --- docs/changelog.rst | 1 + docs/guide/defining-documents.rst | 2 +- mongoengine/base/metaclasses.py | 38 +++++++++++++++++++------------ tests/document/inheritance.py | 29 ++++++++++++++++++----- tests/queryset/queryset.py | 2 -- 5 files changed, 49 insertions(+), 23 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 65e236ba..dfa9c7a6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -21,6 +21,7 @@ Changes in 0.9.X - DEV - Fix for issue where FileField deletion did not free space in GridFS. - No_dereference() not respected on embedded docs containing reference. #517 - Document save raise an exception if save_condition fails #1005 +- Fixes some internal _id handling issue. #961 Changes in 0.9.0 ================ diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index d1448158..4572d30b 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -310,7 +310,7 @@ Dealing with deletion of referred documents By default, MongoDB doesn't check the integrity of your data, so deleting documents that other documents still hold references to will lead to consistency issues. Mongoengine's :class:`ReferenceField` adds some functionality to -safeguard against these kinds of database integrity problems, providing each +safeguard against these kinds of database integrit2y problems, providing each reference with a delete rule specification. 
A delete rule is specified by supplying the :attr:`reverse_delete_rule` attributes on the :class:`ReferenceField` definition, like this:: diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 6fdbe19c..f608940d 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -385,21 +385,17 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class._auto_id_field = getattr(parent_doc_cls, '_auto_id_field', False) if not new_class._meta.get('id_field'): + # After 0.10, find not existing names, instead of overwriting + id_name, id_db_name = cls.get_auto_id_names(new_class) new_class._auto_id_field = True - new_class._meta['id_field'] = 'id' - new_class._fields['id'] = ObjectIdField(db_field='_id') - new_class._fields['id'].name = 'id' - new_class.id = new_class._fields['id'] - new_class._db_field_map['id'] = '_id' - new_class._reverse_db_field_map['_id'] = 'id' - if 'id' in new_class._fields_ordered: - # An existing id field will be overwritten anyway, so remove it - loc = new_class._fields_ordered.index('id') - new_class._fields_ordered = new_class._fields_ordered[:loc] + \ - new_class._fields_ordered[loc+1:] - else: - # Prepend id field to _fields_ordered - new_class._fields_ordered = ('id', ) + new_class._fields_ordered + new_class._meta['id_field'] = id_name + new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) + new_class._fields[id_name].name = id_name + new_class.id = new_class._fields[id_name] + new_class._db_field_map[id_name] = id_db_name + new_class._reverse_db_field_map[id_db_name] = id_name + # Prepend id field to _fields_ordered + new_class._fields_ordered = (id_name, ) + new_class._fields_ordered # Merge in exceptions with parent hierarchy exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) @@ -414,6 +410,20 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): return new_class + def get_auto_id_names(self): + id_name, id_db_name = ('id', '_id') + if id_name not in self._fields and \ + id_db_name not in (v.db_field for v in self._fields.values()): + return id_name, id_db_name + id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 + while id_name in self._fields or \ + id_db_name in (v.db_field for v in self._fields.values()): + id_name = '{}_{}'.format(id_basename, i) + id_db_name = '{}_{}'.format(id_db_basename, i) + i += 1 + return id_name, id_db_name + + class MetaDict(dict): diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index cbc0d0b7..7673a103 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -316,14 +316,30 @@ class InheritanceTest(unittest.TestCase): class EuropeanCity(City): name = StringField() - country = StringField() berlin = EuropeanCity(name='Berlin', continent='Europe') self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 4) + self.assertEqual(len(berlin._fields_ordered), 3) self.assertEqual(berlin._fields_ordered[0], 'id') + def test_auto_id_not_set_if_specific_in_parent_class(self): + + class City(Document): + continent = StringField() + city_id = IntField(primary_key=True) + meta = {'abstract': True, + 'allow_inheritance': False} + + class EuropeanCity(City): + name = StringField() + + berlin = EuropeanCity(name='Berlin', continent='Europe') + self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) + self.assertEqual(len(berlin._reverse_db_field_map), 
len(berlin._fields_ordered)) + self.assertEqual(len(berlin._fields_ordered), 3) + self.assertEqual(berlin._fields_ordered[0], 'city_id') + def test_auto_id_vs_non_pk_id_field(self): class City(Document): @@ -334,13 +350,14 @@ class InheritanceTest(unittest.TestCase): class EuropeanCity(City): name = StringField() - country = StringField() berlin = EuropeanCity(name='Berlin', continent='Europe') self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 5) - self.assertEqual(berlin._fields_ordered[0], 'id') + self.assertEqual(len(berlin._fields_ordered), 4) + self.assertEqual(berlin._fields_ordered[0], 'auto_id_0') + berlin.save() + self.assertEqual(berlin.pk, berlin.auto_id_0) def test_abstract_document_creation_does_not_fail(self): @@ -350,7 +367,7 @@ class InheritanceTest(unittest.TestCase): 'allow_inheritance': False} bkk = City(continent='asia') self.assertEqual(None, bkk.pk) - # TODO: expected error? Shouldn'twe created a new error type + # TODO: expected error? Shouldn't we create a new error type? self.assertRaises(KeyError, lambda: setattr(bkk, 'pk', 1)) def test_allow_inheritance_embedded_document(self): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 0965e40f..83fb52df 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -3689,11 +3689,9 @@ class QuerySetTest(unittest.TestCase): def test_scalar(self): class Organization(Document): - id = ObjectIdField('_id') name = StringField() class User(Document): - id = ObjectIdField('_id') name = StringField() organization = ObjectIdField() From 810819861354914a217a58e6f7e9c772dca4549a Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Thu, 11 Jun 2015 22:38:05 +0200 Subject: [PATCH 24/36] corrected formatting for Python 2.6 compatibility --- mongoengine/base/metaclasses.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index f608940d..16e6bd29 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -418,8 +418,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 while id_name in self._fields or \ id_db_name in (v.db_field for v in self._fields.values()): - id_name = '{}_{}'.format(id_basename, i) - id_db_name = '{}_{}'.format(id_db_basename, i) + id_name = '{0}_{1}'.format(id_basename, i) + id_db_name = '{0}_{1}'.format(id_db_basename, i) i += 1 return id_name, id_db_name From e1da83a8f6e835478e2e039cc241529c2384d2f2 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Fri, 12 Jun 2015 09:12:19 +0200 Subject: [PATCH 25/36] Cosmetic --- docs/guide/defining-documents.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 4572d30b..d1448158 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -310,7 +310,7 @@ Dealing with deletion of referred documents By default, MongoDB doesn't check the integrity of your data, so deleting documents that other documents still hold references to will lead to consistency issues. 
Mongoengine's :class:`ReferenceField` adds some functionality to
-safeguard against these kinds of database integrit2y problems, providing each
+safeguard against these kinds of database integrity problems, providing each
 reference with a delete rule specification.  A delete rule is specified by
 supplying the :attr:`reverse_delete_rule` attributes on the
 :class:`ReferenceField` definition, like this::

From 3093175f544e9f976caada8d4dbe6dc2f99a0108 Mon Sep 17 00:00:00 2001
From: Matthieu Rigal
Date: Fri, 12 Jun 2015 11:03:52 +0200
Subject: [PATCH 26/36] SequenceField for abstract classes now have a proper name

---
 mongoengine/base/document.py    |  2 +-
 mongoengine/base/metaclasses.py |  1 -
 mongoengine/document.py         |  8 ++---
 mongoengine/fields.py           |  4 +--
 tests/fields/fields.py          | 52 +++++++++++++++++++++++++++++++++
 5 files changed, 59 insertions(+), 8 deletions(-)

diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py
index 9357fde2..1049eabd 100644
--- a/mongoengine/base/document.py
+++ b/mongoengine/base/document.py
@@ -672,7 +672,7 @@ class BaseDocument(object):
 
     @classmethod
     def _get_collection_name(cls):
-        """Returns the collection name for this class.
+        """Returns the collection name for this class. None for abstract class
         """
         return cls._meta.get('collection', None)
diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py
index 16e6bd29..d4c26bfe 100644
--- a/mongoengine/base/metaclasses.py
+++ b/mongoengine/base/metaclasses.py
@@ -424,7 +424,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
         return id_name, id_db_name
 
-
 class MetaDict(dict):
     """Custom dictionary for meta classes.
diff --git a/mongoengine/document.py b/mongoengine/document.py
index 7b05ef8c..885f7eed 100644
--- a/mongoengine/document.py
+++ b/mongoengine/document.py
@@ -145,7 +145,7 @@ class Document(BaseDocument):
     my_metaclass = TopLevelDocumentMetaclass
     __metaclass__ = TopLevelDocumentMetaclass
 
-    __slots__ = ('__objects')
+    __slots__ = ('__objects',)
 
     def pk():
         """Primary key alias
@@ -174,10 +174,10 @@ class Document(BaseDocument):
         db = cls._get_db()
         collection_name = cls._get_collection_name()
         # Create collection as a capped collection if specified
-        if cls._meta['max_size'] or cls._meta['max_documents']:
+        if cls._meta.get('max_size') or cls._meta.get('max_documents'):
             # Get max document limit and max byte size from meta
-            max_size = cls._meta['max_size'] or 10000000  # 10MB default
-            max_documents = cls._meta['max_documents']
+            max_size = cls._meta.get('max_size') or 10000000  # 10MB default
+            max_documents = cls._meta.get('max_documents')
 
             if collection_name in db.collection_names():
                 cls._collection = db[collection_name]
diff --git a/mongoengine/fields.py b/mongoengine/fields.py
index 9176828c..94f41d28 100644
--- a/mongoengine/fields.py
+++ b/mongoengine/fields.py
@@ -1694,7 +1694,7 @@ class SequenceField(BaseField):
         self.sequence_name = sequence_name
         self.value_decorator = (callable(value_decorator) and value_decorator
                                 or self.VALUE_DECORATOR)
-        return super(SequenceField, self).__init__(*args, **kwargs)
+        super(SequenceField, self).__init__(*args, **kwargs)
 
     def generate(self):
         """
@@ -1740,7 +1740,7 @@ class SequenceField(BaseField):
         if self.sequence_name:
             return self.sequence_name
         owner = self.owner_document
-        if issubclass(owner, Document):
+        if issubclass(owner, Document) and not owner._meta.get('abstract'):
             return owner._get_collection_name()
         else:
             return ''.join('_%s' % c if c.isupper() else c
diff --git a/tests/fields/fields.py b/tests/fields/fields.py
index 
fd083c73..9f9bf1dd 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -39,6 +39,7 @@ class FieldTest(unittest.TestCase): def tearDown(self): self.db.drop_collection('fs.files') self.db.drop_collection('fs.chunks') + self.db.drop_collection('mongoengine.counters') def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. @@ -2954,6 +2955,57 @@ class FieldTest(unittest.TestCase): self.assertEqual(1, post.comments[0].id) self.assertEqual(2, post.comments[1].id) + def test_inherited_sequencefield(self): + class Base(Document): + name = StringField() + counter = SequenceField() + meta = {'abstract': True} + + class Foo(Base): + pass + + class Bar(Base): + pass + + bar = Bar(name='Bar') + bar.save() + + foo = Foo(name='Foo') + foo.save() + + self.assertTrue('base.counter' in + self.db['mongoengine.counters'].find().distinct('_id')) + self.assertFalse(('foo.counter' or 'bar.counter') in + self.db['mongoengine.counters'].find().distinct('_id')) + self.assertNotEqual(foo.counter, bar.counter) + self.assertEqual(foo._fields['counter'].owner_document, Base) + self.assertEqual(bar._fields['counter'].owner_document, Base) + + def test_no_inherited_sequencefield(self): + class Base(Document): + name = StringField() + meta = {'abstract': True} + + class Foo(Base): + counter = SequenceField() + + class Bar(Base): + counter = SequenceField() + + bar = Bar(name='Bar') + bar.save() + + foo = Foo(name='Foo') + foo.save() + + self.assertFalse('base.counter' in + self.db['mongoengine.counters'].find().distinct('_id')) + self.assertTrue(('foo.counter' and 'bar.counter') in + self.db['mongoengine.counters'].find().distinct('_id')) + self.assertEqual(foo.counter, bar.counter) + self.assertEqual(foo._fields['counter'].owner_document, Foo) + self.assertEqual(bar._fields['counter'].owner_document, Bar) + def test_generic_embedded_document(self): class Car(EmbeddedDocument): name = StringField() From eec876295d1e3feb5fd2e360b3b775a15ab2b14a Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Fri, 12 Jun 2015 12:13:28 +0200 Subject: [PATCH 27/36] Added passing test to prove save and only problem was fixed --- tests/queryset/queryset.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 83fb52df..981d47f4 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -693,6 +693,43 @@ class QuerySetTest(unittest.TestCase): self.assertEqual("Bob", bob.name) self.assertEqual(30, bob.age) + def test_save_and_only_on_fields_with_default(self): + class Embed(EmbeddedDocument): + field = IntField() + + class B(Document): + meta = {'collection': 'b'} + + field = IntField(default=1) + embed = EmbeddedDocumentField(Embed, default=Embed) + embed_no_default = EmbeddedDocumentField(Embed) + + # Creating {field : 2, embed : {field: 2}, embed_no_default: {field: 2}} + val = 2 + embed = Embed() + embed.field = val + record = B() + record.field = val + record.embed = embed + record.embed_no_default = embed + record.save() + + # Checking it was saved correctly + record.reload() + self.assertEqual(record.field, 2) + self.assertEqual(record.embed_no_default.field, 2) + self.assertEqual(record.embed.field, 2) + + # Request only the _id field and save + clone = B.objects().only('id').first() + clone.save() + + # Reload the record and see that the embed data is not lost + record.reload() + self.assertEqual(record.field, 2) + 
self.assertEqual(record.embed_no_default.field, 2) + self.assertEqual(record.embed.field, 2) + def test_get_or_create(self): """Ensure that ``get_or_create`` returns one result or creates a new document. From 4c1496b4a4d380fa62ada223819ec3cd0d523bc8 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Fri, 12 Jun 2015 13:10:36 +0200 Subject: [PATCH 28/36] Updated URL and Email field regex validators, added schemes arg to urlfield --- mongoengine/fields.py | 23 ++++++++++++++++------- tests/fields/fields.py | 18 +++++++++++++++++- 2 files changed, 33 insertions(+), 8 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9176828c..b685534d 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -119,22 +119,31 @@ class URLField(StringField): """ _URL_REGEX = re.compile( - r'^(?:http|ftp)s?://' # http:// or https:// - # domain... - r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' + r'^(?:[a-z0-9\.\-]*)://' # scheme is validated separately + r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(? Date: Fri, 12 Jun 2015 13:12:35 +0200 Subject: [PATCH 29/36] Added to changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index dfa9c7a6..aba3f6b2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -22,6 +22,7 @@ Changes in 0.9.X - DEV - No_dereference() not respected on embedded docs containing reference. #517 - Document save raise an exception if save_condition fails #1005 - Fixes some internal _id handling issue. #961 +- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 Changes in 0.9.0 ================ From 7714cca5995a1f26e3e7d4bccbd2464591304fe4 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Fri, 12 Jun 2015 13:34:28 +0200 Subject: [PATCH 30/36] Removed get_or_create() method, deprecated since 0.8 --- docs/changelog.rst | 1 + docs/guide/querying.rst | 20 ++++----------- mongoengine/queryset/base.py | 48 ------------------------------------ tests/fields/fields.py | 4 +-- tests/queryset/queryset.py | 35 +------------------------- 5 files changed, 8 insertions(+), 100 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index aba3f6b2..e3d7c8df 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -23,6 +23,7 @@ Changes in 0.9.X - DEV - Document save raise an exception if save_condition fails #1005 - Fixes some internal _id handling issue. #961 - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 +- Removed get_or_create() deprecated since 0.8.0. #300 Changes in 0.9.0 ================ diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 9861ce56..1cde82cb 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -263,21 +263,11 @@ no document matches the query, and if more than one document matched the query. These exceptions are merged into your document definitions eg: `MyDoc.DoesNotExist` -A variation of this method exists, -:meth:`~mongoengine.queryset.QuerySet.get_or_create`, that will create a new -document with the query arguments if no documents match the query. 
An -additional keyword argument, :attr:`defaults` may be provided, which will be -used as default values for the new document, in the case that it should need -to be created:: - - >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30}) - >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40}) - >>> a.name == b.name and a.age == b.age - True - -.. warning:: - :meth:`~mongoengine.queryset.QuerySet.get_or_create` method is deprecated - since :mod:`mongoengine` 0.8. +A variation of this method, get_or_create() existed, but it was unsafe. It +could not be made safe, because there are no transactions in mongoDB. Other +approaches should be investigated, to ensure you don't accidentally duplicate +data when using something similar to this method. Therefore it was deprecated +in 0.8 and removed in 0.10. Default Document queries ======================== diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 89eb9afa..c8a30783 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -258,54 +258,6 @@ class BaseQuerySet(object): """ return self._document(**kwargs).save() - def get_or_create(self, write_concern=None, auto_save=True, - *q_objs, **query): - """Retrieve unique object or create, if it doesn't exist. Returns a - tuple of ``(object, created)``, where ``object`` is the retrieved or - created object and ``created`` is a boolean specifying whether a new - object was created. Raises - :class:`~mongoengine.queryset.MultipleObjectsReturned` or - `DocumentName.MultipleObjectsReturned` if multiple results are found. - A new document will be created if the document doesn't exists; a - dictionary of default values for the new document may be provided as a - keyword argument called :attr:`defaults`. - - .. note:: This requires two separate operations and therefore a - race condition exists. Because there are no transactions in - mongoDB other approaches should be investigated, to ensure you - don't accidentally duplicate data when using this method. This is - now scheduled to be removed before 1.0 - - :param write_concern: optional extra keyword arguments used if we - have to create a new document. - Passes any write_concern onto :meth:`~mongoengine.Document.save` - - :param auto_save: if the object is to be saved automatically if - not found. - - .. deprecated:: 0.8 - .. versionchanged:: 0.6 - added `auto_save` - .. versionadded:: 0.3 - """ - msg = ("get_or_create is scheduled to be deprecated. The approach is " - "flawed without transactions. Upserts should be preferred.") - warnings.warn(msg, DeprecationWarning) - - defaults = query.get('defaults', {}) - if 'defaults' in query: - del query['defaults'] - - try: - doc = self.get(*q_objs, **query) - return doc, False - except self._document.DoesNotExist: - query.update(defaults) - doc = self._document(**query) - - if auto_save: - doc.save(write_concern=write_concern) - return doc, True - def first(self): """Retrieve the first object matching the query. 
""" diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 1a5c2561..35e3ca39 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -2152,9 +2152,7 @@ class FieldTest(unittest.TestCase): obj = Product.objects(company=None).first() self.assertEqual(obj, me) - obj, created = Product.objects.get_or_create(company=None) - - self.assertEqual(created, False) + obj = Product.objects.get(company=None) self.assertEqual(obj, me) def test_reference_query_conversion(self): diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 981d47f4..fbc8ce4a 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -340,8 +340,7 @@ class QuerySetTest(unittest.TestCase): write_concern = {"fsync": True} - author, created = self.Person.objects.get_or_create( - name='Test User', write_concern=write_concern) + author = self.Person.objects.create(name='Test User') author.save(write_concern=write_concern) result = self.Person.objects.update( @@ -730,38 +729,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(record.embed_no_default.field, 2) self.assertEqual(record.embed.field, 2) - def test_get_or_create(self): - """Ensure that ``get_or_create`` returns one result or creates a new - document. - """ - person1 = self.Person(name="User A", age=20) - person1.save() - person2 = self.Person(name="User B", age=30) - person2.save() - - # Retrieve the first person from the database - self.assertRaises(MultipleObjectsReturned, - self.Person.objects.get_or_create) - self.assertRaises(self.Person.MultipleObjectsReturned, - self.Person.objects.get_or_create) - - # Use a query to filter the people found to just person2 - person, created = self.Person.objects.get_or_create(age=30) - self.assertEqual(person.name, "User B") - self.assertEqual(created, False) - - person, created = self.Person.objects.get_or_create(age__lt=30) - self.assertEqual(person.name, "User A") - self.assertEqual(created, False) - - # Try retrieving when no objects exists - new doc should be created - kwargs = dict(age=50, defaults={'name': 'User C'}) - person, created = self.Person.objects.get_or_create(**kwargs) - self.assertEqual(created, True) - - person = self.Person.objects.get(age=50) - self.assertEqual(person.name, "User C") - def test_bulk_insert(self): """Ensure that bulk insert works """ From 2b647d24051f1eac5ae8d5dcf42010dea083b2d6 Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Fri, 12 Jun 2015 21:20:59 +0200 Subject: [PATCH 31/36] Improved doc for SequenceField Related to issue #497 --- mongoengine/fields.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 8e6c9b07..c27d02b2 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1013,6 +1013,7 @@ class CachedReferenceField(BaseField): """ A referencefield with cache fields to purpose pseudo-joins + .. versionadded:: 0.9 """ @@ -1683,12 +1684,21 @@ class SequenceField(BaseField): cluster of machines, it is easier to create an object ID than have global, uniformly increasing sequence numbers. + :param collection_name: Name of the counter collection (default 'mongoengine.counters') + :param sequence_name: Name of the sequence in the collection (default 'ClassName.counter') + :param value_decorator: Any callable to use as a counter (default int) + Use any callable as `value_decorator` to transform calculated counter into any value suitable for your needs, e.g. string or hexadecimal representation of the default integer counter value. 
- + + .. note:: + + In case the counter is defined in the abstract document, it will be + common to all inherited documents and the default sequence name will + be the class name of the abstract document. + .. versionadded:: 0.5 - .. versionchanged:: 0.8 added `value_decorator` """ From 2a3d3de0b2be6fbedf975fe99c6aaa721de8453f Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Mon, 15 Jun 2015 00:22:07 +0200 Subject: [PATCH 32/36] CappedCollection max_size normalized to multiple of 256 --- docs/changelog.rst | 1 + docs/guide/defining-documents.rst | 6 ++- mongoengine/document.py | 13 +++++-- tests/document/instance.py | 65 ++++++++++++++++++++++++++++++- 4 files changed, 78 insertions(+), 7 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e3d7c8df..48e8b9aa 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -24,6 +24,7 @@ Changes in 0.9.X - DEV - Fixes some internal _id handling issue. #961 - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 - Removed get_or_create() deprecated since 0.8.0. #300 +- Capped collection multiple of 256. #1011 Changes in 0.9.0 ================ diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index d1448158..8f7382ee 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -447,8 +447,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. :attr:`max_documents` is the maximum number of documents that is allowed to be stored in the collection, and :attr:`max_size` is the maximum size of the -collection in bytes. If :attr:`max_size` is not specified and -:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). +collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256 +by MongoDB internally and mongoengine before. Use also a multiple of 256 to +avoid confusions. If :attr:`max_size` is not specified and +:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB). The following example shows a :class:`Log` document that will be limited to 1000 entries and 2MB of disk space:: diff --git a/mongoengine/document.py b/mongoengine/document.py index 885f7eed..dee6cc10 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -114,9 +114,11 @@ class Document(BaseDocument): specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. :attr:`max_documents` is the maximum number of documents that is allowed to be stored in the collection, and :attr:`max_size` is the - maximum size of the collection in bytes. If :attr:`max_size` is not + maximum size of the collection in bytes. :attr:`max_size` is rounded up + to the next multiple of 256 by MongoDB internally and mongoengine before. + Use also a multiple of 256 to avoid confusions. If :attr:`max_size` is not specified and :attr:`max_documents` is, :attr:`max_size` defaults to - 10000000 bytes (10MB). + 10485760 bytes (10MB). Indexes may be created by specifying :attr:`indexes` in the :attr:`meta` dictionary. The value should be a list of field names or tuples of field @@ -137,7 +139,7 @@ class Document(BaseDocument): By default, any extra attribute existing in stored data but not declared in your model will raise a :class:`~mongoengine.FieldDoesNotExist` error. This can be disabled by setting :attr:`strict` to ``False`` - in the :attr:`meta` dictionnary. 
+ in the :attr:`meta` dictionary. """ # The __metaclass__ attribute is removed by 2to3 when running with Python3 @@ -176,8 +178,11 @@ class Document(BaseDocument): # Create collection as a capped collection if specified if cls._meta.get('max_size') or cls._meta.get('max_documents'): # Get max document limit and max byte size from meta - max_size = cls._meta.get('max_size') or 10000000 # 10MB default + max_size = cls._meta.get('max_size') or 10 * 2 ** 20 # 10MB default max_documents = cls._meta.get('max_documents') + # Round up to next 256 bytes as MongoDB would do it to avoid exception + if max_size % 256: + max_size = (max_size / 256 + 1) * 256 if collection_name in db.collection_names(): cls._collection = db[collection_name] diff --git a/tests/document/instance.py b/tests/document/instance.py index e1710b9f..fceec02f 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -88,7 +88,7 @@ class InstanceTest(unittest.TestCase): options = Log.objects._collection.options() self.assertEqual(options['capped'], True) self.assertEqual(options['max'], 10) - self.assertTrue(options['size'] >= 4096) + self.assertEqual(options['size'], 4096) # Check that the document cannot be redefined with different options def recreate_log_document(): @@ -103,6 +103,69 @@ class InstanceTest(unittest.TestCase): Log.drop_collection() + def test_capped_collection_default(self): + """Ensure that capped collections defaults work properly. + """ + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_documents': 10, + } + + Log.drop_collection() + + # Create a doc to create the collection + Log().save() + + options = Log.objects._collection.options() + self.assertEqual(options['capped'], True) + self.assertEqual(options['max'], 10) + self.assertEqual(options['size'], 10 * 2**20) + + # Check that the document with default value can be recreated + def recreate_log_document(): + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_documents': 10, + } + # Create the collection by accessing Document.objects + Log.objects + recreate_log_document() + Log.drop_collection() + + def test_capped_collection_no_max_size_problems(self): + """Ensure that capped collections with odd max_size work properly. 
+ MongoDB rounds up max_size to next multiple of 256, recreating a doc + with the same spec failed in mongoengine <0.10 + """ + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_size': 10000, + } + + Log.drop_collection() + + # Create a doc to create the collection + Log().save() + + options = Log.objects._collection.options() + self.assertEqual(options['capped'], True) + self.assertTrue(options['size'] >= 10000) + + # Check that the document with odd max_size value can be recreated + def recreate_log_document(): + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_size': 10000, + } + # Create the collection by accessing Document.objects + Log.objects + recreate_log_document() + Log.drop_collection() + def test_repr(self): """Ensure that unicode representation works """ From 1bcd675ead609b8b423d227b9266df752f3c409a Mon Sep 17 00:00:00 2001 From: Matthieu Rigal Date: Mon, 15 Jun 2015 13:44:11 +0200 Subject: [PATCH 33/36] Python 3 fix, uses floor division --- mongoengine/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index dee6cc10..429f6065 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -182,7 +182,7 @@ class Document(BaseDocument): max_documents = cls._meta.get('max_documents') # Round up to next 256 bytes as MongoDB would do it to avoid exception if max_size % 256: - max_size = (max_size / 256 + 1) * 256 + max_size = (max_size // 256 + 1) * 256 if collection_name in db.collection_names(): cls._collection = db[collection_name] From dd095279c86d55f0497f751f8ca4cb56aeb36d55 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Sun, 7 Jun 2015 14:15:23 -0700 Subject: [PATCH 34/36] aggregate_sum/average + unit tests --- mongoengine/queryset/base.py | 34 ++++++++++++++++++ tests/queryset/queryset.py | 68 ++++++++++++++++++++++++++++++++---- 2 files changed, 96 insertions(+), 6 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index c8a30783..38389fbf 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1248,6 +1248,23 @@ class BaseQuerySet(object): else: return 0 + def aggregate_sum(self, field): + """Sum over the values of the specified field. + + :param field: the field to sum over; use dot-notation to refer to + embedded document fields + + This method is more performant than the regular `sum`, because it uses + the aggregation framework instead of map-reduce. + """ + result = self._document._get_collection().aggregate([ + { '$match': self._query }, + { '$group': { '_id': 'sum', 'total': { '$sum': '$' + field } } } + ]) + if result['result']: + return result['result'][0]['total'] + return 0 + def average(self, field): """Average over the values of the specified field. @@ -1303,6 +1320,23 @@ class BaseQuerySet(object): else: return 0 + def aggregate_average(self, field): + """Average over the values of the specified field. + + :param field: the field to average over; use dot-notation to refer to + embedded document fields + + This method is more performant than the regular `average`, because it + uses the aggregation framework instead of map-reduce. 
+ """ + result = self._document._get_collection().aggregate([ + { '$match': self._query }, + { '$group': { '_id': 'avg', 'total': { '$avg': '$' + field } } } + ]) + if result['result']: + return result['result'][0]['total'] + return 0 + def item_frequencies(self, field, normalize=False, map_reduce=True): """Returns a dictionary of all items present in a field across the whole queried set of documents, and their corresponding frequency. diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index e7eb4901..d4348678 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -2706,26 +2706,58 @@ class QuerySetTest(unittest.TestCase): avg = float(sum(ages)) / (len(ages) + 1) # take into account the 0 self.assertAlmostEqual(int(self.Person.objects.average('age')), avg) + self.assertAlmostEqual( + int(self.Person.objects.aggregate_average('age')), avg + ) self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.average('age')), avg) + self.assertEqual( + int(self.Person.objects.aggregate_average('age')), avg + ) # dot notation self.Person( name='person meta', person_meta=self.PersonMeta(weight=0)).save() self.assertAlmostEqual( int(self.Person.objects.average('person_meta.weight')), 0) + self.assertAlmostEqual( + int(self.Person.objects.aggregate_average('person_meta.weight')), + 0 + ) for i, weight in enumerate(ages): self.Person( name='test meta%i', person_meta=self.PersonMeta(weight=weight)).save() self.assertAlmostEqual( - int(self.Person.objects.average('person_meta.weight')), avg) + int(self.Person.objects.average('person_meta.weight')), avg + ) + self.assertAlmostEqual( + int(self.Person.objects.aggregate_average('person_meta.weight')), + avg + ) self.Person(name='test meta none').save() self.assertEqual( - int(self.Person.objects.average('person_meta.weight')), avg) + int(self.Person.objects.average('person_meta.weight')), avg + ) + self.assertEqual( + int(self.Person.objects.aggregate_average('person_meta.weight')), + avg + ) + + # test summing over a filtered queryset + over_50 = [a for a in ages if a >= 50] + avg = float(sum(over_50)) / len(over_50) + self.assertEqual( + self.Person.objects.filter(age__gte=50).average('age'), + avg + ) + self.assertEqual( + self.Person.objects.filter(age__gte=50).aggregate_average('age'), + avg + ) def test_sum(self): """Ensure that field can be summed over correctly. 
@@ -2734,20 +2766,44 @@ class QuerySetTest(unittest.TestCase): for i, age in enumerate(ages): self.Person(name='test%s' % i, age=age).save() - self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.assertEqual( + self.Person.objects.aggregate_sum('age'), sum(ages) + ) self.Person(name='ageless person').save() - self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.assertEqual( + self.Person.objects.aggregate_sum('age'), sum(ages) + ) for i, age in enumerate(ages): self.Person(name='test meta%s' % i, person_meta=self.PersonMeta(weight=age)).save() self.assertEqual( - int(self.Person.objects.sum('person_meta.weight')), sum(ages)) + self.Person.objects.sum('person_meta.weight'), sum(ages) + ) + self.assertEqual( + self.Person.objects.aggregate_sum('person_meta.weight'), + sum(ages) + ) self.Person(name='weightless person').save() - self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.assertEqual( + self.Person.objects.aggregate_sum('age'), sum(ages) + ) + + # test summing over a filtered queryset + self.assertEqual( + self.Person.objects.filter(age__gte=50).sum('age'), + sum([a for a in ages if a >= 50]) + ) + self.assertEqual( + self.Person.objects.filter(age__gte=50).aggregate_sum('age'), + sum([a for a in ages if a >= 50]) + ) def test_embedded_average(self): class Pay(EmbeddedDocument): From 12337802657d7e6d03a1fd19e7f412d54b3ef05d Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Mon, 8 Jun 2015 13:46:19 -0700 Subject: [PATCH 35/36] make aggregate_sum/average compatible with pymongo 3.x --- mongoengine/queryset/base.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 38389fbf..b949e121 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1261,8 +1261,13 @@ class BaseQuerySet(object): { '$match': self._query }, { '$group': { '_id': 'sum', 'total': { '$sum': '$' + field } } } ]) - if result['result']: - return result['result'][0]['total'] + if IS_PYMONGO_3: + result = list(result) + if result: + return result[0]['total'] + else: + if result['result']: + return result['result'][0]['total'] return 0 def average(self, field): @@ -1333,8 +1338,13 @@ class BaseQuerySet(object): { '$match': self._query }, { '$group': { '_id': 'avg', 'total': { '$avg': '$' + field } } } ]) - if result['result']: - return result['result'][0]['total'] + if IS_PYMONGO_3: + result = list(result) + if result: + return result[0]['total'] + else: + if result['result']: + return result['result'][0]['total'] return 0 def item_frequencies(self, field, normalize=False, map_reduce=True): From b7ef82cb67d11787f7b305028690c542ad048301 Mon Sep 17 00:00:00 2001 From: Stefan Wojcik Date: Wed, 17 Jun 2015 17:05:10 -0700 Subject: [PATCH 36/36] style tweaks + changelog entry --- docs/changelog.rst | 1 + mongoengine/queryset/base.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 48e8b9aa..b9ad5b0e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -25,6 +25,7 @@ Changes in 0.9.X - DEV - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 - Removed get_or_create() deprecated since 0.8.0. #300 - Capped collection multiple of 256. 
#1011 +- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods. Changes in 0.9.0 ================ diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index b949e121..c3abd46a 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1263,11 +1263,10 @@ class BaseQuerySet(object): ]) if IS_PYMONGO_3: result = list(result) - if result: - return result[0]['total'] else: - if result['result']: - return result['result'][0]['total'] + result = result.get('result') + if result: + return result[0]['total'] return 0 def average(self, field): @@ -1340,11 +1339,10 @@ class BaseQuerySet(object): ]) if IS_PYMONGO_3: result = list(result) - if result: - return result[0]['total'] else: - if result['result']: - return result['result'][0]['total'] + result = result.get('result') + if result: + return result[0]['total'] return 0 def item_frequencies(self, field, normalize=False, map_reduce=True):
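
As a closing illustration of the `aggregate_sum` and `aggregate_average` helpers added in the last three patches, here is a short usage sketch. It is an editorial example, not part of the series; the model and the ``connect()`` database name are assumptions, while the sample ages mirror the tests above::

    from mongoengine import Document, IntField, StringField, connect

    connect('aggregate_demo')  # hypothetical local database

    class Person(Document):
        name = StringField()
        age = IntField()

    Person.drop_collection()
    ages = [23, 54, 12, 94, 27]
    for i, age in enumerate(ages):
        Person(name='test%s' % i, age=age).save()

    # Same results as the map-reduce based sum()/average(), but computed with
    # a single $match + $group aggregation pipeline on the server.
    assert Person.objects.aggregate_sum('age') == sum(ages)                     # 210
    assert Person.objects.filter(age__gte=50).aggregate_average('age') == 74.0  # (54 + 94) / 2
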