From 3b88a4f7288dd83a0bbbec2393127fc7abc24b03 Mon Sep 17 00:00:00 2001 From: Harry Marr Date: Tue, 19 Oct 2010 12:28:34 +0100 Subject: [PATCH 001/214] Fixed a couple of errors in the docs --- docs/apireference.rst | 4 ++++ docs/guide/querying.rst | 6 ------ 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/docs/apireference.rst b/docs/apireference.rst index 34d4536d..a3d287ab 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -41,6 +41,8 @@ Fields .. autoclass:: mongoengine.URLField +.. autoclass:: mongoengine.EmailField + .. autoclass:: mongoengine.IntField .. autoclass:: mongoengine.FloatField @@ -57,6 +59,8 @@ Fields .. autoclass:: mongoengine.ListField +.. autoclass:: mongoengine.SortedListField + .. autoclass:: mongoengine.BinaryField .. autoclass:: mongoengine.ObjectIdField diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 832fed50..1caed2d7 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -325,12 +325,6 @@ calling it with keyword arguments:: # Get top posts Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) -.. warning:: - Only use these advanced queries if absolutely necessary as they will execute - significantly slower than regular queries. This is because they are not - natively supported by MongoDB -- they are compiled to Javascript and sent - to the server for execution. - Server-side javascript execution ================================ Javascript functions may be written and sent to the server for execution. The From f0c5dd1bce63f7ce3c7cdbcba33a4bc367674e3c Mon Sep 17 00:00:00 2001 From: Viktor Kerkez Date: Sat, 23 Oct 2010 22:33:03 +0200 Subject: [PATCH 002/214] Small fix for Python 2.5 --- .gitignore | 4 +++- mongoengine/base.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 51a9ca1d..d2927298 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,6 @@ docs/_build build/ dist/ mongoengine.egg-info/ -env/ \ No newline at end of file +env/ +.project +.pydevproject \ No newline at end of file diff --git a/mongoengine/base.py b/mongoengine/base.py index 6b74cb07..ac15df6f 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -491,6 +491,7 @@ class BaseDocument(object): if sys.version_info < (2, 5): # Prior to Python 2.5, Exception was an old-style class + import types def subclass_exception(name, parents, unused): return types.ClassType(name, parents, {}) else: From ef15733efea51bf61f2d01a58b0c87d7560f1bb5 Mon Sep 17 00:00:00 2001 From: Viktor Kerkez Date: Sat, 23 Oct 2010 22:35:37 +0200 Subject: [PATCH 003/214] Added creation_counter to BaseField in order to provied form modules with a way to sort fields i order user specified them (same technique is used in Django) --- mongoengine/base.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/mongoengine/base.py b/mongoengine/base.py index ac15df6f..c647208d 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -25,6 +25,12 @@ class BaseField(object): _index_with_types = True _geo_index = False + # These track each time a Field instance is created. Used to retain order. + # The auto_creation_counter is used for fields that MongoEngine implicitly + # creates, creation_counter is used for all user-specified fields. 
+ creation_counter = 0 + auto_creation_counter = -1 + def __init__(self, db_field=None, name=None, required=False, default=None, unique=False, unique_with=None, primary_key=False, validation=None, choices=None): @@ -41,6 +47,13 @@ class BaseField(object): self.primary_key = primary_key self.validation = validation self.choices = choices + # Adjust the appropriate creation counter, and save our local copy. + if self.db_field == '_id': + self.creation_counter = BaseField.auto_creation_counter + BaseField.auto_creation_counter -= 1 + else: + self.creation_counter = BaseField.creation_counter + BaseField.creation_counter += 1 def __get__(self, instance, owner): """Descriptor for retrieving a value from a field in a document. Do From a3830be4c9481ff2be0043e413dfdca838f90a4e Mon Sep 17 00:00:00 2001 From: Ales Zoulek Date: Thu, 28 Oct 2010 01:03:57 +0200 Subject: [PATCH 004/214] QuerySet.only() supports subfields + tests --- mongoengine/queryset.py | 6 +----- tests/queryset.py | 45 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 5 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 519dda03..0b651682 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -812,11 +812,7 @@ class QuerySet(object): """ self._loaded_fields = [] for field in fields: - if '.' in field: - raise InvalidQueryError('Subfields cannot be used as ' - 'arguments to QuerySet.only') - # Translate field name - field = QuerySet._lookup_field(self._document, field)[-1].db_field + field = ".".join(f.db_field for f in QuerySet._lookup_field(self._document, field.split('.'))) self._loaded_fields.append(field) # _cls is needed for polymorphism diff --git a/tests/queryset.py b/tests/queryset.py index 6ca4174d..84e450b6 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -452,6 +452,51 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(obj.salary, employee.salary) self.assertEqual(obj.name, None) + def test_only_with_subfields(self): + class User(EmbeddedDocument): + name = StringField() + email = StringField() + + class Comment(EmbeddedDocument): + title = StringField() + text = StringField() + + class BlogPost(Document): + content = StringField() + author = EmbeddedDocumentField(User) + comments = ListField(EmbeddedDocumentField(Comment)) + + BlogPost.drop_collection() + + post = BlogPost(content='Had a good coffee today...') + post.author = User(name='Test User') + post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] + post.save() + + obj = BlogPost.objects.only('author.name',).get() + self.assertEqual(obj.content, None) + self.assertEqual(obj.author.email, None) + self.assertEqual(obj.author.name, 'Test User') + self.assertEqual(obj.comments, []) + + obj = BlogPost.objects.only('content', 'comments.title',).get() + self.assertEqual(obj.content, 'Had a good coffee today...') + self.assertEqual(obj.author, None) + self.assertEqual(obj.comments[0].title, 'I aggree') + self.assertEqual(obj.comments[1].title, 'Coffee') + self.assertEqual(obj.comments[0].text, None) + self.assertEqual(obj.comments[1].text, None) + + obj = BlogPost.objects.only('comments',).get() + self.assertEqual(obj.content, None) + self.assertEqual(obj.author, None) + self.assertEqual(obj.comments[0].title, 'I aggree') + self.assertEqual(obj.comments[1].title, 'Coffee') + self.assertEqual(obj.comments[0].text, 'Great post!') + self.assertEqual(obj.comments[1].text, 'I hate coffee') + + BlogPost.drop_collection() + def 
test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. """ From 6b880aa8b358d6a80c35525adced2da1d801830c Mon Sep 17 00:00:00 2001 From: Harry Marr Date: Mon, 1 Nov 2010 00:43:30 +0000 Subject: [PATCH 005/214] Fixed order-then-filter issue --- mongoengine/queryset.py | 4 +++- tests/queryset.py | 18 ++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index f4849619..5737cb33 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -431,7 +431,9 @@ class QuerySet(object): self._cursor_obj.where(self._where_clause) # apply default ordering - if self._document._meta['ordering']: + if self._ordering: + self._cursor_obj.sort(self._ordering) + elif self._document._meta['ordering']: self.order_by(*self._document._meta['ordering']) if self._limit is not None: diff --git a/tests/queryset.py b/tests/queryset.py index 37fe7501..7059659c 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1395,6 +1395,24 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() + def test_order_then_filter(self): + """Ensure that ordering still works after filtering. + """ + class Number(Document): + n = IntField() + + Number.drop_collection() + + n2 = Number.objects.create(n=2) + n1 = Number.objects.create(n=1) + + self.assertEqual(list(Number.objects), [n2, n1]) + self.assertEqual(list(Number.objects.order_by('n')), [n1, n2]) + self.assertEqual(list(Number.objects.order_by('n').filter()), [n1, n2]) + + Number.drop_collection() + + class QTest(unittest.TestCase): def test_empty_q(self): From e1282028a576b0a8c2bd0f9e9699f854e98db23d Mon Sep 17 00:00:00 2001 From: Viktor Kerkez Date: Mon, 1 Nov 2010 14:54:55 +0100 Subject: [PATCH 006/214] Added django style choices --- mongoengine/base.py | 10 +++++----- mongoengine/fields.py | 1 - 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index c647208d..589042e2 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -100,9 +100,9 @@ class BaseField(object): def _validate(self, value): # check choices if self.choices is not None: - if value not in self.choices: - raise ValidationError("Value must be one of %s." - % unicode(self.choices)) + option_keys = [option_key for option_key, option_value in self.choices] + if value not in option_keys: + raise ValidationError("Value must be one of %s." % unicode(option_keys)) # check validation argument if self.validation is not None: @@ -254,8 +254,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # Propagate index options. 
for key in ('index_background', 'index_drop_dups', 'index_opts'): - if key in base._meta: - base_meta[key] = base._meta[key] + if key in base._meta: + base_meta[key] = base._meta[key] id_field = id_field or base._meta.get('id_field') base_indexes += base._meta.get('indexes', []) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index e95fd65e..e95312f9 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -12,7 +12,6 @@ import datetime import decimal import gridfs import warnings -import types __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', From bda4776a18522704469d16c0c58b855f0cb434a9 Mon Sep 17 00:00:00 2001 From: Ales Zoulek Date: Wed, 3 Nov 2010 16:37:41 +0100 Subject: [PATCH 007/214] added Queryset.exclude() + tests --- mongoengine/queryset.py | 68 +++++++++++++++++--- tests/queryset.py | 133 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 191 insertions(+), 10 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index e46380b6..8ce5ec7a 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -268,6 +268,48 @@ class Q(QNode): return not bool(self.query) +class QueryFieldList(object): + """Object that handles combinations of .only() and .exclude() calls""" + ONLY = True + EXCLUDE = False + + def __init__(self, fields=[], direction=ONLY, always_include=[]): + self.direction = direction + self.fields = set(fields) + self.always_include = set(always_include) + + def as_dict(self): + return dict((field, self.direction) for field in self.fields) + + def __add__(self, f): + if not self.fields: + self.fields = f.fields + self.direction = f.direction + elif self.direction is self.ONLY and f.direction is self.ONLY: + self.fields = self.fields.intersection(f.fields) + elif self.direction is self.EXCLUDE and f.direction is self.EXCLUDE: + self.fields = self.fields.union(f.fields) + elif self.direction is self.ONLY and f.direction is self.EXCLUDE: + self.fields -= f.fields + elif self.direction is self.EXCLUDE and f.direction is self.ONLY: + self.direction = self.ONLY + self.fields = f.fields - self.fields + + if self.always_include: + if self.direction is self.ONLY and self.fields: + self.fields = self.fields.union(self.always_include) + else: + self.fields -= self.always_include + return self + + def reset(self): + self.fields = set([]) + self.direction = self.ONLY + + def __nonzero__(self): + return bool(self.fields) + + class QuerySet(object): """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. @@ -281,7 +323,7 @@ class QuerySet(object): self._query_obj = Q() self._initial_query = {} self._where_clause = None - self._loaded_fields = [] + self._loaded_fields = QueryFieldList() self._ordering = [] self._snapshot = False self._timeout = True @@ -290,6 +332,7 @@ class QuerySet(object): # subclasses of the class being used if document._meta.get('allow_inheritance'): self._initial_query = {'_types': self._document._class_name} + self._loaded_fields = QueryFieldList(always_include=['_cls']) self._cursor_obj = None self._limit = None self._skip = None @@ -423,7 +466,7 @@ class QuerySet(object): 'timeout': self._timeout, } if self._loaded_fields: - cursor_args['fields'] = self._loaded_fields + cursor_args['fields'] = self._loaded_fields.as_dict() self._cursor_obj = self._collection.find(self._query, **cursor_args) # Apply where clauses to cursor @@ -818,15 +861,22 @@ class QuerySet(object): .. 
versionadded:: 0.3 """ - self._loaded_fields = [] + fields = self._fields_to_dbfields(fields) + self._loaded_fields += QueryFieldList(fields, direction=QueryFieldList.ONLY) + return self + + + def exclude(self, *fields): + fields = self._fields_to_dbfields(fields) + self._loaded_fields += QueryFieldList(fields, direction=QueryFieldList.EXCLUDE) + return self + + def _fields_to_dbfields(self, fields): + ret = [] for field in fields: field = ".".join(f.db_field for f in QuerySet._lookup_field(self._document, field.split('.'))) - self._loaded_fields.append(field) - - # _cls is needed for polymorphism - if self._document._meta.get('allow_inheritance'): - self._loaded_fields += ['_cls'] - return self + ret.append(field) + return ret def order_by(self, *keys): """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The diff --git a/tests/queryset.py b/tests/queryset.py index 8b25524e..4e1302ee 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -6,7 +6,7 @@ import pymongo from datetime import datetime, timedelta from mongoengine.queryset import (QuerySet, MultipleObjectsReturned, - DoesNotExist) + DoesNotExist, QueryFieldList) from mongoengine import * @@ -497,6 +497,81 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() + def test_exclude(self): + class User(EmbeddedDocument): + name = StringField() + email = StringField() + + class Comment(EmbeddedDocument): + title = StringField() + text = StringField() + + class BlogPost(Document): + content = StringField() + author = EmbeddedDocumentField(User) + comments = ListField(EmbeddedDocumentField(Comment)) + + BlogPost.drop_collection() + + post = BlogPost(content='Had a good coffee today...') + post.author = User(name='Test User') + post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] + post.save() + + obj = BlogPost.objects.exclude('author', 'comments.text').get() + self.assertEqual(obj.author, None) + self.assertEqual(obj.content, 'Had a good coffee today...') + self.assertEqual(obj.comments[0].title, 'I aggree') + self.assertEqual(obj.comments[0].text, None) + + BlogPost.drop_collection() + + def test_exclude_only_combining(self): + class Attachment(EmbeddedDocument): + name = StringField() + content = StringField() + + class Email(Document): + sender = StringField() + to = StringField() + subject = StringField() + body = StringField() + content_type = StringField() + attachments = ListField(EmbeddedDocumentField(Attachment)) + + Email.drop_collection() + email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') + email.attachments = [ + Attachment(name='file1.doc', content='ABC'), + Attachment(name='file2.doc', content='XYZ'), + ] + email.save() + + obj = Email.objects.exclude('content_type').exclude('body').get() + self.assertEqual(obj.sender, 'me') + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, 'From Russia with Love') + self.assertEqual(obj.body, None) + self.assertEqual(obj.content_type, None) + + obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get() + self.assertEqual(obj.sender, None) + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, None) + self.assertEqual(obj.body, None) + self.assertEqual(obj.content_type, None) + + obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get() + self.assertEqual(obj.attachments[0].name, 'file1.doc') + self.assertEqual(obj.attachments[0].content, None) + 
self.assertEqual(obj.sender, None) + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, None) + self.assertEqual(obj.body, None) + self.assertEqual(obj.content_type, None) + + Email.drop_collection() + def test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. """ @@ -1594,6 +1669,62 @@ class QTest(unittest.TestCase): for condition in conditions: self.assertTrue(condition in query['$or']) +class QueryFieldListTest(unittest.TestCase): + def test_empty(self): + q = QueryFieldList() + self.assertFalse(q) + + q = QueryFieldList(always_include=['_cls']) + self.assertFalse(q) + + def test_include_include(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'a': True, 'b': True}) + q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'b': True}) + + def test_include_exclude(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'a': True, 'b': True}) + q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': True}) + + def test_exclude_exclude(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': False, 'b': False}) + q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': False, 'b': False, 'c': False}) + + def test_exclude_include(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.EXCLUDE) + self.assertEqual(q.as_dict(), {'a': False, 'b': False}) + q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'c': True}) + + def test_always_include(self): + q = QueryFieldList(always_include=['x', 'y']) + q += QueryFieldList(fields=['a', 'b', 'x'], direction=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) + + + def test_reset(self): + q = QueryFieldList(always_include=['x', 'y']) + q += QueryFieldList(fields=['a', 'b', 'x'], direction=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) + q.reset() + self.assertFalse(q) + q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True}) + + + + if __name__ == '__main__': unittest.main() From 89646439e7afbffe7209d218f454ee0fa03a2f14 Mon Sep 17 00:00:00 2001 From: Deepak Thukral Date: Wed, 10 Nov 2010 21:02:59 +0100 Subject: [PATCH 008/214] fixed typo in error message --- mongoengine/queryset.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index e46380b6..55201009 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -78,7 +78,7 @@ class SimplificationVisitor(QNodeVisitor): # to a single field intersection = ops.intersection(query_ops) if intersection: - msg = 'Duplicate query contitions: ' + msg = 'Duplicate query conditions: ' raise InvalidQueryError(msg + ', '.join(intersection)) query_ops.update(ops) @@ -179,7 +179,7 @@ class QueryCompilerVisitor(QNodeVisitor): # once to a single field intersection = 
current_ops.intersection(new_ops) if intersection: - msg = 'Duplicate query contitions: ' + msg = 'Duplicate query conditions: ' raise InvalidQueryError(msg + ', '.join(intersection)) # Right! We've got two non-overlapping dicts of operations! From 66baa4eb6185ca27b2ccc92d68450ecaa17112fd Mon Sep 17 00:00:00 2001 From: Ales Zoulek Date: Wed, 10 Nov 2010 22:01:27 +0100 Subject: [PATCH 009/214] QS.all_fields - resets previous .only() and .exlude() --- mongoengine/queryset.py | 15 +++++++++++++++ tests/queryset.py | 23 +++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 8ce5ec7a..e0755379 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -867,11 +867,26 @@ class QuerySet(object): def exclude(self, *fields): + """Opposite to .only(), exclude some document's fields. :: + + post = BlogPost.objects(...).exclude("comments") + + :param fields: fields to exclude + """ fields = self._fields_to_dbfields(fields) self._loaded_fields += QueryFieldList(fields, direction=QueryFieldList.EXCLUDE) return self + def all_fields(self): + """Include all fields. Reset all previously calls of .only() and .exclude(). :: + + post = BlogPost.objects(...).exclude("comments").only("title").all_fields() + """ + self._loaded_fields = QueryFieldList(always_include=self._loaded_fields.always_include) + return self + def _fields_to_dbfields(self, fields): + """Translate fields paths to its db equivalents""" ret = [] for field in fields: field = ".".join(f.db_field for f in QuerySet._lookup_field(self._document, field.split('.'))) diff --git a/tests/queryset.py b/tests/queryset.py index 4e1302ee..88e7737f 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -572,6 +572,29 @@ class QuerySetTest(unittest.TestCase): Email.drop_collection() + def test_all_fields(self): + + class Email(Document): + sender = StringField() + to = StringField() + subject = StringField() + body = StringField() + content_type = StringField() + + Email.drop_collection() + + email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') + email.save() + + obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get() + self.assertEqual(obj.sender, 'me') + self.assertEqual(obj.to, 'you') + self.assertEqual(obj.subject, 'From Russia with Love') + self.assertEqual(obj.body, 'Hello!') + self.assertEqual(obj.content_type, 'text/plain') + + Email.drop_collection() + def test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. """ From 9c8411b251ab0557063fc60d50c32c888f55c613 Mon Sep 17 00:00:00 2001 From: Viktor Kerkez Date: Thu, 11 Nov 2010 18:19:00 +0100 Subject: [PATCH 010/214] Choice field test updated --- .gitignore | 1 + tests/fields.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index d2927298..d67429a2 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,6 @@ build/ dist/ mongoengine.egg-info/ env/ +.settings .project .pydevproject \ No newline at end of file diff --git a/tests/fields.py b/tests/fields.py index 5602cdec..034ec61e 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -644,7 +644,8 @@ class FieldTest(unittest.TestCase): """Ensure that value is in a container of allowed values. 
""" class Shirt(Document): - size = StringField(max_length=3, choices=('S','M','L','XL','XXL')) + size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), + ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) Shirt.drop_collection() From b12c34334ccfeef41b948eef6bedae22c7254bdf Mon Sep 17 00:00:00 2001 From: Deepak Thukral Date: Thu, 18 Nov 2010 20:44:51 +0100 Subject: [PATCH 011/214] added test case for issue 103 --- tests/queryset.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/tests/queryset.py b/tests/queryset.py index 8b25524e..e130ecef 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -539,33 +539,47 @@ class QuerySetTest(unittest.TestCase): """Ensure that Q objects may be used to query for documents. """ class BlogPost(Document): + title = StringField() publish_date = DateTimeField() published = BooleanField() BlogPost.drop_collection() - post1 = BlogPost(publish_date=datetime(2010, 1, 8), published=False) + post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False) post1.save() - post2 = BlogPost(publish_date=datetime(2010, 1, 15), published=True) + post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True) post2.save() - post3 = BlogPost(published=True) + post3 = BlogPost(title='Test 3', published=True) post3.save() - post4 = BlogPost(publish_date=datetime(2010, 1, 8)) + post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8)) post4.save() - post5 = BlogPost(publish_date=datetime(2010, 1, 15)) + post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15)) post5.save() - post6 = BlogPost(published=False) + post6 = BlogPost(title='Test 1', published=False) post6.save() # Check ObjectId lookup works obj = BlogPost.objects(id=post1.id).first() self.assertEqual(obj, post1) + # Check Q object combination with one does not exist + q = BlogPost.objects(Q(title='Test 5') | Q(published=True)) + posts = [post.id for post in q] + + published_posts = (post2, post3) + self.assertTrue(all(obj.id in posts for obj in published_posts)) + + q = BlogPost.objects(Q(title='Test 1') | Q(published=True)) + posts = [post.id for post in q] + published_posts = (post1, post2, post3, post5, post6) + self.assertTrue(all(obj.id in posts for obj in published_posts)) + + # Check Q object combination date = datetime(2010, 1, 10) q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) From ca56785cbcf3747428b219f1ce0029c7e80bfd29 Mon Sep 17 00:00:00 2001 From: sshwsfc Date: Thu, 18 Nov 2010 21:33:05 -0800 Subject: [PATCH 012/214] add some prepare_query_value method for fields --- mongoengine/fields.py | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index e95fd65e..f658b107 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -150,6 +150,8 @@ class IntField(BaseField): if self.max_value is not None and value > self.max_value: raise ValidationError('Integer value is too large') + def prepare_query_value(self, op, value): + return int(value) class FloatField(BaseField): """An floating point number field. @@ -173,6 +175,9 @@ class FloatField(BaseField): if self.max_value is not None and value > self.max_value: raise ValidationError('Float value is too large') + def prepare_query_value(self, op, value): + return float(value) + class DecimalField(BaseField): """A fixed-point decimal number field. 
@@ -227,6 +232,40 @@ class DateTimeField(BaseField): def validate(self, value): assert isinstance(value, datetime.datetime) + def prepare_query_value(self, op, value): + if value is None: + return value + if isinstance(value, datetime.datetime): + return value + if isinstance(value, datetime.date): + return datetime.datetime(value.year, value.month, value.day) + + # Attempt to parse a datetime: + #value = smart_str(value) + # split usecs, because they are not recognized by strptime. + if '.' in value: + try: + value, usecs = value.split('.') + usecs = int(usecs) + except ValueError: + return None + else: + usecs = 0 + kwargs = {'microsecond': usecs} + try: # Seconds are optional, so try converting seconds first. + return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6], + **kwargs) + + except ValueError: + try: # Try without seconds. + return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5], + **kwargs) + except ValueError: # Try without hour/minutes/seconds. + try: + return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3], + **kwargs) + except ValueError: + return None class EmbeddedDocumentField(BaseField): """An embedded document field. Only valid values are subclasses of From ca8c3981c4f1fcf35a83e29395647366ffff6283 Mon Sep 17 00:00:00 2001 From: sshwsfc Date: Thu, 18 Nov 2010 22:35:11 -0800 Subject: [PATCH 013/214] --- mongoengine/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f658b107..7942e5eb 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -8,7 +8,7 @@ import pymongo import pymongo.dbref import pymongo.son import pymongo.binary -import datetime +import datetime, time import decimal import gridfs import warnings From cec8b67b08ae9d4776b3a19b0e8979b2237a328c Mon Sep 17 00:00:00 2001 From: Harry Marr Date: Sun, 5 Dec 2010 20:47:24 +0000 Subject: [PATCH 014/214] Added test for unsetting fields --- tests/queryset.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index 8b25524e..de3f4267 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -778,6 +778,11 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects.update_one(add_to_set__tags='unique') post.reload() self.assertEqual(post.tags.count('unique'), 1) + + self.assertNotEqual(post.hits, None) + BlogPost.objects.update_one(unset__hits=1) + post.reload() + self.assertEqual(post.hits, None) BlogPost.drop_collection() From 3a0523dd796ed2b7e41ac7569df1a32cc1686488 Mon Sep 17 00:00:00 2001 From: Harry Marr Date: Sun, 5 Dec 2010 21:43:04 +0000 Subject: [PATCH 015/214] Fixed issue with unset operation --- mongoengine/queryset.py | 3 +-- tests/queryset.py | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index e46380b6..49c8f69d 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -917,8 +917,7 @@ class QuerySet(object): # Convert value to proper value field = fields[-1] - if op in (None, 'set', 'unset', 'pop', 'push', 'pull', - 'addToSet'): + if op in (None, 'set', 'push', 'pull', 'addToSet'): value = field.prepare_query_value(op, value) elif op in ('pushAll', 'pullAll'): value = [field.prepare_query_value(op, v) for v in value] diff --git a/tests/queryset.py b/tests/queryset.py index de3f4267..374fdb54 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1462,6 +1462,27 @@ class QuerySetTest(unittest.TestCase): Number.drop_collection() + def 
test_unset_reference(self): + class Comment(Document): + text = StringField() + + class Post(Document): + comment = ReferenceField(Comment) + + Comment.drop_collection() + Post.drop_collection() + + comment = Comment.objects.create(text='test') + post = Post.objects.create(comment=comment) + + self.assertEqual(post.comment, comment) + Post.objects.update(unset__comment=1) + post.reload() + self.assertEqual(post.comment, None) + + Comment.drop_collection() + Post.drop_collection() + class QTest(unittest.TestCase): From 62cc8d2ab3a00cc2af4c8cce2f8942bd2b13a3f1 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 00:13:55 -0800 Subject: [PATCH 016/214] Fix: redefinition of "datetime" from line 6. --- tests/queryset.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/queryset.py b/tests/queryset.py index 374fdb54..6362555d 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -351,8 +351,6 @@ class QuerySetTest(unittest.TestCase): def test_filter_chaining(self): """Ensure filters can be chained together. """ - from datetime import datetime - class BlogPost(Document): title = StringField() is_published = BooleanField() From 67fcdca6d4f1b4fd791ad3485eff0a1b76b26e9b Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 14:30:19 +0100 Subject: [PATCH 017/214] Fix: PyFlakes pointed out this missing import. --- mongoengine/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mongoengine/base.py b/mongoengine/base.py index 6b74cb07..3dd2cb02 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -492,6 +492,7 @@ class BaseDocument(object): if sys.version_info < (2, 5): # Prior to Python 2.5, Exception was an old-style class def subclass_exception(name, parents, unused): + import types return types.ClassType(name, parents, {}) else: def subclass_exception(name, parents, module): From 4f3eacd72cc807344bc06e69306b5174994be4eb Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 14:30:50 +0100 Subject: [PATCH 018/214] Fix: whitespace. This broke my Vim auto-folds. 
--- tests/fields.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/tests/fields.py b/tests/fields.py index 5602cdec..d36a0804 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -523,29 +523,29 @@ class FieldTest(unittest.TestCase): Link.drop_collection() Post.drop_collection() Bookmark.drop_collection() - + link_1 = Link(title="Pitchfork") link_1.save() - + post_1 = Post(title="Behind the Scenes of the Pavement Reunion") post_1.save() - + bm = Bookmark(bookmark_object=post_1) bm.save() - + bm = Bookmark.objects(bookmark_object=post_1).first() - + self.assertEqual(bm.bookmark_object, post_1) self.assertTrue(isinstance(bm.bookmark_object, Post)) - + bm.bookmark_object = link_1 bm.save() - + bm = Bookmark.objects(bookmark_object=link_1).first() - + self.assertEqual(bm.bookmark_object, link_1) self.assertTrue(isinstance(bm.bookmark_object, Link)) - + Link.drop_collection() Post.drop_collection() Bookmark.drop_collection() @@ -555,23 +555,23 @@ class FieldTest(unittest.TestCase): """ class Link(Document): title = StringField() - + class Post(Document): title = StringField() - + class User(Document): bookmarks = ListField(GenericReferenceField()) - + Link.drop_collection() Post.drop_collection() User.drop_collection() - + link_1 = Link(title="Pitchfork") link_1.save() - + post_1 = Post(title="Behind the Scenes of the Pavement Reunion") post_1.save() - + user = User(bookmarks=[post_1, link_1]) user.save() From 86233bcdf539874c9cddef6f883abd84f68329a3 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 08:08:55 -0800 Subject: [PATCH 019/214] Added initial implementation of cascading document deletion. The current implementation is still very basic and needs some polish. The essence of it is that each Document gets a new meta attribute called "delete_rules" that is a dictionary containing (documentclass, fieldname) as key and the actual delete rule as a value. (Possible values are DO_NOTHING, NULLIFY, CASCADE and DENY. Of those, only CASCADE is currently implented.) 
--- mongoengine/base.py | 3 +++ mongoengine/document.py | 27 ++++++++++++++++++++++++++- mongoengine/fields.py | 3 ++- tests/document.py | 25 +++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 3dd2cb02..29de82fa 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -190,6 +190,8 @@ class DocumentMetaclass(type): new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): field.owner_document = new_class + if hasattr(field, 'delete_rule') and field.delete_rule: + field.document_type._meta['delete_rules'][(new_class, field.name)] = field.delete_rule module = attrs.get('__module__') @@ -258,6 +260,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): 'index_drop_dups': False, 'index_opts': {}, 'queryset_class': QuerySet, + 'delete_rules': {}, } meta.update(base_meta) diff --git a/mongoengine/document.py b/mongoengine/document.py index fef737db..06867168 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -6,9 +6,16 @@ from connection import _get_db import pymongo -__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError'] +__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError', + 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] +# Delete rules +DO_NOTHING = 0 +NULLIFY = 1 +CASCADE = 2 +DENY = 3 + class EmbeddedDocument(BaseDocument): """A :class:`~mongoengine.Document` that isn't stored in its own collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as @@ -92,6 +99,13 @@ class Document(BaseDocument): :param safe: check if the operation succeeded before returning """ + for rule_entry in self._meta['delete_rules']: + document_cls, field_name = rule_entry + rule = self._meta['delete_rules'][rule_entry] + + if rule == CASCADE: + document_cls.objects(**{field_name: self.id}).delete(safe=safe) + id_field = self._meta['id_field'] object_id = self._fields[id_field].to_mongo(self[id_field]) try: @@ -100,6 +114,17 @@ class Document(BaseDocument): message = u'Could not delete document (%s)' % err.message raise OperationError(message) + @classmethod + def register_delete_rule(cls, document_cls, field_name, rule): + """This method registers the delete rules to apply when removing this + object. This could go into the Document class. + """ + if rule == DO_NOTHING: + return + + cls._meta['delete_rules'][(document_cls, field_name)] = rule + + def reload(self): """Reloads all attributes from the database. diff --git a/mongoengine/fields.py b/mongoengine/fields.py index e95fd65e..01ec1f7b 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -417,12 +417,13 @@ class ReferenceField(BaseField): access (lazily). """ - def __init__(self, document_type, **kwargs): + def __init__(self, document_type, delete_rule=None, **kwargs): if not isinstance(document_type, basestring): if not issubclass(document_type, (Document, basestring)): raise ValidationError('Argument to ReferenceField constructor ' 'must be a document class or a string') self.document_type_obj = document_type + self.delete_rule = delete_rule super(ReferenceField, self).__init__(**kwargs) @property diff --git a/tests/document.py b/tests/document.py index c0567632..d5807c90 100644 --- a/tests/document.py +++ b/tests/document.py @@ -624,6 +624,31 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + + def test_cascade_delete(self): + """Ensure that a referenced document is also deleted upon deletion. 
+ """ + + class BlogPost(Document): + meta = {'collection': 'blogpost_1'} + content = StringField() + author = ReferenceField(self.Person, delete_rule=CASCADE) + + self.Person.drop_collection() + BlogPost.drop_collection() + + author = self.Person(name='Test User') + author.save() + + post = BlogPost(content = 'Watched some TV') + post.author = author + post.save() + + # Delete the Person, which should lead to deletion of the BlogPost, too + author.delete() + self.assertEqual(len(BlogPost.objects), 0) + + def tearDown(self): self.Person.drop_collection() From bba3aeb4fa06091561e601bf9d5dd72690416ddb Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 11:10:11 -0800 Subject: [PATCH 020/214] Actually *use* the register_delete_rule classmethod, since it's there. --- mongoengine/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 29de82fa..9f8c1e7b 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -191,7 +191,8 @@ class DocumentMetaclass(type): for field in new_class._fields.values(): field.owner_document = new_class if hasattr(field, 'delete_rule') and field.delete_rule: - field.document_type._meta['delete_rules'][(new_class, field.name)] = field.delete_rule + field.document_type.register_delete_rule(new_class, field.name, + field.delete_rule) module = attrs.get('__module__') From dd21ce9eac4156936f17e7106c1b048fe6069015 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 13:40:39 -0800 Subject: [PATCH 021/214] Initial implementation of the NULLIFY rule. --- mongoengine/document.py | 13 +++++++++++++ tests/document.py | 11 ++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 06867168..3b812abb 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -105,6 +105,19 @@ class Document(BaseDocument): if rule == CASCADE: document_cls.objects(**{field_name: self.id}).delete(safe=safe) + elif rule == NULLIFY: + # TODO: For now, this makes the nullify test pass, but it would + # be nicer to use any of these two atomic versions: + # + # document_cls.objects(**{field_name: self.id}).update(**{'unset__%s' % field_name: 1}) + # or + # document_cls.objects(**{field_name: self.id}).update(**{'set__%s' % field_name: None}) + # + # However, I'm getting ValidationError: 1/None is not a valid ObjectId + # Anybody got a clue? + for doc in document_cls.objects(**{field_name: self.id}): + doc.reviewer = None + doc.save() id_field = self._meta['id_field'] object_id = self._fields[id_field].to_mongo(self[id_field]) diff --git a/tests/document.py b/tests/document.py index d5807c90..7f92320d 100644 --- a/tests/document.py +++ b/tests/document.py @@ -625,7 +625,7 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() - def test_cascade_delete(self): + def test_delete_rule_cascade_and_nullify(self): """Ensure that a referenced document is also deleted upon deletion. 
""" @@ -633,6 +633,7 @@ class DocumentTest(unittest.TestCase): meta = {'collection': 'blogpost_1'} content = StringField() author = ReferenceField(self.Person, delete_rule=CASCADE) + reviewer = ReferenceField(self.Person, delete_rule=NULLIFY) self.Person.drop_collection() BlogPost.drop_collection() @@ -640,10 +641,18 @@ class DocumentTest(unittest.TestCase): author = self.Person(name='Test User') author.save() + reviewer = self.Person(name='Re Viewer') + reviewer.save() + post = BlogPost(content = 'Watched some TV') post.author = author + post.reviewer = reviewer post.save() + reviewer.delete() + self.assertEqual(len(BlogPost.objects), 1) # No effect on the BlogPost + self.assertEqual(BlogPost.objects.get().reviewer, None) + # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() self.assertEqual(len(BlogPost.objects), 0) From ad1aa5bd3e4f66ba18ae98b04af16a2e8aa60291 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 13:47:32 -0800 Subject: [PATCH 022/214] Add tests that need to be satisfied. --- tests/document.py | 12 ++++++++++++ tests/queryset.py | 15 +++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/tests/document.py b/tests/document.py index 7f92320d..dc63e32b 100644 --- a/tests/document.py +++ b/tests/document.py @@ -657,6 +657,18 @@ class DocumentTest(unittest.TestCase): author.delete() self.assertEqual(len(BlogPost.objects), 0) + def test_delete_rule_cascade_recurs(self): + """Ensure that a recursive chain of documents is also deleted upon + cascaded deletion. + """ + self.fail() + + def test_delete_rule_deny(self): + """Ensure that a document cannot be referenced if there are still + documents referring to it. + """ + self.fail() + def tearDown(self): self.Person.drop_collection() diff --git a/tests/queryset.py b/tests/queryset.py index 6362555d..32bbc4bf 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -734,6 +734,21 @@ class QuerySetTest(unittest.TestCase): self.Person.objects.delete() self.assertEqual(len(self.Person.objects), 0) + def test_delete_rule_cascade(self): + """Ensure cascading deletion of referring documents from the database. + """ + self.fail() + + def test_delete_rule_nullify(self): + """Ensure nullification of references to deleted documents. + """ + self.fail() + + def test_delete_rule_deny(self): + """Ensure deletion gets denied on documents that still have references to them. + """ + self.fail() + def test_update(self): """Ensure that atomic updates work properly. """ From d21434dfd648332f903b0ebe99d10197f740ce03 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 22:40:01 -0800 Subject: [PATCH 023/214] Make the nullification an atomic operation. This shortcut works now, since hmarr fixed the unset bug in dev. 
--- mongoengine/document.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 3b812abb..39442f6f 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -106,18 +106,7 @@ class Document(BaseDocument): if rule == CASCADE: document_cls.objects(**{field_name: self.id}).delete(safe=safe) elif rule == NULLIFY: - # TODO: For now, this makes the nullify test pass, but it would - # be nicer to use any of these two atomic versions: - # - # document_cls.objects(**{field_name: self.id}).update(**{'unset__%s' % field_name: 1}) - # or - # document_cls.objects(**{field_name: self.id}).update(**{'set__%s' % field_name: None}) - # - # However, I'm getting ValidationError: 1/None is not a valid ObjectId - # Anybody got a clue? - for doc in document_cls.objects(**{field_name: self.id}): - doc.reviewer = None - doc.save() + document_cls.objects(**{field_name: self.id}).update(**{'unset__%s' % field_name: 1}) id_field = self._meta['id_field'] object_id = self._fields[id_field].to_mongo(self[id_field]) From f3da5bc092df9e8ae78a3b81f3bb3af2506d55f5 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 23:03:40 -0800 Subject: [PATCH 024/214] Fix: potential NameError bug in test case. --- tests/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/document.py b/tests/document.py index dc63e32b..e5ff3b12 100644 --- a/tests/document.py +++ b/tests/document.py @@ -502,7 +502,7 @@ class DocumentTest(unittest.TestCase): try: recipient.save(validate=False) except ValidationError: - fail() + self.fail() def test_delete(self): """Ensure that document may be deleted using the delete method. From b06d7948543870cc4ca0bb41a4450e18d76053ec Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Sun, 5 Dec 2010 23:43:19 -0800 Subject: [PATCH 025/214] Implementation of DENY rules. 
--- mongoengine/document.py | 14 +++++++++++++- tests/document.py | 31 ++++++++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 39442f6f..38831b22 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -99,6 +99,17 @@ class Document(BaseDocument): :param safe: check if the operation succeeded before returning """ + # Check for DENY rules before actually deleting/nullifying any other + # references + for rule_entry in self._meta['delete_rules']: + document_cls, field_name = rule_entry + rule = self._meta['delete_rules'][rule_entry] + if rule == DENY and document_cls.objects(**{field_name: self.id}).count() > 0: + msg = u'Could not delete document (at least %s.%s refers to it)' % \ + (document_cls.__name__, field_name) + logging.error(msg) + raise OperationError(msg) + for rule_entry in self._meta['delete_rules']: document_cls, field_name = rule_entry rule = self._meta['delete_rules'][rule_entry] @@ -106,7 +117,8 @@ class Document(BaseDocument): if rule == CASCADE: document_cls.objects(**{field_name: self.id}).delete(safe=safe) elif rule == NULLIFY: - document_cls.objects(**{field_name: self.id}).update(**{'unset__%s' % field_name: 1}) + document_cls.objects(**{field_name: + self.id}).update(**{'unset__%s' % field_name: 1}) id_field = self._meta['id_field'] object_id = self._fields[id_field].to_mongo(self[id_field]) diff --git a/tests/document.py b/tests/document.py index e5ff3b12..99657993 100644 --- a/tests/document.py +++ b/tests/document.py @@ -667,7 +667,36 @@ class DocumentTest(unittest.TestCase): """Ensure that a document cannot be referenced if there are still documents referring to it. """ - self.fail() + + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person, delete_rule=DENY) + + self.Person.drop_collection() + BlogPost.drop_collection() + + author = self.Person(name='Test User') + author.save() + + post = BlogPost(content = 'Watched some TV') + post.author = author + post.save() + + # Delete the Person should be denied + self.assertRaises(OperationError, author.delete) # Should raise denied error + self.assertEqual(len(BlogPost.objects), 1) # No objects may have been deleted + self.assertEqual(len(self.Person.objects), 1) + + # Other users, that don't have BlogPosts must be removable, like normal + author = self.Person(name='Another User') + author.save() + + self.assertEqual(len(self.Person.objects), 2) + author.delete() + self.assertEqual(len(self.Person.objects), 1) + + self.Person.drop_collection() + BlogPost.drop_collection() def tearDown(self): From 20eb920cb487457c016e1524348fcd57eace6d50 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 6 Dec 2010 00:06:03 -0800 Subject: [PATCH 026/214] Change test docstring. --- tests/document.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/document.py b/tests/document.py index 99657993..11af8b22 100644 --- a/tests/document.py +++ b/tests/document.py @@ -658,8 +658,8 @@ class DocumentTest(unittest.TestCase): self.assertEqual(len(BlogPost.objects), 0) def test_delete_rule_cascade_recurs(self): - """Ensure that a recursive chain of documents is also deleted upon - cascaded deletion. + """Ensure that a chain of documents is also deleted upon cascaded + deletion. 
""" self.fail() From 3c98a4bff56be07e495f83d2df52e29131005b5b Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 6 Dec 2010 00:07:30 -0800 Subject: [PATCH 027/214] Remove accidentally left behind debugging message. --- mongoengine/document.py | 1 - 1 file changed, 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 38831b22..d89d6872 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -107,7 +107,6 @@ class Document(BaseDocument): if rule == DENY and document_cls.objects(**{field_name: self.id}).count() > 0: msg = u'Could not delete document (at least %s.%s refers to it)' % \ (document_cls.__name__, field_name) - logging.error(msg) raise OperationError(msg) for rule_entry in self._meta['delete_rules']: From a68cb2026671e9e7bd58938147f04d9c910b126a Mon Sep 17 00:00:00 2001 From: Igor Ivanov Date: Thu, 9 Dec 2010 08:38:47 -0800 Subject: [PATCH 028/214] Allow 0 or "" to be used as valid _id value. --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 6b74cb07..77d0c0d4 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -442,7 +442,7 @@ class BaseDocument(object): self._meta.get('allow_inheritance', True) == False): data['_cls'] = self._class_name data['_types'] = self._superclasses.keys() + [self._class_name] - if data.has_key('_id') and not data['_id']: + if data.has_key('_id') and data['_id'] is None: del data['_id'] return data From 07dae64d660235e681427494bd5a2d2dfc0f05dd Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 13 Dec 2010 12:36:24 -0800 Subject: [PATCH 029/214] More the deletion code over to the QuerySet object. The Document object doens't have any delete_rule specific code anymore, and leverages the QuerySet's ability to deny/cascade/nullify its relations. 
--- mongoengine/document.py | 20 -------------------- mongoengine/queryset.py | 22 ++++++++++++++++++++++ tests/document.py | 30 +++++++++++++++++++++++++++++- tests/queryset.py | 15 ++++++++++++++- 4 files changed, 65 insertions(+), 22 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index d89d6872..d1a031ab 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -99,26 +99,6 @@ class Document(BaseDocument): :param safe: check if the operation succeeded before returning """ - # Check for DENY rules before actually deleting/nullifying any other - # references - for rule_entry in self._meta['delete_rules']: - document_cls, field_name = rule_entry - rule = self._meta['delete_rules'][rule_entry] - if rule == DENY and document_cls.objects(**{field_name: self.id}).count() > 0: - msg = u'Could not delete document (at least %s.%s refers to it)' % \ - (document_cls.__name__, field_name) - raise OperationError(msg) - - for rule_entry in self._meta['delete_rules']: - document_cls, field_name = rule_entry - rule = self._meta['delete_rules'][rule_entry] - - if rule == CASCADE: - document_cls.objects(**{field_name: self.id}).delete(safe=safe) - elif rule == NULLIFY: - document_cls.objects(**{field_name: - self.id}).update(**{'unset__%s' % field_name: 1}) - id_field = self._meta['id_field'] object_id = self._fields[id_field].to_mongo(self[id_field]) try: diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 49c8f69d..82efd4f7 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -882,6 +882,28 @@ class QuerySet(object): :param safe: check if the operation succeeded before returning """ + from document import CASCADE, DENY, NULLIFY + + doc = self._document + + # Check for DENY rules before actually deleting/nullifying any other + # references + for rule_entry in doc._meta['delete_rules']: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0: + msg = u'Could not delete document (at least %s.%s refers to it)' % \ + (document_cls.__name__, field_name) + raise OperationError(msg) + + for rule_entry in doc._meta['delete_rules']: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == CASCADE: + document_cls.objects(**{field_name + '__in': self}).delete(safe=safe) + elif rule == NULLIFY: + document_cls.objects(**{field_name + '__in': self}).update(**{'unset__%s' % field_name: 1}) + self._collection.remove(self._query, safe=safe) @classmethod diff --git a/tests/document.py b/tests/document.py index 11af8b22..221d22b7 100644 --- a/tests/document.py +++ b/tests/document.py @@ -661,7 +661,35 @@ class DocumentTest(unittest.TestCase): """Ensure that a chain of documents is also deleted upon cascaded deletion. 
""" - self.fail() + + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person, delete_rule=CASCADE) + + class Comment(Document): + text = StringField() + post = ReferenceField(BlogPost, delete_rule=CASCADE) + + + author = self.Person(name='Test User') + author.save() + + post = BlogPost(content = 'Watched some TV') + post.author = author + post.save() + + comment = Comment(text = 'Kudos.') + comment.post = post + comment.save() + + # Delete the Person, which should lead to deletion of the BlogPost, and, + # recursively to the Comment, too + author.delete() + self.assertEqual(len(Comment.objects), 0) + + self.Person.drop_collection() + BlogPost.drop_collection() + Comment.drop_collection() def test_delete_rule_deny(self): """Ensure that a document cannot be referenced if there are still diff --git a/tests/queryset.py b/tests/queryset.py index 32bbc4bf..fecbaecc 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -737,7 +737,20 @@ class QuerySetTest(unittest.TestCase): def test_delete_rule_cascade(self): """Ensure cascading deletion of referring documents from the database. """ - self.fail() + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person, delete_rule=CASCADE) + BlogPost.drop_collection() + + me = self.Person(name='Test User') + me.save() + + post = BlogPost(content='Watching TV', author=me) + post.save() + + self.assertEqual(1, BlogPost.objects.count()) + self.Person.objects.delete() + self.assertEqual(0, BlogPost.objects.count()) def test_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. From 5b118f64ec0b32cca5909d4fa4809227e4794034 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 13 Dec 2010 12:54:26 -0800 Subject: [PATCH 030/214] Add tests for nullification and denial on the queryset. --- tests/queryset.py | 40 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/tests/queryset.py b/tests/queryset.py index fecbaecc..132549de 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -755,12 +755,46 @@ class QuerySetTest(unittest.TestCase): def test_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. """ - self.fail() + class Category(Document): + name = StringField() + + class BlogPost(Document): + content = StringField() + category = ReferenceField(Category, delete_rule=NULLIFY) + + BlogPost.drop_collection() + Category.drop_collection() + + lameness = Category(name='Lameness') + lameness.save() + + post = BlogPost(content='Watching TV', category=lameness) + post.save() + + self.assertEqual(1, BlogPost.objects.count()) + self.assertEqual('Lameness', BlogPost.objects.first().category.name) + Category.objects.delete() + self.assertEqual(1, BlogPost.objects.count()) + self.assertEqual(None, BlogPost.objects.first().category) def test_delete_rule_deny(self): - """Ensure deletion gets denied on documents that still have references to them. + """Ensure deletion gets denied on documents that still have references + to them. """ - self.fail() + class BlogPost(Document): + content = StringField() + author = ReferenceField(self.Person, delete_rule=DENY) + + BlogPost.drop_collection() + self.Person.drop_collection() + + me = self.Person(name='Test User') + me.save() + + post = BlogPost(content='Watching TV', author=me) + post.save() + + self.assertRaises(OperationError, self.Person.objects.delete) def test_update(self): """Ensure that atomic updates work properly. 
From 4d5164c5804882978ec607b695c13cfcbaf4b7be Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 13 Dec 2010 13:24:20 -0800 Subject: [PATCH 031/214] Use multiple objects in the test. This is to ensure only the intended subset is deleted and not all objects. --- tests/queryset.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/queryset.py b/tests/queryset.py index 132549de..d6ec46bb 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -744,13 +744,16 @@ class QuerySetTest(unittest.TestCase): me = self.Person(name='Test User') me.save() + someoneelse = self.Person(name='Some-one Else') + someoneelse.save() - post = BlogPost(content='Watching TV', author=me) - post.save() + BlogPost(content='Watching TV', author=me).save() + BlogPost(content='Chilling out', author=me).save() + BlogPost(content='Pro Testing', author=someoneelse).save() + self.assertEqual(3, BlogPost.objects.count()) + self.Person.objects(name='Test User').delete() self.assertEqual(1, BlogPost.objects.count()) - self.Person.objects.delete() - self.assertEqual(0, BlogPost.objects.count()) def test_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. From 3b55deb472638cb98a94fa59f7163709660393ed Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 13 Dec 2010 13:25:49 -0800 Subject: [PATCH 032/214] Remove unused meta data. --- tests/document.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/document.py b/tests/document.py index 221d22b7..e768b54f 100644 --- a/tests/document.py +++ b/tests/document.py @@ -630,7 +630,6 @@ class DocumentTest(unittest.TestCase): """ class BlogPost(Document): - meta = {'collection': 'blogpost_1'} content = StringField() author = ReferenceField(self.Person, delete_rule=CASCADE) reviewer = ReferenceField(self.Person, delete_rule=NULLIFY) From f30fd71c5ee6af832cdb9e01f9fdb915fef421ea Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 13 Dec 2010 13:42:01 -0800 Subject: [PATCH 033/214] Refactor: put the delete rule constants into the queryset module, too. --- mongoengine/document.py | 13 +++---------- mongoengine/queryset.py | 11 ++++++++--- 2 files changed, 11 insertions(+), 13 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index d1a031ab..504e14eb 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,21 +1,14 @@ from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, ValidationError) -from queryset import OperationError +from queryset import OperationError, DO_NOTHING from connection import _get_db import pymongo -__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError', - 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] +__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError'] -# Delete rules -DO_NOTHING = 0 -NULLIFY = 1 -CASCADE = 2 -DENY = 3 - class EmbeddedDocument(BaseDocument): """A :class:`~mongoengine.Document` that isn't stored in its own collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as @@ -110,7 +103,7 @@ class Document(BaseDocument): @classmethod def register_delete_rule(cls, document_cls, field_name, rule): """This method registers the delete rules to apply when removing this - object. This could go into the Document class. + object. 
""" if rule == DO_NOTHING: return diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 82efd4f7..c400a614 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -10,11 +10,18 @@ import copy import itertools __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', - 'InvalidCollectionError'] + 'InvalidCollectionError', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] + # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 +# Delete rules +DO_NOTHING = 0 +NULLIFY = 1 +CASCADE = 2 +DENY = 3 + class DoesNotExist(Exception): pass @@ -882,8 +889,6 @@ class QuerySet(object): :param safe: check if the operation succeeded before returning """ - from document import CASCADE, DENY, NULLIFY - doc = self._document # Check for DENY rules before actually deleting/nullifying any other From 620f4a222ea8c3d0177a741e234223909f433555 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Tue, 14 Dec 2010 02:01:25 -0800 Subject: [PATCH 034/214] Don't check for DO_NOTHING in the delete rule registration method. It is already checked before it is invoked. This saves the ugly import of DO_NOTHING inside document.py. --- mongoengine/base.py | 3 ++- mongoengine/document.py | 5 +---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 9f8c1e7b..42db460f 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -1,5 +1,6 @@ from queryset import QuerySet, QuerySetManager from queryset import DoesNotExist, MultipleObjectsReturned +from queryset import DO_NOTHING import sys import pymongo @@ -190,7 +191,7 @@ class DocumentMetaclass(type): new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): field.owner_document = new_class - if hasattr(field, 'delete_rule') and field.delete_rule: + if hasattr(field, 'delete_rule') and field.delete_rule > DO_NOTHING: field.document_type.register_delete_rule(new_class, field.name, field.delete_rule) diff --git a/mongoengine/document.py b/mongoengine/document.py index 504e14eb..e64092e8 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,6 +1,6 @@ from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, ValidationError) -from queryset import OperationError, DO_NOTHING +from queryset import OperationError from connection import _get_db import pymongo @@ -105,9 +105,6 @@ class Document(BaseDocument): """This method registers the delete rules to apply when removing this object. """ - if rule == DO_NOTHING: - return - cls._meta['delete_rules'][(document_cls, field_name)] = rule From 16e1f72e657895e6491f4111e621c510784f596a Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Tue, 14 Dec 2010 03:39:14 -0800 Subject: [PATCH 035/214] Avoid confusing semantics when comparing delete rules. 
--- mongoengine/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 42db460f..405f642c 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -191,9 +191,10 @@ class DocumentMetaclass(type): new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): field.owner_document = new_class - if hasattr(field, 'delete_rule') and field.delete_rule > DO_NOTHING: + delete_rule = getattr(field, 'delete_rule', DO_NOTHING) + if delete_rule != DO_NOTHING: field.document_type.register_delete_rule(new_class, field.name, - field.delete_rule) + delete_rule) module = attrs.get('__module__') From ffc8b21f67c1e43617f5cd33f71192974a12ed99 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Tue, 14 Dec 2010 03:50:49 -0800 Subject: [PATCH 036/214] Some tests broke over the default None value. --- mongoengine/fields.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 01ec1f7b..235694a6 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1,4 +1,5 @@ from base import BaseField, ObjectIdField, ValidationError, get_document +from queryset import DO_NOTHING from document import Document, EmbeddedDocument from connection import _get_db from operator import itemgetter @@ -417,7 +418,7 @@ class ReferenceField(BaseField): access (lazily). """ - def __init__(self, document_type, delete_rule=None, **kwargs): + def __init__(self, document_type, delete_rule=DO_NOTHING, **kwargs): if not isinstance(document_type, basestring): if not issubclass(document_type, (Document, basestring)): raise ValidationError('Argument to ReferenceField constructor ' From e05e6b89f38562c6063154e9e9cb87fca40dde39 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Thu, 16 Dec 2010 11:53:12 +0100 Subject: [PATCH 037/214] Add safe_update parameter to updates. --- mongoengine/queryset.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index c400a614..e12b308c 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -907,7 +907,9 @@ class QuerySet(object): if rule == CASCADE: document_cls.objects(**{field_name + '__in': self}).delete(safe=safe) elif rule == NULLIFY: - document_cls.objects(**{field_name + '__in': self}).update(**{'unset__%s' % field_name: 1}) + document_cls.objects(**{field_name + '__in': self}).update( + safe_update=safe, + **{'unset__%s' % field_name: 1}) self._collection.remove(self._query, safe=safe) From 52f5deb456eea6e9e06236f084c9b13364c0e33a Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 20 Dec 2010 05:23:27 -0800 Subject: [PATCH 038/214] Add documentation for the delete_rule argument. --- docs/guide/defining-documents.rst | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 106d4ec8..a2c598c6 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -193,6 +193,37 @@ as the constructor's argument:: class ProfilePage(Document): content = StringField() +Dealing with deletion of referred documents +''''''''''''''''''''''''''''''''''''''''''' +By default, MongoDB doesn't check the integrity of your data, so deleting +documents that other documents still hold references to will lead to consistency +issues. 
Mongoengine's :class:`ReferenceField` adds some functionality to +safeguard against these kinds of database integrity problems, providing each +reference with a delete rule specification. A delete rule is specified by +supplying the :attr:`delete_rule` attribute on the :class:`ReferenceField` +definition, like this:: + + class Employee(Document): + ... + profile_page = ReferenceField('ProfilePage', delete_rule=mongoengine.NULLIFY) + +Its value can take any of the following constants: + +:const:`mongoengine.DO_NOTHING` + This is the default and won't do anything. Deletes are fast, but may + cause database inconsistency or dangling references. +:const:`mongoengine.DENY` + Deletion is denied if there still exist references to the object being + deleted. +:const:`mongoengine.NULLIFY` + Any object's fields still referring to the object being deleted are + removed (using MongoDB's "unset" operation), effectively nullifying the + relationship. +:const:`mongoengine.CASCADE` + Any object containing fields that are refererring to the object being + deleted are deleted first. + + Generic reference fields '''''''''''''''''''''''' A second kind of reference field also exists, From 07ef58c1a7e757c211bf6036768839d0471dc976 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 20 Dec 2010 05:50:42 -0800 Subject: [PATCH 039/214] Rename delete_rule -> reverse_delete_rule. --- docs/guide/defining-documents.rst | 4 ++-- mongoengine/base.py | 2 +- mongoengine/fields.py | 4 ++-- tests/document.py | 16 ++++++++-------- tests/queryset.py | 12 ++++++------ 5 files changed, 19 insertions(+), 19 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index a2c598c6..2b64ca36 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -200,8 +200,8 @@ documents that other documents still hold references to will lead to consistency issues. Mongoengine's :class:`ReferenceField` adds some functionality to safeguard against these kinds of database integrity problems, providing each reference with a delete rule specification. A delete rule is specified by -supplying the :attr:`delete_rule` attribute on the :class:`ReferenceField` -definition, like this:: +supplying the :attr:`reverse_delete_rule` attributes on the +:class:`ReferenceField` definition, like this:: class Employee(Document): ... diff --git a/mongoengine/base.py b/mongoengine/base.py index 405f642c..a59cdbac 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -191,7 +191,7 @@ class DocumentMetaclass(type): new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): field.owner_document = new_class - delete_rule = getattr(field, 'delete_rule', DO_NOTHING) + delete_rule = getattr(field, 'reverse_delete_rule', DO_NOTHING) if delete_rule != DO_NOTHING: field.document_type.register_delete_rule(new_class, field.name, delete_rule) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 235694a6..5fdde1ee 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -418,13 +418,13 @@ class ReferenceField(BaseField): access (lazily). 
""" - def __init__(self, document_type, delete_rule=DO_NOTHING, **kwargs): + def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs): if not isinstance(document_type, basestring): if not issubclass(document_type, (Document, basestring)): raise ValidationError('Argument to ReferenceField constructor ' 'must be a document class or a string') self.document_type_obj = document_type - self.delete_rule = delete_rule + self.reverse_delete_rule = reverse_delete_rule super(ReferenceField, self).__init__(**kwargs) @property diff --git a/tests/document.py b/tests/document.py index e768b54f..67c21a46 100644 --- a/tests/document.py +++ b/tests/document.py @@ -625,14 +625,14 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() - def test_delete_rule_cascade_and_nullify(self): + def test_reverse_delete_rule_cascade_and_nullify(self): """Ensure that a referenced document is also deleted upon deletion. """ class BlogPost(Document): content = StringField() - author = ReferenceField(self.Person, delete_rule=CASCADE) - reviewer = ReferenceField(self.Person, delete_rule=NULLIFY) + author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) + reviewer = ReferenceField(self.Person, reverse_delete_rule=NULLIFY) self.Person.drop_collection() BlogPost.drop_collection() @@ -656,18 +656,18 @@ class DocumentTest(unittest.TestCase): author.delete() self.assertEqual(len(BlogPost.objects), 0) - def test_delete_rule_cascade_recurs(self): + def test_reverse_delete_rule_cascade_recurs(self): """Ensure that a chain of documents is also deleted upon cascaded deletion. """ class BlogPost(Document): content = StringField() - author = ReferenceField(self.Person, delete_rule=CASCADE) + author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) class Comment(Document): text = StringField() - post = ReferenceField(BlogPost, delete_rule=CASCADE) + post = ReferenceField(BlogPost, reverse_delete_rule=CASCADE) author = self.Person(name='Test User') @@ -690,14 +690,14 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() Comment.drop_collection() - def test_delete_rule_deny(self): + def test_reverse_delete_rule_deny(self): """Ensure that a document cannot be referenced if there are still documents referring to it. """ class BlogPost(Document): content = StringField() - author = ReferenceField(self.Person, delete_rule=DENY) + author = ReferenceField(self.Person, reverse_delete_rule=DENY) self.Person.drop_collection() BlogPost.drop_collection() diff --git a/tests/queryset.py b/tests/queryset.py index d6ec46bb..f95974e2 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -734,12 +734,12 @@ class QuerySetTest(unittest.TestCase): self.Person.objects.delete() self.assertEqual(len(self.Person.objects), 0) - def test_delete_rule_cascade(self): + def test_reverse_delete_rule_cascade(self): """Ensure cascading deletion of referring documents from the database. """ class BlogPost(Document): content = StringField() - author = ReferenceField(self.Person, delete_rule=CASCADE) + author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) BlogPost.drop_collection() me = self.Person(name='Test User') @@ -755,7 +755,7 @@ class QuerySetTest(unittest.TestCase): self.Person.objects(name='Test User').delete() self.assertEqual(1, BlogPost.objects.count()) - def test_delete_rule_nullify(self): + def test_reverse_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. 
""" class Category(Document): @@ -763,7 +763,7 @@ class QuerySetTest(unittest.TestCase): class BlogPost(Document): content = StringField() - category = ReferenceField(Category, delete_rule=NULLIFY) + category = ReferenceField(Category, reverse_delete_rule=NULLIFY) BlogPost.drop_collection() Category.drop_collection() @@ -780,13 +780,13 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(1, BlogPost.objects.count()) self.assertEqual(None, BlogPost.objects.first().category) - def test_delete_rule_deny(self): + def test_reverse_delete_rule_deny(self): """Ensure deletion gets denied on documents that still have references to them. """ class BlogPost(Document): content = StringField() - author = ReferenceField(self.Person, delete_rule=DENY) + author = ReferenceField(self.Person, reverse_delete_rule=DENY) BlogPost.drop_collection() self.Person.drop_collection() From 0f68df3b4a9c7c770b25ca72f0e912e54c205b5c Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 20 Dec 2010 05:52:21 -0800 Subject: [PATCH 040/214] Fix line width. --- docs/guide/defining-documents.rst | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 2b64ca36..80d2cd38 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -205,23 +205,22 @@ supplying the :attr:`reverse_delete_rule` attributes on the class Employee(Document): ... - profile_page = ReferenceField('ProfilePage', delete_rule=mongoengine.NULLIFY) + profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) Its value can take any of the following constants: :const:`mongoengine.DO_NOTHING` - This is the default and won't do anything. Deletes are fast, but may - cause database inconsistency or dangling references. + This is the default and won't do anything. Deletes are fast, but may cause + database inconsistency or dangling references. :const:`mongoengine.DENY` Deletion is denied if there still exist references to the object being deleted. :const:`mongoengine.NULLIFY` - Any object's fields still referring to the object being deleted are - removed (using MongoDB's "unset" operation), effectively nullifying the - relationship. + Any object's fields still referring to the object being deleted are removed + (using MongoDB's "unset" operation), effectively nullifying the relationship. :const:`mongoengine.CASCADE` - Any object containing fields that are refererring to the object being - deleted are deleted first. + Any object containing fields that are refererring to the object being deleted + are deleted first. Generic reference fields From 03a757bc6efa52d85cf26a9d0d5f8086c0234571 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Tue, 21 Dec 2010 01:19:27 -0800 Subject: [PATCH 041/214] Add a safety note on using the new delete rules. --- docs/guide/defining-documents.rst | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 80d2cd38..de0e7272 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -193,6 +193,7 @@ as the constructor's argument:: class ProfilePage(Document): content = StringField() + Dealing with deletion of referred documents ''''''''''''''''''''''''''''''''''''''''''' By default, MongoDB doesn't check the integrity of your data, so deleting @@ -207,6 +208,11 @@ supplying the :attr:`reverse_delete_rule` attributes on the ... 
profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) +The declaration in this example means that when an :class:`Employee` object is +removed, the :class:`ProfilePage` that belongs to that employee is removed as +well. If a whole batch of employees is removed, all profile pages that are +linked are removed as well. + Its value can take any of the following constants: :const:`mongoengine.DO_NOTHING` @@ -223,6 +229,23 @@ Its value can take any of the following constants: are deleted first. +.. warning:: + A safety note on setting up these delete rules! Since the delete rules are + not recorded on the database level by MongoDB itself, but instead at runtime, + in-memory, by the MongoEngine module, it is of the upmost importance + that the module that declares the relationship is loaded **BEFORE** the + delete is invoked. + + If, for example, the :class:`Employee` object lives in the + :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people` + app, it is extremely important that the :mod:`people` app is loaded + before any employee is removed, because otherwise, MongoEngine could + never know this relationship exists. + + In Django, be sure to put all apps that have such delete rule declarations in + their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. + + Generic reference fields '''''''''''''''''''''''' A second kind of reference field also exists, From 0acb2d904db38edc0820c3553ba1e0abf3a5b750 Mon Sep 17 00:00:00 2001 From: Serge Matveenko Date: Tue, 21 Dec 2010 18:11:33 +0300 Subject: [PATCH 042/214] Add hidden (.*) files to .gitignore but not the .gitignore itself. --- .gitignore | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 51a9ca1d..9c612961 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +.* +!.gitignore *.pyc .*.swp *.egg @@ -6,4 +8,4 @@ docs/_build build/ dist/ mongoengine.egg-info/ -env/ \ No newline at end of file +env/ From 846f5a868f345431fd2cae4cf5dab483cab604fc Mon Sep 17 00:00:00 2001 From: Serge Matveenko Date: Tue, 21 Dec 2010 18:16:00 +0300 Subject: [PATCH 043/214] Fix Issue#116: Use cls instead of User in create_user. --- mongoengine/django/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 595852ef..41d307cc 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -86,7 +86,7 @@ class User(Document): else: email = '@'.join([email_name, domain_part.lower()]) - user = User(username=username, email=email, date_joined=now) + user = cls(username=username, email=email, date_joined=now) user.set_password(password) user.save() return user From 34b923b7ac93262521542877df9f96c7cf822d6a Mon Sep 17 00:00:00 2001 From: Serge Matveenko Date: Tue, 21 Dec 2010 18:29:51 +0300 Subject: [PATCH 044/214] Fix Issue#115: Possibility to bypass class_name check in queryset. 
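[Editorial note, not part of the patch] In practice the new keyword reads roughly like this; ``Person``/``Employee`` and the database name are illustrative, and the sketch assumes a document hierarchy with inheritance enabled so that the queryset normally injects a ``_types`` filter::

    from mongoengine import connect, Document, StringField, IntField

    connect('testdb')  # illustrative

    class Person(Document):
        name = StringField()

    class Employee(Person):
        salary = IntField()

    # Normally the queryset adds a _types filter for the class, so only
    # Employee documents come back from the shared collection.
    employees = Employee.objects(name='Ross')

    # With class_check=False the _types filter is skipped and the raw query
    # runs against the collection as-is.
    everyone_named_ross = Employee.objects(name='Ross', class_check=False)
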
--- mongoengine/queryset.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 519dda03..18261574 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -285,6 +285,7 @@ class QuerySet(object): self._ordering = [] self._snapshot = False self._timeout = True + self._class_check = True # If inheritance is allowed, only return instances and instances of # subclasses of the class being used @@ -298,7 +299,8 @@ class QuerySet(object): def _query(self): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) - self._mongo_query.update(self._initial_query) + if self._class_check: + self._mongo_query.update(self._initial_query) return self._mongo_query def ensure_index(self, key_or_list, drop_dups=False, background=False, @@ -349,7 +351,7 @@ class QuerySet(object): return index_list - def __call__(self, q_obj=None, **query): + def __call__(self, q_obj=None, class_check=True, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. @@ -357,6 +359,8 @@ class QuerySet(object): the query; the :class:`~mongoengine.queryset.QuerySet` is filtered multiple times with different :class:`~mongoengine.queryset.Q` objects, only the last one will be used + :param class_check: If set to False bypass class name check when + querying collection :param query: Django-style query keyword arguments """ #if q_obj: @@ -367,6 +371,7 @@ class QuerySet(object): self._query_obj &= query self._mongo_query = None self._cursor_obj = None + self._class_check = class_check return self def filter(self, *q_objs, **query): From ba9813e5a37f0b32292ab599d3030ea6aaa45141 Mon Sep 17 00:00:00 2001 From: Nick Vlku Date: Sun, 9 Jan 2011 22:30:18 -0500 Subject: [PATCH 045/214] Fixed Issue 122: item_frequencies doesn't work if tag is also the name of a native js function Did this by checking if the item is a native function, if it is I set it to an initial numeric value. Future occurrences of the tag count correctly. 
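[Editorial note, not part of the patch] The collision is easy to reproduce: a tag such as ``'watch'`` clashes with a member that plain objects already inherit in the server-side JavaScript engine (presumably ``Object.prototype.watch``), so ``frequencies[item] || 0`` picks up the inherited function rather than a missing count. A hedged usage sketch, with ``BlogPost`` as an illustrative document::

    from mongoengine import Document, ListField, StringField

    class BlogPost(Document):
        tags = ListField(StringField())

    BlogPost.drop_collection()
    BlogPost(tags=['music', 'watch']).save()

    freqs = BlogPost.objects.item_frequencies('tags')
    # Without the guard this commit adds, 'watch' resolves to the inherited
    # native member and its count comes back garbled; with it, both tags
    # count normally.
    assert int(freqs['music']) == 1
    assert int(freqs['watch']) == 1
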
--- mongoengine/queryset.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 11d4706c..17ebc2e9 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1209,11 +1209,19 @@ class QuerySet(object): db[collection].find(query).forEach(function(doc) { if (doc[field].constructor == Array) { doc[field].forEach(function(item) { - frequencies[item] = inc + (frequencies[item] || 0); + var preValue = 0; + if (!isNaN(frequencies[item])) { + preValue = frequencies[item]; + } + frequencies[item] = inc + preValue; }); } else { var item = doc[field]; - frequencies[item] = inc + (frequencies[item] || 0); + var preValue = 0; + if (!isNaN(frequencies[item])) { + preValue = frequencies[item]; + } + frequencies[item] = inc + preValue; } }); return frequencies; From 53d66b72673550efa9316acd097c4dc8275a8346 Mon Sep 17 00:00:00 2001 From: Michael Henson Date: Thu, 27 Jan 2011 23:51:10 -0500 Subject: [PATCH 046/214] Added QuerySet.clone() to support copying querysets --- mongoengine/queryset.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 519dda03..5d48e6ef 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -293,6 +293,20 @@ class QuerySet(object): self._cursor_obj = None self._limit = None self._skip = None + + def clone(self): + """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet`""" + c = self.__class__(self._document, self._collection_obj) + + copy_props = ('_initial_query', '_query_obj', '_where_clause', + '_loaded_fields', '_ordering', '_snapshot', + '_timeout', '_limit', '_skip') + + for prop in copy_props: + val = getattr(self, prop) + setattr(c, prop, copy.deepcopy(val)) + + return c @property def _query(self): From 6f7d7537f2fe5fbadffbe174fcbef7395922ae01 Mon Sep 17 00:00:00 2001 From: Nick Vlku Date: Sun, 6 Mar 2011 18:59:29 -0500 Subject: [PATCH 047/214] Added a test to verify that if a native JS function is put in as a tag, item_frequencies no longer fails (added the word 'watch' as a tag) --- tests/queryset.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/queryset.py b/tests/queryset.py index d0cdf106..d503cf3f 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1208,13 +1208,13 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - BlogPost(hits=1, tags=['music', 'film', 'actors']).save() + BlogPost(hits=1, tags=['music', 'film', 'actors', 'watch']).save() BlogPost(hits=2, tags=['music']).save() BlogPost(hits=2, tags=['music', 'actors']).save() f = BlogPost.objects.item_frequencies('tags') f = dict((key, int(val)) for key, val in f.items()) - self.assertEqual(set(['music', 'film', 'actors']), set(f.keys())) + self.assertEqual(set(['music', 'film', 'actors', 'watch']), set(f.keys())) self.assertEqual(f['music'], 3) self.assertEqual(f['actors'], 2) self.assertEqual(f['film'], 1) @@ -1228,9 +1228,9 @@ class QuerySetTest(unittest.TestCase): # Check that normalization works f = BlogPost.objects.item_frequencies('tags', normalize=True) - self.assertAlmostEqual(f['music'], 3.0/6.0) - self.assertAlmostEqual(f['actors'], 2.0/6.0) - self.assertAlmostEqual(f['film'], 1.0/6.0) + self.assertAlmostEqual(f['music'], 3.0/7.0) + self.assertAlmostEqual(f['actors'], 2.0/7.0) + self.assertAlmostEqual(f['film'], 1.0/7.0) # Check item_frequencies works for non-list fields f = BlogPost.objects.item_frequencies('hits') From 
2c7469c62acaef77272383428b1ddcdee574b149 Mon Sep 17 00:00:00 2001 From: Stuart Rackham Date: Sun, 3 Apr 2011 15:21:00 +1200 Subject: [PATCH 048/214] Additional file-like behavior for FileField (optional size arg for read method; fixed seek and tell methods for reading files). --- mongoengine/fields.py | 12 +++++++++--- tests/fields.py | 6 ++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index e95fd65e..7639c7b9 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -561,6 +561,7 @@ class GridFSProxy(object): self.fs = gridfs.GridFS(_get_db()) # Filesystem instance self.newfile = None # Used for partial writes self.grid_id = grid_id # Store GridFS id for file + self.gridout = None def __getattr__(self, name): obj = self.get() @@ -574,8 +575,12 @@ class GridFSProxy(object): def get(self, id=None): if id: self.grid_id = id + if self.grid_id is None: + return None try: - return self.fs.get(id or self.grid_id) + if self.gridout is None: + self.gridout = self.fs.get(self.grid_id) + return self.gridout except: # File has been deleted return None @@ -605,9 +610,9 @@ class GridFSProxy(object): self.grid_id = self.newfile._id self.newfile.writelines(lines) - def read(self): + def read(self, size=-1): try: - return self.get().read() + return self.get().read(size) except: return None @@ -615,6 +620,7 @@ class GridFSProxy(object): # Delete file from GridFS, FileField still remains self.fs.delete(self.grid_id) self.grid_id = None + self.grid_out = None def replace(self, file, **kwargs): self.delete() diff --git a/tests/fields.py b/tests/fields.py index 5602cdec..c76935da 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -700,6 +700,12 @@ class FieldTest(unittest.TestCase): self.assertTrue(streamfile == result) self.assertEquals(result.file.read(), text + more_text) self.assertEquals(result.file.content_type, content_type) + result.file.seek(0) + self.assertEquals(result.file.tell(), 0) + self.assertEquals(result.file.read(len(text)), text) + self.assertEquals(result.file.tell(), len(text)) + self.assertEquals(result.file.read(len(more_text)), more_text) + self.assertEquals(result.file.tell(), len(text + more_text)) result.file.delete() # Ensure deleted file returns None From bd84d08b959f60d0adc2d39e1e51d1be4f5f42fb Mon Sep 17 00:00:00 2001 From: Stuart Rackham Date: Mon, 4 Apr 2011 13:44:36 +1200 Subject: [PATCH 049/214] Fixed misspelt variable name. 
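[Editorial note, not part of the patch] With these two patches applied, a ``FileField`` can be consumed much like an ordinary file object. A rough usage sketch mirroring the test above, assuming the proxy's ``put()`` helper; ``StreamFile`` is an illustrative document name::

    from mongoengine import Document, FileField

    class StreamFile(Document):
        the_file = FileField()

    doc = StreamFile()
    doc.the_file.put('Hello, World!', content_type='text/plain')
    doc.save()

    doc = StreamFile.objects.first()
    print(doc.the_file.read(5))   # 'Hello' -- read() now accepts a size argument
    print(doc.the_file.tell())    # 5
    doc.the_file.seek(0)          # rewinding works because the GridOut is cached
    print(doc.the_file.read())    # 'Hello, World!'
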
--- mongoengine/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 7639c7b9..186826ac 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -620,7 +620,7 @@ class GridFSProxy(object): # Delete file from GridFS, FileField still remains self.fs.delete(self.grid_id) self.grid_id = None - self.grid_out = None + self.gridout = None def replace(self, file, **kwargs): self.delete() From 76cbb668437267e41ed840b1177e6af8cda4f2c4 Mon Sep 17 00:00:00 2001 From: Alistair Roche Date: Thu, 28 Apr 2011 14:31:19 +0100 Subject: [PATCH 050/214] Fixed error with _lookup_field It was failing when given multiple fields --- mongoengine/queryset.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 9f24dea5..e2273620 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -522,6 +522,7 @@ class QuerySet(object): raise InvalidQueryError('Cannot resolve field "%s"' % field_name) fields.append(field) + field = None return fields @classmethod From f0277736e2144130b209e0a11667e50c098ec1bc Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 9 May 2011 10:22:37 +0100 Subject: [PATCH 051/214] Updated queryset to handle latest version of pymongo map_reduce now requires an output. Reverted previous _lookup_field change, until a test case is produced for the incorrect behaviour. --- mongoengine/queryset.py | 14 ++++++-------- tests/queryset.py | 9 ++++++--- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index e2273620..f5d5c5fb 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -522,7 +522,6 @@ class QuerySet(object): raise InvalidQueryError('Cannot resolve field "%s"' % field_name) fields.append(field) - field = None return fields @classmethod @@ -731,7 +730,7 @@ class QuerySet(object): def __len__(self): return self.count() - def map_reduce(self, map_f, reduce_f, finalize_f=None, limit=None, + def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, scope=None, keep_temp=False): """Perform a map/reduce query using the current query spec and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, @@ -745,26 +744,26 @@ class QuerySet(object): :param map_f: map function, as :class:`~pymongo.code.Code` or string :param reduce_f: reduce function, as :class:`~pymongo.code.Code` or string + :param output: output collection name :param finalize_f: finalize function, an optional function that performs any post-reduction processing. :param scope: values to insert into map/reduce global scope. Optional. :param limit: number of objects from current query to provide to map/reduce method - :param keep_temp: keep temporary table (boolean, default ``True``) Returns an iterator yielding :class:`~mongoengine.document.MapReduceDocument`. - .. note:: Map/Reduce requires server version **>= 1.1.1**. The PyMongo + .. note:: Map/Reduce changed in server version **>= 1.7.4**. The PyMongo :meth:`~pymongo.collection.Collection.map_reduce` helper requires - PyMongo version **>= 1.2**. + PyMongo version **>= 1.11**. .. 
versionadded:: 0.3 """ from document import MapReduceDocument if not hasattr(self._collection, "map_reduce"): - raise NotImplementedError("Requires MongoDB >= 1.1.1") + raise NotImplementedError("Requires MongoDB >= 1.7.1") map_f_scope = {} if isinstance(map_f, pymongo.code.Code): @@ -795,8 +794,7 @@ class QuerySet(object): if limit: mr_args['limit'] = limit - - results = self._collection.map_reduce(map_f, reduce_f, **mr_args) + results = self._collection.map_reduce(map_f, reduce_f, output, **mr_args) results = results.find() if self._ordering: diff --git a/tests/queryset.py b/tests/queryset.py index d0cdf106..746e8c2e 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1027,7 +1027,7 @@ class QuerySetTest(unittest.TestCase): """ # run a map/reduce operation spanning all posts - results = BlogPost.objects.map_reduce(map_f, reduce_f) + results = BlogPost.objects.map_reduce(map_f, reduce_f, "myresults") results = list(results) self.assertEqual(len(results), 4) @@ -1076,7 +1076,7 @@ class QuerySetTest(unittest.TestCase): } """ - results = BlogPost.objects.map_reduce(map_f, reduce_f) + results = BlogPost.objects.map_reduce(map_f, reduce_f, "myresults") results = list(results) self.assertEqual(results[0].object, post1) @@ -1187,6 +1187,7 @@ class QuerySetTest(unittest.TestCase): results = Link.objects.order_by("-value") results = results.map_reduce(map_f, reduce_f, + "myresults", finalize_f=finalize_f, scope=scope) results = list(results) @@ -1451,7 +1452,9 @@ class QuerySetTest(unittest.TestCase): """ class Test(Document): testdict = DictField() - + + Test.drop_collection() + t = Test(testdict={'f': 'Value'}) t.save() From 31521ccff5d984d3321d1ec3791521cead3ae37f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 10:30:07 +0100 Subject: [PATCH 052/214] Added queryset clone support and tests, thanks to hensom Fixes #130 --- mongoengine/queryset.py | 32 +++++------ tests/queryset.py | 120 ++++++++++++++++++++++++++++------------ 2 files changed, 101 insertions(+), 51 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index bafb7c19..e0c62132 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -344,19 +344,19 @@ class QuerySet(object): self._cursor_obj = None self._limit = None self._skip = None - + def clone(self): """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet`""" c = self.__class__(self._document, self._collection_obj) - + copy_props = ('_initial_query', '_query_obj', '_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_timeout', '_limit', '_skip') - + for prop in copy_props: val = getattr(self, prop) setattr(c, prop, copy.deepcopy(val)) - + return c @property @@ -493,7 +493,7 @@ class QuerySet(object): } if self._loaded_fields: cursor_args['fields'] = self._loaded_fields.as_dict() - self._cursor_obj = self._collection.find(self._query, + self._cursor_obj = self._collection.find(self._query, **cursor_args) # Apply where clauses to cursor if self._where_clause: @@ -553,8 +553,8 @@ class QuerySet(object): operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', 'all', 'size', 'exists', 'not'] geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'near', 'near_sphere'] - match_operators = ['contains', 'icontains', 'startswith', - 'istartswith', 'endswith', 'iendswith', + match_operators = ['contains', 'icontains', 'startswith', + 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact'] mongo_query = {} @@ -644,8 +644,8 @@ class QuerySet(object): % 
self._document._class_name) def get_or_create(self, *q_objs, **query): - """Retrieve unique object or create, if it doesn't exist. Returns a tuple of - ``(object, created)``, where ``object`` is the retrieved or created object + """Retrieve unique object or create, if it doesn't exist. Returns a tuple of + ``(object, created)``, where ``object`` is the retrieved or created object and ``created`` is a boolean specifying whether a new object was created. Raises :class:`~mongoengine.queryset.MultipleObjectsReturned` or `DocumentName.MultipleObjectsReturned` if multiple results are found. @@ -857,7 +857,7 @@ class QuerySet(object): self._skip, self._limit = key.start, key.stop except IndexError, err: # PyMongo raises an error if key.start == key.stop, catch it, - # bin it, kill it. + # bin it, kill it. start = key.start or 0 if start >= 0 and key.stop >= 0 and key.step is None: if start == key.stop: @@ -1052,7 +1052,7 @@ class QuerySet(object): return mongo_update def update(self, safe_update=True, upsert=False, **update): - """Perform an atomic update on the fields matched by the query. When + """Perform an atomic update on the fields matched by the query. When ``safe_update`` is used, the number of affected documents is returned. :param safe: check if the operation succeeded before returning @@ -1076,7 +1076,7 @@ class QuerySet(object): raise OperationError(u'Update failed (%s)' % unicode(err)) def update_one(self, safe_update=True, upsert=False, **update): - """Perform an atomic update on first field matched by the query. When + """Perform an atomic update on first field matched by the query. When ``safe_update`` is used, the number of affected documents is returned. :param safe: check if the operation succeeded before returning @@ -1104,8 +1104,8 @@ class QuerySet(object): return self def _sub_js_fields(self, code): - """When fields are specified with [~fieldname] syntax, where - *fieldname* is the Python name of a field, *fieldname* will be + """When fields are specified with [~fieldname] syntax, where + *fieldname* is the Python name of a field, *fieldname* will be substituted for the MongoDB name of the field (specified using the :attr:`name` keyword argument in a field's constructor). """ @@ -1128,9 +1128,9 @@ class QuerySet(object): options specified as keyword arguments. As fields in MongoEngine may use different names in the database (set - using the :attr:`db_field` keyword argument to a :class:`Field` + using the :attr:`db_field` keyword argument to a :class:`Field` constructor), a mechanism exists for replacing MongoEngine field names - with the database field names in Javascript code. When accessing a + with the database field names in Javascript code. When accessing a field, use square-bracket notation, and prefix the MongoEngine field name with a tilde (~). diff --git a/tests/queryset.py b/tests/queryset.py index d4d7fb3a..25431782 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -162,7 +162,7 @@ class QuerySetTest(unittest.TestCase): person = self.Person.objects.get(age__lt=30) self.assertEqual(person.name, "User A") - + def test_find_array_position(self): """Ensure that query by array position works. 
""" @@ -177,7 +177,7 @@ class QuerySetTest(unittest.TestCase): posts = ListField(EmbeddedDocumentField(Post)) Blog.drop_collection() - + Blog.objects.create(tags=['a', 'b']) self.assertEqual(len(Blog.objects(tags__0='a')), 1) self.assertEqual(len(Blog.objects(tags__0='b')), 0) @@ -226,16 +226,16 @@ class QuerySetTest(unittest.TestCase): person, created = self.Person.objects.get_or_create(age=30) self.assertEqual(person.name, "User B") self.assertEqual(created, False) - + person, created = self.Person.objects.get_or_create(age__lt=30) self.assertEqual(person.name, "User A") self.assertEqual(created, False) - + # Try retrieving when no objects exists - new doc should be created kwargs = dict(age=50, defaults={'name': 'User C'}) person, created = self.Person.objects.get_or_create(**kwargs) self.assertEqual(created, True) - + person = self.Person.objects.get(age=50) self.assertEqual(person.name, "User C") @@ -328,7 +328,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(obj, person) obj = self.Person.objects(Q(name__iexact='gUIDO VAN rOSSU')).first() self.assertEqual(obj, None) - + # Test unsafe expressions person = self.Person(name='Guido van Rossum [.\'Geek\']') person.save() @@ -674,7 +674,7 @@ class QuerySetTest(unittest.TestCase): posts = [post.id for post in q] published_posts = (post1, post2, post3, post5, post6) self.assertTrue(all(obj.id in posts for obj in published_posts)) - + # Check Q object combination date = datetime(2010, 1, 10) @@ -714,7 +714,7 @@ class QuerySetTest(unittest.TestCase): obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first() self.assertEqual(obj, person) - + obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() self.assertEqual(obj, None) @@ -786,7 +786,7 @@ class QuerySetTest(unittest.TestCase): class BlogPost(Document): name = StringField(db_field='doc-name') - comments = ListField(EmbeddedDocumentField(Comment), + comments = ListField(EmbeddedDocumentField(Comment), db_field='cmnts') BlogPost.drop_collection() @@ -958,7 +958,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects.update_one(unset__hits=1) post.reload() self.assertEqual(post.hits, None) - + BlogPost.drop_collection() def test_update_pull(self): @@ -1038,7 +1038,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(film.value, 3) BlogPost.drop_collection() - + def test_map_reduce_with_custom_object_ids(self): """Ensure that QuerySet.map_reduce works properly with custom primary keys. 
@@ -1047,24 +1047,24 @@ class QuerySetTest(unittest.TestCase): class BlogPost(Document): title = StringField(primary_key=True) tags = ListField(StringField()) - + post1 = BlogPost(title="Post #1", tags=["mongodb", "mongoengine"]) post2 = BlogPost(title="Post #2", tags=["django", "mongodb"]) post3 = BlogPost(title="Post #3", tags=["hitchcock films"]) - + post1.save() post2.save() post3.save() - + self.assertEqual(BlogPost._fields['title'].db_field, '_id') self.assertEqual(BlogPost._meta['id_field'], 'title') - + map_f = """ function() { emit(this._id, 1); } """ - + # reduce to a list of tag ids and counts reduce_f = """ function(key, values) { @@ -1075,10 +1075,10 @@ class QuerySetTest(unittest.TestCase): return total; } """ - + results = BlogPost.objects.map_reduce(map_f, reduce_f, "myresults") results = list(results) - + self.assertEqual(results[0].object, post1) self.assertEqual(results[1].object, post2) self.assertEqual(results[2].object, post3) @@ -1168,7 +1168,7 @@ class QuerySetTest(unittest.TestCase): finalize_f = """ function(key, value) { - // f(sec_since_epoch,y,z) = + // f(sec_since_epoch,y,z) = // log10(z) + ((y*sec_since_epoch) / 45000) z_10 = Math.log(value.z) / Math.log(10); weight = z_10 + ((value.y * value.t_s) / 45000); @@ -1452,9 +1452,9 @@ class QuerySetTest(unittest.TestCase): """ class Test(Document): testdict = DictField() - + Test.drop_collection() - + t = Test(testdict={'f': 'Value'}) t.save() @@ -1517,12 +1517,12 @@ class QuerySetTest(unittest.TestCase): title = StringField() date = DateTimeField() location = GeoPointField() - + def __unicode__(self): return self.title - + Event.drop_collection() - + event1 = Event(title="Coltrane Motion @ Double Door", date=datetime.now() - timedelta(days=1), location=[41.909889, -87.677137]) @@ -1532,7 +1532,7 @@ class QuerySetTest(unittest.TestCase): event3 = Event(title="Coltrane Motion @ Empty Bottle", date=datetime.now(), location=[41.900474, -87.686638]) - + event1.save() event2.save() event3.save() @@ -1552,24 +1552,24 @@ class QuerySetTest(unittest.TestCase): self.assertTrue(event2 not in events) self.assertTrue(event1 in events) self.assertTrue(event3 in events) - + # ensure ordering is respected by "near" events = Event.objects(location__near=[41.9120459, -87.67892]) events = events.order_by("-date") self.assertEqual(events.count(), 3) self.assertEqual(list(events), [event3, event1, event2]) - + # find events within 10 degrees of san francisco point_and_distance = [[37.7566023, -122.415579], 10] events = Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) - + # find events within 1 degree of greenpoint, broolyn, nyc, ny point_and_distance = [[40.7237134, -73.9509714], 1] events = Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 0) - + # ensure ordering is respected by "within_distance" point_and_distance = [[41.9120459, -87.67892], 10] events = Event.objects(location__within_distance=point_and_distance) @@ -1582,7 +1582,7 @@ class QuerySetTest(unittest.TestCase): events = Event.objects(location__within_box=box) self.assertEqual(events.count(), 1) self.assertEqual(events[0].id, event2.id) - + Event.drop_collection() def test_spherical_geospatial_operators(self): @@ -1692,6 +1692,35 @@ class QuerySetTest(unittest.TestCase): Number.drop_collection() + def test_clone(self): + """Ensure that cloning clones complex querysets + """ + class Number(Document): + n = IntField() + + Number.drop_collection() + 
+ for i in xrange(1, 101): + t = Number(n=i) + t.save() + + test = Number.objects + test2 = test.clone() + self.assertFalse(test == test2) + self.assertEqual(test.count(), test2.count()) + + test = test.filter(n__gt=11) + test2 = test.clone() + self.assertFalse(test == test2) + self.assertEqual(test.count(), test2.count()) + + test = test.limit(10) + test2 = test.clone() + self.assertFalse(test == test2) + self.assertEqual(test.count(), test2.count()) + + Number.drop_collection() + def test_unset_reference(self): class Comment(Document): text = StringField() @@ -1734,7 +1763,7 @@ class QTest(unittest.TestCase): query = {'age': {'$gte': 18}, 'name': 'test'} self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query) - + def test_q_with_dbref(self): """Ensure Q objects handle DBRefs correctly""" connect(db='mongoenginetest') @@ -1776,7 +1805,7 @@ class QTest(unittest.TestCase): query = Q(x__lt=100) & Q(y__ne='NotMyString') query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100) mongo_query = { - 'x': {'$lt': 100, '$gt': -100}, + 'x': {'$lt': 100, '$gt': -100}, 'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']}, } self.assertEqual(query.to_query(TestDoc), mongo_query) @@ -1850,6 +1879,30 @@ class QTest(unittest.TestCase): for condition in conditions: self.assertTrue(condition in query['$or']) + + def test_q_clone(self): + + class TestDoc(Document): + x = IntField() + + TestDoc.drop_collection() + for i in xrange(1, 101): + t = TestDoc(x=i) + t.save() + + # Check normal cases work without an error + test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3)) + + self.assertEqual(test.count(), 3) + + test2 = test.clone() + self.assertEqual(test2.count(), 3) + self.assertFalse(test2 == test) + + test2.filter(x=6) + self.assertEqual(test2.count(), 1) + self.assertEqual(test.count(), 3) + class QueryFieldListTest(unittest.TestCase): def test_empty(self): q = QueryFieldList() @@ -1904,8 +1957,5 @@ class QueryFieldListTest(unittest.TestCase): self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True}) - - - if __name__ == '__main__': unittest.main() From 1a049ee49d3e5fc758f9c0d5268282599f9268dc Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 11:06:14 +0100 Subject: [PATCH 053/214] Added regression test case for mongoengine/issues/155 --- tests/queryset.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index 25431782..6a87a1e6 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -105,6 +105,10 @@ class QuerySetTest(unittest.TestCase): people = list(self.Person.objects[1:1]) self.assertEqual(len(people), 0) + # Test slice out of range + people = list(self.Person.objects[80000:80001]) + self.assertEqual(len(people), 0) + def test_find_one(self): """Ensure that a query using find_one returns a valid result. """ From 1781c4638b0bc7b510c6c3cea27305fcb933941e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 11:41:23 +0100 Subject: [PATCH 054/214] Changed how the connection identity key is made Uses the current thread identity as well as the process idenity to form the key. 
Fixes #151 --- mongoengine/connection.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 814fde13..fc6c7680 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,5 +1,6 @@ from pymongo import Connection import multiprocessing +import threading __all__ = ['ConnectionError', 'connect'] @@ -22,6 +23,8 @@ class ConnectionError(Exception): def _get_connection(reconnect=False): + """Handles the connection to the database + """ global _connection identity = get_identity() # Connect to the database if not already connected @@ -33,6 +36,9 @@ def _get_connection(reconnect=False): return _connection[identity] def _get_db(reconnect=False): + """Handles database connections and authentication based on the current + identity + """ global _db, _connection identity = get_identity() # Connect if not already connected @@ -52,12 +58,17 @@ def _get_db(reconnect=False): return _db[identity] def get_identity(): + """Creates an identity key based on the current process and thread + identity. + """ identity = multiprocessing.current_process()._identity identity = 0 if not identity else identity[0] + + identity = (identity, threading.current_thread().ident) return identity - + def connect(db, username=None, password=None, **kwargs): - """Connect to the database specified by the 'db' argument. Connection + """Connect to the database specified by the 'db' argument. Connection settings may be provided here as well if the database is not running on the default port on localhost. If authentication is needed, provide username and password arguments as well. From 7ba40062d3a8b661feba22db94711a472c14a172 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 12:18:33 +0100 Subject: [PATCH 055/214] Fixes ordering with custom db field names Closes #125 --- mongoengine/queryset.py | 4 ++++ tests/queryset.py | 14 ++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index e0c62132..3b37c3d7 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -939,6 +939,10 @@ class QuerySet(object): if key[0] in ('-', '+'): key = key[1:] key = key.replace('__', '.') + try: + key = QuerySet._translate_field_name(self._document, key) + except: + pass key_list.append((key, direction)) self._ordering = key_list diff --git a/tests/queryset.py b/tests/queryset.py index 6a87a1e6..48ce6272 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1746,6 +1746,20 @@ class QuerySetTest(unittest.TestCase): Comment.drop_collection() Post.drop_collection() + def test_order_works_with_custom_db_field_names(self): + class Number(Document): + n = IntField(db_field='number') + + Number.drop_collection() + + n2 = Number.objects.create(n=2) + n1 = Number.objects.create(n=1) + + self.assertEqual(list(Number.objects), [n2,n1]) + self.assertEqual(list(Number.objects.order_by('n')), [n1,n2]) + + Number.drop_collection() + class QTest(unittest.TestCase): From 5cbc76ea81c78a379d8be038437fdb49edcb7e13 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 12:26:51 +0100 Subject: [PATCH 056/214] Pep8 --- tests/document.py | 68 +++++++++++++++++++++++------------------------ 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/tests/document.py b/tests/document.py index 0da7b93e..45f1c3c7 100644 --- a/tests/document.py +++ b/tests/document.py @@ -7,7 +7,7 @@ from mongoengine.connection import _get_db class DocumentTest(unittest.TestCase): - + def 
setUp(self): connect(db='mongoenginetest') self.db = _get_db() @@ -38,7 +38,7 @@ class DocumentTest(unittest.TestCase): name = name_field age = age_field non_field = True - + self.assertEqual(Person._fields['name'], name_field) self.assertEqual(Person._fields['age'], age_field) self.assertFalse('non_field' in Person._fields) @@ -60,7 +60,7 @@ class DocumentTest(unittest.TestCase): mammal_superclasses = {'Animal': Animal} self.assertEqual(Mammal._superclasses, mammal_superclasses) - + dog_superclasses = { 'Animal': Animal, 'Animal.Mammal': Mammal, @@ -68,7 +68,7 @@ class DocumentTest(unittest.TestCase): self.assertEqual(Dog._superclasses, dog_superclasses) def test_get_subclasses(self): - """Ensure that the correct list of subclasses is retrieved by the + """Ensure that the correct list of subclasses is retrieved by the _get_subclasses method. """ class Animal(Document): pass @@ -78,15 +78,15 @@ class DocumentTest(unittest.TestCase): class Dog(Mammal): pass mammal_subclasses = { - 'Animal.Mammal.Dog': Dog, + 'Animal.Mammal.Dog': Dog, 'Animal.Mammal.Human': Human } self.assertEqual(Mammal._get_subclasses(), mammal_subclasses) - + animal_subclasses = { 'Animal.Fish': Fish, 'Animal.Mammal': Mammal, - 'Animal.Mammal.Dog': Dog, + 'Animal.Mammal.Dog': Dog, 'Animal.Mammal.Human': Human } self.assertEqual(Animal._get_subclasses(), animal_subclasses) @@ -124,7 +124,7 @@ class DocumentTest(unittest.TestCase): self.assertTrue('name' in Employee._fields) self.assertTrue('salary' in Employee._fields) - self.assertEqual(Employee._meta['collection'], + self.assertEqual(Employee._meta['collection'], self.Person._meta['collection']) # Ensure that MRO error is not raised @@ -146,7 +146,7 @@ class DocumentTest(unittest.TestCase): class Dog(Animal): pass self.assertRaises(ValueError, create_dog_class) - + # Check that _cls etc aren't present on simple documents dog = Animal(name='dog') dog.save() @@ -161,7 +161,7 @@ class DocumentTest(unittest.TestCase): class Employee(self.Person): meta = {'allow_inheritance': False} self.assertRaises(ValueError, create_employee_class) - + # Test the same for embedded documents class Comment(EmbeddedDocument): content = StringField() @@ -186,7 +186,7 @@ class DocumentTest(unittest.TestCase): class Person(Document): name = StringField() meta = {'collection': collection} - + user = Person(name="Test User") user.save() self.assertTrue(collection in self.db.collection_names()) @@ -280,7 +280,7 @@ class DocumentTest(unittest.TestCase): tags = ListField(StringField()) meta = { 'indexes': [ - '-date', + '-date', 'tags', ('category', '-date') ], @@ -296,12 +296,12 @@ class DocumentTest(unittest.TestCase): list(BlogPost.objects) info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] + self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] in info) self.assertTrue([('_types', 1), ('addDate', -1)] in info) # tags is a list field so it shouldn't have _types in the index self.assertTrue([('tags', 1)] in info) - + class ExtendedBlogPost(BlogPost): title = StringField() meta = {'indexes': ['title']} @@ -311,7 +311,7 @@ class DocumentTest(unittest.TestCase): list(ExtendedBlogPost.objects) info = ExtendedBlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] + self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] in info) 
self.assertTrue([('_types', 1), ('addDate', -1)] in info) self.assertTrue([('_types', 1), ('title', 1)] in info) @@ -380,7 +380,7 @@ class DocumentTest(unittest.TestCase): class EmailUser(User): email = StringField() - + user = User(username='test', name='test user') user.save() @@ -391,20 +391,20 @@ class DocumentTest(unittest.TestCase): user_son = User.objects._collection.find_one() self.assertEqual(user_son['_id'], 'test') self.assertTrue('username' not in user_son['_id']) - + User.drop_collection() - + user = User(pk='mongo', name='mongo user') user.save() - + user_obj = User.objects.first() self.assertEqual(user_obj.id, 'mongo') self.assertEqual(user_obj.pk, 'mongo') - + user_son = User.objects._collection.find_one() self.assertEqual(user_son['_id'], 'mongo') self.assertTrue('username' not in user_son['_id']) - + User.drop_collection() def test_creation(self): @@ -457,18 +457,18 @@ class DocumentTest(unittest.TestCase): """ class Comment(EmbeddedDocument): content = StringField() - + self.assertTrue('content' in Comment._fields) self.assertFalse('id' in Comment._fields) self.assertFalse('collection' in Comment._meta) - + def test_embedded_document_validation(self): """Ensure that embedded documents may be validated. """ class Comment(EmbeddedDocument): date = DateTimeField() content = StringField(required=True) - + comment = Comment() self.assertRaises(ValidationError, comment.validate) @@ -496,7 +496,7 @@ class DocumentTest(unittest.TestCase): # Test skipping validation on save class Recipient(Document): email = EmailField(required=True) - + recipient = Recipient(email='root@localhost') self.assertRaises(ValidationError, recipient.save) try: @@ -517,19 +517,19 @@ class DocumentTest(unittest.TestCase): """Ensure that a document may be saved with a custom _id. """ # Create person object and save it to the database - person = self.Person(name='Test User', age=30, + person = self.Person(name='Test User', age=30, id='497ce96f395f2f052a494fd4') person.save() # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._meta['collection']] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') - + def test_save_custom_pk(self): """Ensure that a document may be saved with a custom _id using pk alias. """ # Create person object and save it to the database - person = self.Person(name='Test User', age=30, + person = self.Person(name='Test User', age=30, pk='497ce96f395f2f052a494fd4') person.save() # Ensure that the object is in the database with the correct _id @@ -565,7 +565,7 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() def test_save_embedded_document(self): - """Ensure that a document with an embedded document field may be + """Ensure that a document with an embedded document field may be saved in the database. """ class EmployeeDetails(EmbeddedDocument): @@ -591,7 +591,7 @@ class DocumentTest(unittest.TestCase): def test_save_reference(self): """Ensure that a document reference field may be saved in the database. 
""" - + class BlogPost(Document): meta = {'collection': 'blogpost_1'} content = StringField() @@ -610,7 +610,7 @@ class DocumentTest(unittest.TestCase): post_obj = BlogPost.objects.first() # Test laziness - self.assertTrue(isinstance(post_obj._data['author'], + self.assertTrue(isinstance(post_obj._data['author'], pymongo.dbref.DBRef)) self.assertTrue(isinstance(post_obj.author, self.Person)) self.assertEqual(post_obj.author.name, 'Test User') @@ -737,7 +737,7 @@ class DocumentTest(unittest.TestCase): class BlogPost(Document): pass - + # Clear old datas User.drop_collection() BlogPost.drop_collection() @@ -774,9 +774,9 @@ class DocumentTest(unittest.TestCase): # in Set all_user_set = set(User.objects.all()) - + self.assertTrue(u1 in all_user_set ) - + if __name__ == '__main__': unittest.main() From 7526272f84f2867f1619e1e947292fea3ec57b1c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 12:27:33 +0100 Subject: [PATCH 057/214] Added test example of updating an embedded field Closes #139 --- tests/document.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/document.py b/tests/document.py index 45f1c3c7..6f9d9ecb 100644 --- a/tests/document.py +++ b/tests/document.py @@ -588,6 +588,34 @@ class DocumentTest(unittest.TestCase): # Ensure that the 'details' embedded object saved correctly self.assertEqual(employee_obj['details']['position'], 'Developer') + def test_updating_an_embedded_document(self): + """Ensure that a document with an embedded document field may be + saved in the database. + """ + class EmployeeDetails(EmbeddedDocument): + position = StringField() + + class Employee(self.Person): + salary = IntField() + details = EmbeddedDocumentField(EmployeeDetails) + + # Create employee object and save it to the database + employee = Employee(name='Test Employee', age=50, salary=20000) + employee.details = EmployeeDetails(position='Developer') + employee.save() + + # Test updating an embedded document + promoted_employee = Employee.objects.get(name='Test Employee') + promoted_employee.details.position = 'Senior Developer' + promoted_employee.save() + + collection = self.db[self.Person._meta['collection']] + employee_obj = collection.find_one({'name': 'Test Employee'}) + self.assertEqual(employee_obj['name'], 'Test Employee') + self.assertEqual(employee_obj['age'], 50) + # Ensure that the 'details' embedded object saved correctly + self.assertEqual(employee_obj['details']['position'], 'Senior Developer') + def test_save_reference(self): """Ensure that a document reference field may be saved in the database. 
""" From 5d5a84dbcf1fcd2bcd603dd417c3c2c75e1d6252 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 16:24:35 +0100 Subject: [PATCH 058/214] Spacing issue cleaned up --- mongoengine/queryset.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 3b37c3d7..58ea61c6 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -346,18 +346,18 @@ class QuerySet(object): self._skip = None def clone(self): - """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet`""" - c = self.__class__(self._document, self._collection_obj) + """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet`""" + c = self.__class__(self._document, self._collection_obj) - copy_props = ('_initial_query', '_query_obj', '_where_clause', + copy_props = ('_initial_query', '_query_obj', '_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_timeout', '_limit', '_skip') - for prop in copy_props: - val = getattr(self, prop) - setattr(c, prop, copy.deepcopy(val)) + for prop in copy_props: + val = getattr(self, prop) + setattr(c, prop, copy.deepcopy(val)) - return c + return c @property def _query(self): From 371dbf009fe6f0637a758d8cdb07d3be4e311f51 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 16:39:19 +0100 Subject: [PATCH 059/214] Updated QuerySet to allow more granular fields control. Added a fields method and tests showing the retrival of subranges of List Fields. Refs #167 --- mongoengine/queryset.py | 58 +++++++++++++++++++++++++------------ tests/queryset.py | 64 ++++++++++++++++++++++++++++++++--------- 2 files changed, 90 insertions(+), 32 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 58ea61c6..54d7643d 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -8,6 +8,7 @@ import pymongo.objectid import re import copy import itertools +import operator __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', 'InvalidCollectionError', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] @@ -280,30 +281,30 @@ class QueryFieldList(object): ONLY = True EXCLUDE = False - def __init__(self, fields=[], direction=ONLY, always_include=[]): - self.direction = direction + def __init__(self, fields=[], value=ONLY, always_include=[]): + self.value = value self.fields = set(fields) self.always_include = set(always_include) def as_dict(self): - return dict((field, self.direction) for field in self.fields) + return dict((field, self.value) for field in self.fields) def __add__(self, f): if not self.fields: self.fields = f.fields - self.direction = f.direction - elif self.direction is self.ONLY and f.direction is self.ONLY: + self.value = f.value + elif self.value is self.ONLY and f.value is self.ONLY: self.fields = self.fields.intersection(f.fields) - elif self.direction is self.EXCLUDE and f.direction is self.EXCLUDE: + elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: self.fields = self.fields.union(f.fields) - elif self.direction is self.ONLY and f.direction is self.EXCLUDE: + elif self.value is self.ONLY and f.value is self.EXCLUDE: self.fields -= f.fields - elif self.direction is self.EXCLUDE and f.direction is self.ONLY: - self.direction = self.ONLY + elif self.value is self.EXCLUDE and f.value is self.ONLY: + self.value = self.ONLY self.fields = f.fields - self.fields if self.always_include: - if self.direction is self.ONLY and self.fields: + if self.value is self.ONLY and self.fields: self.fields = 
self.fields.union(self.always_include) else: self.fields -= self.always_include @@ -311,7 +312,7 @@ class QueryFieldList(object): def reset(self): self.fields = set([]) - self.direction = self.ONLY + self.value = self.ONLY def __nonzero__(self): return bool(self.fields) @@ -890,10 +891,8 @@ class QuerySet(object): .. versionadded:: 0.3 """ - fields = self._fields_to_dbfields(fields) - self._loaded_fields += QueryFieldList(fields, direction=QueryFieldList.ONLY) - return self - + fields = dict([(f, QueryFieldList.ONLY) for f in fields]) + return self.fields(**fields) def exclude(self, *fields): """Opposite to .only(), exclude some document's fields. :: @@ -902,8 +901,31 @@ class QuerySet(object): :param fields: fields to exclude """ - fields = self._fields_to_dbfields(fields) - self._loaded_fields += QueryFieldList(fields, direction=QueryFieldList.EXCLUDE) + fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) + return self.fields(**fields) + + def fields(self, **kwargs): + """Manipulate how you load this document's fields. Used by `.only()` + and `.exclude()` to manipulate which fields to retrieve. Fields also + allows for a greater level of control for example: + + Retrieving a Subrange of Array Elements + --------------------------------------- + + You can use the $slice operator to retrieve a subrange of elements in + an array :: + + post = BlogPost.objects(...).fields(comments={"$slice": 5}) // first 5 comments + + :param kwargs: A dictionary identifying what to include + + .. versionadded:: 0.5 + """ + fields = sorted(kwargs.iteritems(), key=operator.itemgetter(1)) + for value, group in itertools.groupby(fields, lambda x: x[1]): + fields = [field for field, value in group] + fields = self._fields_to_dbfields(fields) + self._loaded_fields += QueryFieldList(fields, value=value) return self def all_fields(self): @@ -1277,7 +1299,7 @@ class QuerySetManager(object): # Create collection as a capped collection if specified if owner._meta['max_size'] or owner._meta['max_documents']: # Get max document limit and max byte size from meta - max_size = owner._meta['max_size'] or 10000000 # 10MB default + max_size = owner._meta['max_size'] or 10000000 # 10MB default max_documents = owner._meta['max_documents'] if collection in db.collection_names(): diff --git a/tests/queryset.py b/tests/queryset.py index 48ce6272..1961d7cf 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -597,6 +597,38 @@ class QuerySetTest(unittest.TestCase): Email.drop_collection() + def test_custom_fields(self): + """Ensure that query slicing an array works. + """ + + class Numbers(Document): + n = ListField(IntField()) + + Numbers.drop_collection() + + numbers = Numbers(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) + numbers.save() + + # first three + numbers = Numbers.objects.fields(n={"$slice": 3}).get() + self.assertEquals(numbers.n, [0, 1, 2]) + + # last three + numbers = Numbers.objects.fields(n={"$slice": -3}).get() + self.assertEquals(numbers.n, [-3, -2, -1]) + + # skip 2, limit 3 + numbers = Numbers.objects.fields(n={"$slice": [2, 3]}).get() + self.assertEquals(numbers.n, [2, 3, 4]) + + # skip to fifth from last, limit 4 + numbers = Numbers.objects.fields(n={"$slice": [-5, 4]}).get() + self.assertEquals(numbers.n, [-5, -4, -3, -2]) + + # skip to fifth from last, limit 10 + numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() + self.assertEquals(numbers.n, [-5, -4, -3, -2, -1]) + def test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. 
""" @@ -1931,49 +1963,53 @@ class QueryFieldListTest(unittest.TestCase): def test_include_include(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.ONLY) + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'a': True, 'b': True}) - q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'b': True}) def test_include_exclude(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.ONLY) + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'a': True, 'b': True}) - q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': True}) def test_exclude_exclude(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': False, 'b': False}) - q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': False, 'b': False, 'c': False}) def test_exclude_include(self): q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], direction=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': False, 'b': False}) - q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'c': True}) def test_always_include(self): q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], direction=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) - def test_reset(self): q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], direction=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) q.reset() self.assertFalse(q) - q += QueryFieldList(fields=['b', 'c'], direction=QueryFieldList.ONLY) + q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True}) + def test_using_a_slice(self): + q = QueryFieldList() + q += QueryFieldList(fields=['a'], value={"$slice": 5}) + self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) + if __name__ == '__main__': unittest.main() From fc2aff342bed0bdc764af7d4dc96850161477149 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 17:37:41 +0100 Subject: [PATCH 060/214] Unique indexes are created before user declared indexes This ensures that indexes are created with the unique flag, if a user declares the index, that would automatically be declared by the `unique_indexes` logic. 
Thanks to btubbs for the test case. Fixes #129 --- mongoengine/queryset.py | 10 +++++----- tests/document.py | 23 +++++++++++++++++++++++ 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 58ea61c6..6da11fa7 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -459,17 +459,17 @@ class QuerySet(object): drop_dups = self._document._meta.get('index_drop_dups', False) index_opts = self._document._meta.get('index_options', {}) + # Ensure indexes created by uniqueness constraints + for index in self._document._meta['unique_indexes']: + self._collection.ensure_index(index, unique=True, + background=background, drop_dups=drop_dups, **index_opts) + # Ensure document-defined indexes are created if self._document._meta['indexes']: for key_or_list in self._document._meta['indexes']: self._collection.ensure_index(key_or_list, background=background, **index_opts) - # Ensure indexes created by uniqueness constraints - for index in self._document._meta['unique_indexes']: - self._collection.ensure_index(index, unique=True, - background=background, drop_dups=drop_dups, **index_opts) - # If _types is being used (for polymorphism), it needs an index if '_types' in self._query: self._collection.ensure_index('_types', diff --git a/tests/document.py b/tests/document.py index 6f9d9ecb..66efdf98 100644 --- a/tests/document.py +++ b/tests/document.py @@ -357,6 +357,29 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + def test_unique_and_indexes(self): + """Ensure that 'unique' constraints aren't overridden by + meta.indexes. + """ + class Customer(Document): + cust_id = IntField(unique=True, required=True) + meta = { + 'indexes': ['cust_id'], + 'allow_inheritance': False, + } + + Customer.drop_collection() + cust = Customer(cust_id=1) + cust.save() + + cust_dupe = Customer(cust_id=1) + try: + cust_dupe.save() + raise AssertionError, "We saved a dupe!" + except OperationError: + pass + Customer.drop_collection() + def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys. """ From 95c2643f63558ccc2a707fa425f670b597f4e2a2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 18 May 2011 20:31:28 +0100 Subject: [PATCH 061/214] Added test showing primary=True behaviour. If you set a field as primary, then unexpected behaviour can occur. You won't create a duplicate but you will update an existing document. Closes #138 --- tests/document.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/document.py b/tests/document.py index 66efdf98..dee0b712 100644 --- a/tests/document.py +++ b/tests/document.py @@ -380,6 +380,28 @@ class DocumentTest(unittest.TestCase): pass Customer.drop_collection() + def test_unique_and_primary(self): + """If you set a field as primary, then unexpected behaviour can occur. + You won't create a duplicate but you will update an existing document. + """ + + class User(Document): + name = StringField(primary_key=True, unique=True) + password = StringField() + + User.drop_collection() + + user = User(name='huangz', password='secret') + user.save() + + user = User(name='huangz', password='secret2') + user.save() + + self.assertEqual(User.objects.count(), 1) + self.assertEqual(User.objects.get().password, 'secret2') + + User.drop_collection() + def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys. 
""" From fb61c9a765d0605a27f720d7a24d43aa580e109b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 19 May 2011 09:55:34 +0100 Subject: [PATCH 062/214] Regression test for mysterious uniqueness constraint when inserting into mongoengine Closes #143 Thanks to tfausak for the test case. --- tests/document.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/document.py b/tests/document.py index dee0b712..f6d8ea49 100644 --- a/tests/document.py +++ b/tests/document.py @@ -798,6 +798,25 @@ class DocumentTest(unittest.TestCase): self.Person.drop_collection() BlogPost.drop_collection() + def subclasses_and_unique_keys_works(self): + + class A(Document): + pass + + class B(A): + foo = BooleanField(unique=True) + + A.drop_collection() + B.drop_collection() + + A().save() + A().save() + B(foo=True).save() + + self.assertEquals(A.objects.count(), 2) + self.assertEquals(B.objects.count(), 1) + A.drop_collection() + B.drop_collection() def tearDown(self): self.Person.drop_collection() From da8a057edecb5e3246e7ad14bdd5eb7f08363ed1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 19 May 2011 12:41:38 +0100 Subject: [PATCH 063/214] Added test showing documents can be pickled Refs #135 --- tests/document.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/tests/document.py b/tests/document.py index f6d8ea49..84d0068b 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1,11 +1,23 @@ import unittest from datetime import datetime import pymongo +import pickle from mongoengine import * +from mongoengine.base import BaseField from mongoengine.connection import _get_db +class PickleEmbedded(EmbeddedDocument): + date = DateTimeField(default=datetime.now) + +class PickleTest(Document): + number = IntField() + string = StringField() + embedded = EmbeddedDocumentField(PickleEmbedded) + lists = ListField(StringField()) + + class DocumentTest(unittest.TestCase): def setUp(self): @@ -869,6 +881,23 @@ class DocumentTest(unittest.TestCase): self.assertTrue(u1 in all_user_set ) + def test_picklable(self): + + pickle_doc = PickleTest(number=1, string="OH HAI", lists=['1', '2']) + pickle_doc.embedded = PickleEmbedded() + pickle_doc.save() + + pickled_doc = pickle.dumps(pickle_doc) + resurrected = pickle.loads(pickled_doc) + + self.assertEquals(resurrected, pickle_doc) + + resurrected.string = "Working" + resurrected.save() + + pickle_doc.reload() + self.assertEquals(resurrected, pickle_doc) + if __name__ == '__main__': unittest.main() From b3251818cc3b55e7dab1dc7e45ab2f8be82f7269 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 19 May 2011 13:04:14 +0100 Subject: [PATCH 064/214] Added regression test for custom queryset ordering Closes #126 --- tests/queryset.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/queryset.py b/tests/queryset.py index 48ce6272..51224ea0 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1294,6 +1294,7 @@ class QuerySetTest(unittest.TestCase): class BlogPost(Document): tags = ListField(StringField()) deleted = BooleanField(default=False) + date = DateTimeField(default=datetime.now) @queryset_manager def objects(doc_cls, queryset): @@ -1301,7 +1302,7 @@ class QuerySetTest(unittest.TestCase): @queryset_manager def music_posts(doc_cls, queryset): - return queryset(tags='music', deleted=False) + return queryset(tags='music', deleted=False).order_by('-date') BlogPost.drop_collection() @@ -1317,7 +1318,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual([p.id for p in 
BlogPost.objects], [post1.id, post2.id, post3.id]) self.assertEqual([p.id for p in BlogPost.music_posts], - [post1.id, post2.id]) + [post2.id, post1.id]) BlogPost.drop_collection() From 40b69baa2991f750bdf879058ddb2d1e6a9a0c05 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 19 May 2011 16:49:00 +0100 Subject: [PATCH 065/214] Implementing Write Concern Added write_options dict to save, update, update_one and get_or_create. Thanks to justquick for the initial ticket and code. Refs #132 --- mongoengine/document.py | 38 ++++++++++++++++++++++++-------------- mongoengine/queryset.py | 32 +++++++++++++++++++++++--------- tests/document.py | 20 ++++++++++++++++++++ 3 files changed, 67 insertions(+), 23 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 196662c3..771b9229 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -40,44 +40,54 @@ class Document(BaseDocument): presence of `_cls` and `_types`, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` dictionary. - A :class:`~mongoengine.Document` may use a **Capped Collection** by + A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. :attr:`max_documents` is the maximum number of documents that - is allowed to be stored in the collection, and :attr:`max_size` is the - maximum size of the collection in bytes. If :attr:`max_size` is not - specified and :attr:`max_documents` is, :attr:`max_size` defaults to + is allowed to be stored in the collection, and :attr:`max_size` is the + maximum size of the collection in bytes. If :attr:`max_size` is not + specified and :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). Indexes may be created by specifying :attr:`indexes` in the :attr:`meta` - dictionary. The value should be a list of field names or tuples of field + dictionary. The value should be a list of field names or tuples of field names. Index direction may be specified by prefixing the field names with a **+** or **-** sign. """ __metaclass__ = TopLevelDocumentMetaclass - def save(self, safe=True, force_insert=False, validate=True): + def save(self, safe=True, force_insert=False, validate=True, write_options=None): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be created. - If ``safe=True`` and the operation is unsuccessful, an + If ``safe=True`` and the operation is unsuccessful, an :class:`~mongoengine.OperationError` will be raised. :param safe: check if the operation succeeded before returning - :param force_insert: only try to create a new document, don't allow + :param force_insert: only try to create a new document, don't allow updates of existing documents :param validate: validates the document; set to ``False`` to skip. + :param write_options: Extra keyword arguments are passed down to + :meth:`~pymongo.collection.Collection.save` OR + :meth:`~pymongo.collection.Collection.insert` + which will be used as options for the resultant ``getLastError`` command. + For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers + have recorded the write and will force an fsync on each server being written to. 
""" if validate: self.validate() + + if not write_options: + write_options = {} + doc = self.to_mongo() try: collection = self.__class__.objects._collection if force_insert: - object_id = collection.insert(doc, safe=safe) + object_id = collection.insert(doc, safe=safe, **write_options) else: - object_id = collection.save(doc, safe=safe) + object_id = collection.save(doc, safe=safe, **write_options) except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' if u'duplicate key' in unicode(err): @@ -131,9 +141,9 @@ class MapReduceDocument(object): """A document returned from a map/reduce query. :param collection: An instance of :class:`~pymongo.Collection` - :param key: Document/result key, often an instance of - :class:`~pymongo.objectid.ObjectId`. If supplied as - an ``ObjectId`` found in the given ``collection``, + :param key: Document/result key, often an instance of + :class:`~pymongo.objectid.ObjectId`. If supplied as + an ``ObjectId`` found in the given ``collection``, the object can be accessed via the ``object`` property. :param value: The result(s) for this key. @@ -148,7 +158,7 @@ class MapReduceDocument(object): @property def object(self): - """Lazy-load the object referenced by ``self.key``. ``self.key`` + """Lazy-load the object referenced by ``self.key``. ``self.key`` should be the ``primary_key``. """ id_field = self._document()._meta['id_field'] diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 6da11fa7..683aac50 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -643,7 +643,7 @@ class QuerySet(object): raise self._document.DoesNotExist("%s matching query does not exist." % self._document._class_name) - def get_or_create(self, *q_objs, **query): + def get_or_create(self, write_options=None, *q_objs, **query): """Retrieve unique object or create, if it doesn't exist. Returns a tuple of ``(object, created)``, where ``object`` is the retrieved or created object and ``created`` is a boolean specifying whether a new object was created. Raises @@ -653,6 +653,10 @@ class QuerySet(object): dictionary of default values for the new document may be provided as a keyword argument called :attr:`defaults`. + :param write_options: optional extra keyword arguments used if we + have to create a new document. + Passes any write_options onto :meth:`~mongoengine.document.Document.save` + .. versionadded:: 0.3 """ defaults = query.get('defaults', {}) @@ -664,7 +668,7 @@ class QuerySet(object): if count == 0: query.update(defaults) doc = self._document(**query) - doc.save() + doc.save(write_options=write_options) return doc, True elif count == 1: return self.first(), False @@ -1055,22 +1059,27 @@ class QuerySet(object): return mongo_update - def update(self, safe_update=True, upsert=False, **update): + def update(self, safe_update=True, upsert=False, write_options=None, **update): """Perform an atomic update on the fields matched by the query. When ``safe_update`` is used, the number of affected documents is returned. - :param safe: check if the operation succeeded before returning - :param update: Django-style update keyword arguments + :param safe_update: check if the operation succeeded before returning + :param upsert: Any existing document with that "_id" is overwritten. + :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update` .. 
versionadded:: 0.2 """ if pymongo.version < '1.1.1': raise OperationError('update() method requires PyMongo 1.1.1+') + if not write_options: + write_options = {} + update = QuerySet._transform_update(self._document, **update) try: ret = self._collection.update(self._query, update, multi=True, - upsert=upsert, safe=safe_update) + upsert=upsert, safe=safe_update, + **write_options) if ret is not None and 'n' in ret: return ret['n'] except pymongo.errors.OperationFailure, err: @@ -1079,22 +1088,27 @@ class QuerySet(object): raise OperationError(message) raise OperationError(u'Update failed (%s)' % unicode(err)) - def update_one(self, safe_update=True, upsert=False, **update): + def update_one(self, safe_update=True, upsert=False, write_options=None, **update): """Perform an atomic update on first field matched by the query. When ``safe_update`` is used, the number of affected documents is returned. - :param safe: check if the operation succeeded before returning + :param safe_update: check if the operation succeeded before returning + :param upsert: Any existing document with that "_id" is overwritten. + :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update` :param update: Django-style update keyword arguments .. versionadded:: 0.2 """ + if not write_options: + write_options = {} update = QuerySet._transform_update(self._document, **update) try: # Explicitly provide 'multi=False' to newer versions of PyMongo # as the default may change to 'True' if pymongo.version >= '1.1.1': ret = self._collection.update(self._query, update, multi=False, - upsert=upsert, safe=safe_update) + upsert=upsert, safe=safe_update, + **write_options) else: # Older versions of PyMongo don't support 'multi' ret = self._collection.update(self._query, update, diff --git a/tests/document.py b/tests/document.py index 84d0068b..cef6e8c1 100644 --- a/tests/document.py +++ b/tests/document.py @@ -898,6 +898,26 @@ class DocumentTest(unittest.TestCase): pickle_doc.reload() self.assertEquals(resurrected, pickle_doc) + def test_write_options(self): + """Test that passing write_options works""" + + self.Person.drop_collection() + + write_options = {"fsync": True} + + author, created = self.Person.objects.get_or_create( + name='Test User', write_options=write_options) + author.save(write_options=write_options) + + self.Person.objects.update(set__name='Ross', write_options=write_options) + + author = self.Person.objects.first() + self.assertEquals(author.name, 'Ross') + + self.Person.objects.update_one(set__name='Test User', write_options=write_options) + author = self.Person.objects.first() + self.assertEquals(author.name, 'Test User') + if __name__ == '__main__': unittest.main() From 08d1689268c846118a4f6d07772ce6f6b29649a6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 20 May 2011 09:47:41 +0100 Subject: [PATCH 066/214] Updated to handle the converntional api style for slicing a field Added testcase to demonstrate embedded slicing as well. 
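For illustration, a short sketch of how the new keyword style relates to the raw $slice projection that fields() already accepted (BlogPost and comments are stand-in names, not part of this patch; assumes a local MongoDB connection is available):

```python
from mongoengine import connect, Document, StringField, ListField

connect(db='mongoenginetest')        # assumes a local MongoDB instance

class BlogPost(Document):            # stand-in model, for illustration only
    title = StringField()
    comments = ListField(StringField())

# slice__<field> is translated by fields() into a {'$slice': ...} projection
first_five = BlogPost.objects.fields(slice__comments=5).first()
last_three = BlogPost.objects.fields(slice__comments=-3).first()
skip_two_take_three = BlogPost.objects.fields(slice__comments=[2, 3]).first()

# The raw dict form remains available for anything not covered by a keyword
first_five_raw = BlogPost.objects.fields(comments={"$slice": 5}).first()
```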
Refs #167 --- mongoengine/queryset.py | 19 +++++++++++--- tests/queryset.py | 55 ++++++++++++++++++++++++++++++++++++----- 2 files changed, 65 insertions(+), 9 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 54d7643d..8469e715 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -548,7 +548,7 @@ class QuerySet(object): return '.'.join(parts) @classmethod - def _transform_query(cls, _doc_cls=None, **query): + def _transform_query(cls, _doc_cls=None, _field_operation=False, **query): """Transform a query from Django-style format to Mongo format. """ operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', @@ -915,13 +915,26 @@ class QuerySet(object): You can use the $slice operator to retrieve a subrange of elements in an array :: - post = BlogPost.objects(...).fields(comments={"$slice": 5}) // first 5 comments + post = BlogPost.objects(...).fields(slice__comments=5) // first 5 comments :param kwargs: A dictionary identifying what to include .. versionadded:: 0.5 """ - fields = sorted(kwargs.iteritems(), key=operator.itemgetter(1)) + + # Check for an operator and transform to mongo-style if there is + operators = ["slice"] + cleaned_fields = [] + for key, value in kwargs.items(): + parts = key.split('__') + op = None + if parts[0] in operators: + op = parts.pop(0) + value = {'$' + op: value} + key = '.'.join(parts) + cleaned_fields.append((key, value)) + + fields = sorted(cleaned_fields, key=operator.itemgetter(1)) for value, group in itertools.groupby(fields, lambda x: x[1]): fields = [field for field, value in group] fields = self._fields_to_dbfields(fields) diff --git a/tests/queryset.py b/tests/queryset.py index 1961d7cf..e29a6d9d 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -597,10 +597,9 @@ class QuerySetTest(unittest.TestCase): Email.drop_collection() - def test_custom_fields(self): + def test_slicing_fields(self): """Ensure that query slicing an array works. """ - class Numbers(Document): n = ListField(IntField()) @@ -610,25 +609,69 @@ class QuerySetTest(unittest.TestCase): numbers.save() # first three - numbers = Numbers.objects.fields(n={"$slice": 3}).get() + numbers = Numbers.objects.fields(slice__n=3).get() self.assertEquals(numbers.n, [0, 1, 2]) # last three - numbers = Numbers.objects.fields(n={"$slice": -3}).get() + numbers = Numbers.objects.fields(slice__n=-3).get() self.assertEquals(numbers.n, [-3, -2, -1]) # skip 2, limit 3 - numbers = Numbers.objects.fields(n={"$slice": [2, 3]}).get() + numbers = Numbers.objects.fields(slice__n=[2, 3]).get() self.assertEquals(numbers.n, [2, 3, 4]) # skip to fifth from last, limit 4 - numbers = Numbers.objects.fields(n={"$slice": [-5, 4]}).get() + numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() self.assertEquals(numbers.n, [-5, -4, -3, -2]) # skip to fifth from last, limit 10 + numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() + self.assertEquals(numbers.n, [-5, -4, -3, -2, -1]) + + # skip to fifth from last, limit 10 dict method numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() self.assertEquals(numbers.n, [-5, -4, -3, -2, -1]) + def test_slicing_nested_fields(self): + """Ensure that query slicing an embedded array works. 
+ """ + + class EmbeddedNumber(EmbeddedDocument): + n = ListField(IntField()) + + class Numbers(Document): + embedded = EmbeddedDocumentField(EmbeddedNumber) + + Numbers.drop_collection() + + numbers = Numbers() + numbers.embedded = EmbeddedNumber(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) + numbers.save() + + # first three + numbers = Numbers.objects.fields(slice__embedded__n=3).get() + self.assertEquals(numbers.embedded.n, [0, 1, 2]) + + # last three + numbers = Numbers.objects.fields(slice__embedded__n=-3).get() + self.assertEquals(numbers.embedded.n, [-3, -2, -1]) + + # skip 2, limit 3 + numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() + self.assertEquals(numbers.embedded.n, [2, 3, 4]) + + # skip to fifth from last, limit 4 + numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() + self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2]) + + # skip to fifth from last, limit 10 + numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() + self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1]) + + # skip to fifth from last, limit 10 dict method + numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() + self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1]) + def test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. """ From 9260ff9e83365a13bd75334c60d3eb33d2fdf5ef Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 20 May 2011 10:22:22 +0100 Subject: [PATCH 067/214] Updated docs and added a NotRegistered exception For handling GenericReferences that reference documents that haven't been imported. Closes #170 --- mongoengine/base.py | 38 ++++++++++++++--------- mongoengine/fields.py | 7 +++-- tests/fields.py | 70 ++++++++++++++++++++++++++++++++----------- 3 files changed, 81 insertions(+), 34 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 9d0b8231..ede90835 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -7,22 +7,32 @@ import pymongo import pymongo.objectid -_document_registry = {} - -def get_document(name): - return _document_registry[name] +class NotRegistered(Exception): + pass class ValidationError(Exception): pass +_document_registry = {} + +def get_document(name): + if name not in _document_registry: + raise NotRegistered(""" + `%s` has not been registered in the document registry. + Importing the document class automatically registers it, has it + been imported? + """.strip() % name) + return _document_registry[name] + + class BaseField(object): """A base class for fields in a MongoDB document. Instances of this class may be added to subclasses of `Document` to define a document's schema. """ - # Fields may have _types inserted into indexes by default + # Fields may have _types inserted into indexes by default _index_with_types = True _geo_index = False @@ -32,7 +42,7 @@ class BaseField(object): creation_counter = 0 auto_creation_counter = -1 - def __init__(self, db_field=None, name=None, required=False, default=None, + def __init__(self, db_field=None, name=None, required=False, default=None, unique=False, unique_with=None, primary_key=False, validation=None, choices=None): self.db_field = (db_field or name) if not primary_key else '_id' @@ -57,7 +67,7 @@ class BaseField(object): BaseField.creation_counter += 1 def __get__(self, instance, owner): - """Descriptor for retrieving a value from a field in a document. Do + """Descriptor for retrieving a value from a field in a document. Do any necessary conversion between Python and MongoDB types. 
""" if instance is None: @@ -167,8 +177,8 @@ class DocumentMetaclass(type): superclasses.update(base._superclasses) if hasattr(base, '_meta'): - # Ensure that the Document class may be subclassed - - # inheritance may be disabled to remove dependency on + # Ensure that the Document class may be subclassed - + # inheritance may be disabled to remove dependency on # additional fields _cls and _types if base._meta.get('allow_inheritance', True) == False: raise ValueError('Document %s may not be subclassed' % @@ -211,12 +221,12 @@ class DocumentMetaclass(type): module = attrs.get('__module__') - base_excs = tuple(base.DoesNotExist for base in bases + base_excs = tuple(base.DoesNotExist for base in bases if hasattr(base, 'DoesNotExist')) or (DoesNotExist,) exc = subclass_exception('DoesNotExist', base_excs, module) new_class.add_to_class('DoesNotExist', exc) - base_excs = tuple(base.MultipleObjectsReturned for base in bases + base_excs = tuple(base.MultipleObjectsReturned for base in bases if hasattr(base, 'MultipleObjectsReturned')) base_excs = base_excs or (MultipleObjectsReturned,) exc = subclass_exception('MultipleObjectsReturned', base_excs, module) @@ -238,9 +248,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): def __new__(cls, name, bases, attrs): super_new = super(TopLevelDocumentMetaclass, cls).__new__ - # Classes defined in this package are abstract and should not have + # Classes defined in this package are abstract and should not have # their own metadata with DB collection, etc. - # __metaclass__ is only set on the class with the __metaclass__ + # __metaclass__ is only set on the class with the __metaclass__ # attribute (i.e. it is not set on subclasses). This differentiates # 'real' documents from the 'Document' class if attrs.get('__metaclass__') == TopLevelDocumentMetaclass: @@ -366,7 +376,7 @@ class BaseDocument(object): are present. """ # Get a list of tuples of field names and their current values - fields = [(field, getattr(self, name)) + fields = [(field, getattr(self, name)) for name, field in self._fields.items()] # Ensure that each field is matched to a valid value diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 527eb158..0cc8219b 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -339,7 +339,7 @@ class ListField(BaseField): if isinstance(self.field, ReferenceField): referenced_type = self.field.document_type - # Get value from document instance if available + # Get value from document instance if available value_list = instance._data.get(self.name) if value_list: deref_list = [] @@ -522,6 +522,9 @@ class GenericReferenceField(BaseField): """A reference to *any* :class:`~mongoengine.document.Document` subclass that will be automatically dereferenced on access (lazily). + note: Any documents used as a generic reference must be registered in the + document registry. Importing the model will automatically register it. + .. 
versionadded:: 0.3 """ @@ -648,7 +651,7 @@ class GridFSProxy(object): if not self.newfile: self.new_file() self.grid_id = self.newfile._id - self.newfile.writelines(lines) + self.newfile.writelines(lines) def read(self, size=-1): try: diff --git a/tests/fields.py b/tests/fields.py index c8671873..38409b6a 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -7,6 +7,7 @@ import gridfs from mongoengine import * from mongoengine.connection import _get_db +from mongoengine.base import _document_registry, NotRegistered class FieldTest(unittest.TestCase): @@ -45,7 +46,7 @@ class FieldTest(unittest.TestCase): """ class Person(Document): name = StringField() - + person = Person(name='Test User') self.assertEqual(person.id, None) @@ -95,7 +96,7 @@ class FieldTest(unittest.TestCase): link.url = 'http://www.google.com:8080' link.validate() - + def test_int_validation(self): """Ensure that invalid values cannot be assigned to int fields. """ @@ -129,12 +130,12 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, person.validate) person.height = 4.0 self.assertRaises(ValidationError, person.validate) - + def test_decimal_validation(self): """Ensure that invalid values cannot be assigned to decimal fields. """ class Person(Document): - height = DecimalField(min_value=Decimal('0.1'), + height = DecimalField(min_value=Decimal('0.1'), max_value=Decimal('3.5')) Person.drop_collection() @@ -249,7 +250,7 @@ class FieldTest(unittest.TestCase): post.save() post.reload() self.assertEqual(post.tags, ['fun', 'leisure']) - + comment1 = Comment(content='Good for you', order=1) comment2 = Comment(content='Yay.', order=0) comments = [comment1, comment2] @@ -315,7 +316,7 @@ class FieldTest(unittest.TestCase): person.validate() def test_embedded_document_inheritance(self): - """Ensure that subclasses of embedded documents may be provided to + """Ensure that subclasses of embedded documents may be provided to EmbeddedDocumentFields of the superclass' type. """ class User(EmbeddedDocument): @@ -327,7 +328,7 @@ class FieldTest(unittest.TestCase): class BlogPost(Document): content = StringField() author = EmbeddedDocumentField(User) - + post = BlogPost(content='What I did today...') post.author = User(name='Test User') post.author = PowerUser(name='Test User', power=47) @@ -370,7 +371,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() BlogPost.drop_collection() - + def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. """ @@ -434,7 +435,7 @@ class FieldTest(unittest.TestCase): class TreeNode(EmbeddedDocument): name = StringField() children = ListField(EmbeddedDocumentField('self')) - + tree = Tree(name="Tree") first_child = TreeNode(name="Child 1") @@ -442,7 +443,7 @@ class FieldTest(unittest.TestCase): second_child = TreeNode(name="Child 2") first_child.children.append(second_child) - + third_child = TreeNode(name="Child 3") first_child.children.append(third_child) @@ -506,20 +507,20 @@ class FieldTest(unittest.TestCase): Member.drop_collection() BlogPost.drop_collection() - + def test_generic_reference(self): """Ensure that a GenericReferenceField properly dereferences items. 
""" class Link(Document): title = StringField() meta = {'allow_inheritance': False} - + class Post(Document): title = StringField() - + class Bookmark(Document): bookmark_object = GenericReferenceField() - + Link.drop_collection() Post.drop_collection() Bookmark.drop_collection() @@ -574,16 +575,49 @@ class FieldTest(unittest.TestCase): user = User(bookmarks=[post_1, link_1]) user.save() - + user = User.objects(bookmarks__all=[post_1, link_1]).first() - + self.assertEqual(user.bookmarks[0], post_1) self.assertEqual(user.bookmarks[1], link_1) - + Link.drop_collection() Post.drop_collection() User.drop_collection() + def test_generic_reference_document_not_registered(self): + """Ensure dereferencing out of the document registry throws a + `NotRegistered` error. + """ + class Link(Document): + title = StringField() + + class User(Document): + bookmarks = ListField(GenericReferenceField()) + + Link.drop_collection() + User.drop_collection() + + link_1 = Link(title="Pitchfork") + link_1.save() + + user = User(bookmarks=[link_1]) + user.save() + + # Mimic User and Link definitions being in a different file + # and the Link model not being imported in the User file. + del(_document_registry["Link"]) + + user = User.objects.first() + try: + user.bookmarks + raise AssertionError, "Link was removed from the registry" + except NotRegistered: + pass + + Link.drop_collection() + User.drop_collection() + def test_binary_fields(self): """Ensure that binary fields can be stored and retrieved. """ @@ -727,7 +761,7 @@ class FieldTest(unittest.TestCase): result = SetFile.objects.first() self.assertTrue(setfile == result) self.assertEquals(result.file.read(), more_text) - result.file.delete() + result.file.delete() PutFile.drop_collection() StreamFile.drop_collection() From 5f53cda3ab3a320c1a303a44b9c61d350900a91c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 20 May 2011 10:55:01 +0100 Subject: [PATCH 068/214] Added regression test for #94 --- tests/queryset.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index 51224ea0..82dae6cd 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1761,6 +1761,25 @@ class QuerySetTest(unittest.TestCase): Number.drop_collection() + def test_order_works_with_primary(self): + """Ensure that order_by and primary work. + """ + class Number(Document): + n = IntField(primary_key=True) + + Number.drop_collection() + + Number(n=1).save() + Number(n=2).save() + Number(n=3).save() + + numbers = [n.n for n in Number.objects.order_by('-n')] + self.assertEquals([3, 2, 1], numbers) + + numbers = [n.n for n in Number.objects.order_by('+n')] + self.assertEquals([1, 2, 3], numbers) + Number.drop_collection() + class QTest(unittest.TestCase): From 07e71d9ce9ac42ca7f9cb3d33550d619e7f99bdd Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 20 May 2011 14:18:16 +0100 Subject: [PATCH 069/214] Regression test for collection names an primary ordering Closes #91 --- tests/document.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/document.py b/tests/document.py index cef6e8c1..77c8269b 100644 --- a/tests/document.py +++ b/tests/document.py @@ -212,6 +212,22 @@ class DocumentTest(unittest.TestCase): Person.drop_collection() self.assertFalse(collection in self.db.collection_names()) + def test_collection_name_and_primary(self): + """Ensure that a collection with a specified name may be used. 
+ """ + + class Person(Document): + name = StringField(primary_key=True) + meta = {'collection': 'app'} + + user = Person(name="Test User") + user.save() + + user_obj = Person.objects[0] + self.assertEqual(user_obj.name, "Test User") + + Person.drop_collection() + def test_inherited_collections(self): """Ensure that subclassed documents don't override parents' collections. """ From 1b72ea9cc161995c9a2c34c9929620f9f0f7a79e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 20 May 2011 16:09:03 +0100 Subject: [PATCH 070/214] Fixed detection of unique=True in embedded documents. Added some more test cases - thanks to @heyman for the initial test case. Closes #172 Refs #171 --- mongoengine/base.py | 50 +++++++++++++++++++++++-------------- tests/document.py | 61 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 19 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index ede90835..9a6b5f12 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -306,6 +306,30 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): for spec in meta['indexes']] + base_indexes new_class._meta['indexes'] = user_indexes + unique_indexes = cls._unique_with_indexes(new_class) + new_class._meta['unique_indexes'] = unique_indexes + + for field_name, field in new_class._fields.items(): + # Check for custom primary key + if field.primary_key: + current_pk = new_class._meta['id_field'] + if current_pk and current_pk != field_name: + raise ValueError('Cannot override primary key field') + + if not current_pk: + new_class._meta['id_field'] = field_name + # Make 'Document.id' an alias to the real primary key field + new_class.id = field + + if not new_class._meta['id_field']: + new_class._meta['id_field'] = 'id' + new_class._fields['id'] = ObjectIdField(db_field='_id') + new_class.id = new_class._fields['id'] + + return new_class + + @classmethod + def _unique_with_indexes(cls, new_class, namespace=""): unique_indexes = [] for field_name, field in new_class._fields.items(): # Generate a list of indexes needed by uniqueness constraints @@ -331,28 +355,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): unique_fields += unique_with # Add the new index to the list - index = [(f, pymongo.ASCENDING) for f in unique_fields] + index = [("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields] unique_indexes.append(index) - # Check for custom primary key - if field.primary_key: - current_pk = new_class._meta['id_field'] - if current_pk and current_pk != field_name: - raise ValueError('Cannot override primary key field') + # Grab any embedded document field unique indexes + if field.__class__.__name__ == "EmbeddedDocumentField": + field_namespace = "%s." 
% field_name + unique_indexes += cls._unique_with_indexes(field.document_type, + field_namespace) - if not current_pk: - new_class._meta['id_field'] = field_name - # Make 'Document.id' an alias to the real primary key field - new_class.id = field - - new_class._meta['unique_indexes'] = unique_indexes - - if not new_class._meta['id_field']: - new_class._meta['id_field'] = 'id' - new_class._fields['id'] = ObjectIdField(db_field='_id') - new_class.id = new_class._fields['id'] - - return new_class + return unique_indexes class BaseDocument(object): diff --git a/tests/document.py b/tests/document.py index 77c8269b..8f47ec3c 100644 --- a/tests/document.py +++ b/tests/document.py @@ -362,6 +362,10 @@ class DocumentTest(unittest.TestCase): post2 = BlogPost(title='test2', slug='test') self.assertRaises(OperationError, post2.save) + + def test_unique_with(self): + """Ensure that unique_with constraints are applied to fields. + """ class Date(EmbeddedDocument): year = IntField(db_field='yr') @@ -385,6 +389,63 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + def test_unique_embedded_document(self): + """Ensure that uniqueness constraints are applied to fields on embedded documents. + """ + class SubDocument(EmbeddedDocument): + year = IntField(db_field='yr') + slug = StringField(unique=True) + + class BlogPost(Document): + title = StringField() + sub = EmbeddedDocumentField(SubDocument) + + BlogPost.drop_collection() + + post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) + post1.save() + + # sub.slug is different so won't raise exception + post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) + post2.save() + + # Now there will be two docs with the same sub.slug + post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) + self.assertRaises(OperationError, post3.save) + + BlogPost.drop_collection() + + def test_unique_with_embedded_document_and_embedded_unique(self): + """Ensure that uniqueness constraints are applied to fields on + embedded documents. And work with unique_with as well. + """ + class SubDocument(EmbeddedDocument): + year = IntField(db_field='yr') + slug = StringField(unique=True) + + class BlogPost(Document): + title = StringField(unique_with='sub.year') + sub = EmbeddedDocumentField(SubDocument) + + BlogPost.drop_collection() + + post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) + post1.save() + + # sub.slug is different so won't raise exception + post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) + post2.save() + + # Now there will be two docs with the same sub.slug + post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) + self.assertRaises(OperationError, post3.save) + + # Now there will be two docs with the same title and year + post3 = BlogPost(title='test1', sub=SubDocument(year=2009, slug='test-1')) + self.assertRaises(OperationError, post3.save) + + BlogPost.drop_collection() + def test_unique_and_indexes(self): """Ensure that 'unique' constraints aren't overridden by meta.indexes. 
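The practical effect of the namespaced recursion above is that a unique field on an embedded document now contributes a dotted unique index on the parent collection. A minimal sketch, with the expected value inferred from the _unique_with_indexes logic in this patch (no database connection is needed just to inspect the generated spec):

```python
from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         StringField, IntField)

class SubDocument(EmbeddedDocument):
    year = IntField(db_field='yr')
    slug = StringField(unique=True)      # unique constraint on an embedded field

class BlogPost(Document):
    title = StringField()
    sub = EmbeddedDocumentField(SubDocument)

# The metaclass now walks into the EmbeddedDocumentField, prefixing the field
# name, so the constraint is recorded against the dotted path and ensured as a
# unique index when the collection is first used.
print(BlogPost._meta['unique_indexes'])  # expected: [[('sub.slug', 1)]]
```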
From 36034ee15fad413392c009e8f0d367669d355cb5 Mon Sep 17 00:00:00 2001 From: Alistair Roche Date: Mon, 23 May 2011 18:27:01 +0100 Subject: [PATCH 071/214] 'set__comments__0__body="asdf"' syntax works --- mongoengine/base.py | 21 ++++++++++----------- mongoengine/fields.py | 4 ++-- mongoengine/queryset.py | 15 +++++++++++---- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 9d0b8231..495e2418 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -22,7 +22,7 @@ class BaseField(object): may be added to subclasses of `Document` to define a document's schema. """ - # Fields may have _types inserted into indexes by default + # Fields may have _types inserted into indexes by default _index_with_types = True _geo_index = False @@ -32,7 +32,7 @@ class BaseField(object): creation_counter = 0 auto_creation_counter = -1 - def __init__(self, db_field=None, name=None, required=False, default=None, + def __init__(self, db_field=None, name=None, required=False, default=None, unique=False, unique_with=None, primary_key=False, validation=None, choices=None): self.db_field = (db_field or name) if not primary_key else '_id' @@ -57,7 +57,7 @@ class BaseField(object): BaseField.creation_counter += 1 def __get__(self, instance, owner): - """Descriptor for retrieving a value from a field in a document. Do + """Descriptor for retrieving a value from a field in a document. Do any necessary conversion between Python and MongoDB types. """ if instance is None: @@ -167,8 +167,8 @@ class DocumentMetaclass(type): superclasses.update(base._superclasses) if hasattr(base, '_meta'): - # Ensure that the Document class may be subclassed - - # inheritance may be disabled to remove dependency on + # Ensure that the Document class may be subclassed - + # inheritance may be disabled to remove dependency on # additional fields _cls and _types if base._meta.get('allow_inheritance', True) == False: raise ValueError('Document %s may not be subclassed' % @@ -190,7 +190,6 @@ class DocumentMetaclass(type): attrs['_class_name'] = '.'.join(reversed(class_name)) attrs['_superclasses'] = superclasses - # Add the document's fields to the _fields attribute for attr_name, attr_value in attrs.items(): if hasattr(attr_value, "__class__") and \ @@ -211,12 +210,12 @@ class DocumentMetaclass(type): module = attrs.get('__module__') - base_excs = tuple(base.DoesNotExist for base in bases + base_excs = tuple(base.DoesNotExist for base in bases if hasattr(base, 'DoesNotExist')) or (DoesNotExist,) exc = subclass_exception('DoesNotExist', base_excs, module) new_class.add_to_class('DoesNotExist', exc) - base_excs = tuple(base.MultipleObjectsReturned for base in bases + base_excs = tuple(base.MultipleObjectsReturned for base in bases if hasattr(base, 'MultipleObjectsReturned')) base_excs = base_excs or (MultipleObjectsReturned,) exc = subclass_exception('MultipleObjectsReturned', base_excs, module) @@ -238,9 +237,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): def __new__(cls, name, bases, attrs): super_new = super(TopLevelDocumentMetaclass, cls).__new__ - # Classes defined in this package are abstract and should not have + # Classes defined in this package are abstract and should not have # their own metadata with DB collection, etc. - # __metaclass__ is only set on the class with the __metaclass__ + # __metaclass__ is only set on the class with the __metaclass__ # attribute (i.e. it is not set on subclasses). 
This differentiates # 'real' documents from the 'Document' class if attrs.get('__metaclass__') == TopLevelDocumentMetaclass: @@ -366,7 +365,7 @@ class BaseDocument(object): are present. """ # Get a list of tuples of field names and their current values - fields = [(field, getattr(self, name)) + fields = [(field, getattr(self, name)) for name, field in self._fields.items()] # Ensure that each field is matched to a valid value diff --git a/mongoengine/fields.py b/mongoengine/fields.py index c06fdd4d..9fcfcc2b 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -339,7 +339,7 @@ class ListField(BaseField): if isinstance(self.field, ReferenceField): referenced_type = self.field.document_type - # Get value from document instance if available + # Get value from document instance if available value_list = instance._data.get(self.name) if value_list: deref_list = [] @@ -643,7 +643,7 @@ class GridFSProxy(object): if not self.newfile: self.new_file() self.grid_id = self.newfile._id - self.newfile.writelines(lines) + self.newfile.writelines(lines) def read(self): try: diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 6da11fa7..f58328ac 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -523,6 +523,10 @@ class QuerySet(object): fields = [] field = None for field_name in parts: + if field_name.isdigit(): + fields.append(field_name) + field = field.field + continue if field is None: # Look up first field from the document if field_name == 'pk': @@ -620,7 +624,6 @@ class QuerySet(object): mongo_query[key] = value elif key in mongo_query and isinstance(mongo_query[key], dict): mongo_query[key].update(value) - return mongo_query def get(self, *q_objs, **query): @@ -1010,7 +1013,6 @@ class QuerySet(object): """ operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all', 'pull', 'pull_all', 'add_to_set'] - mongo_update = {} for key, value in update.items(): parts = key.split('__') @@ -1033,10 +1035,15 @@ class QuerySet(object): if _doc_cls: # Switch field names to proper names [set in Field(name='foo')] fields = QuerySet._lookup_field(_doc_cls, parts) - parts = [field.db_field for field in fields] - + parts = [] + for field in fields: + if isinstance(field, str): + parts.append(field) + else: + parts.append(field.db_field) # Convert value to proper value field = fields[-1] + if op in (None, 'set', 'push', 'pull', 'addToSet'): value = field.prepare_query_value(op, value) elif op in ('pushAll', 'pullAll'): From 1126c85903431fc789ba7afa9220d16720a5b40a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 24 May 2011 11:26:46 +0100 Subject: [PATCH 072/214] Added Custom Objects Managers Managers can now be directly declared in a Document eg:: ```python class CustomQuerySetManager(QuerySetManager): @staticmethod def get_queryset(doc_cls, queryset): return queryset(is_published=True) class Post(Document): is_published = BooleanField(default=False) published = CustomQuerySetManager() ``` Refactored the name of the `_manager_func` to `get_queryset` to mark it as part the public API. If declaring a Manager with a get_queryset method, it should be a staticmethod, that accepts the document_class and the queryset. Note - you can still use decorators in fact in the example below, we effectively do the same thing as the first example and is much less verbose. 
```python class Post(Document): is_published = BooleanField(default=False) @queryset_manager def published(doc_cls, queryset): return queryset(is_published=True) ``` Thanks to @theojulienne for the initial impetus and code sample #108 --- mongoengine/base.py | 6 +++-- mongoengine/queryset.py | 17 +++++++------- tests/queryset.py | 52 +++++++++++++++++++++++++++++++++++++++-- 3 files changed, 63 insertions(+), 12 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 9a6b5f12..77c2d7d1 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -299,8 +299,10 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class = super_new(cls, name, bases, attrs) # Provide a default queryset unless one has been manually provided - if not hasattr(new_class, 'objects'): - new_class.objects = QuerySetManager() + manager = attrs.get('objects', QuerySetManager()) + if hasattr(manager, 'queryset_class'): + meta['queryset_class'] = manager.queryset_class + new_class.objects = manager user_indexes = [QuerySet._build_index_spec(new_class, spec) for spec in meta['indexes']] + base_indexes diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 239e146b..3583d0f9 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -428,8 +428,6 @@ class QuerySet(object): querying collection :param query: Django-style query keyword arguments """ - #if q_obj: - #self._where_clause = q_obj.as_js(self._document) query = Q(**query) if q_obj: query &= q_obj @@ -1308,8 +1306,11 @@ class QuerySet(object): class QuerySetManager(object): - def __init__(self, manager_func=None): - self._manager_func = manager_func + get_queryset = None + + def __init__(self, queryset_func=None): + if queryset_func: + self.get_queryset = queryset_func self._collections = {} def __get__(self, instance, owner): @@ -1353,11 +1354,11 @@ class QuerySetManager(object): # owner is the document that contains the QuerySetManager queryset_class = owner._meta['queryset_class'] or QuerySet queryset = queryset_class(owner, self._collections[(db, collection)]) - if self._manager_func: - if self._manager_func.func_code.co_argcount == 1: - queryset = self._manager_func(queryset) + if self.get_queryset: + if self.get_queryset.func_code.co_argcount == 1: + queryset = self.get_queryset(queryset) else: - queryset = self._manager_func(owner, queryset) + queryset = self.get_queryset(owner, queryset) return queryset diff --git a/tests/queryset.py b/tests/queryset.py index 5a2c46cb..777f9e36 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -5,8 +5,9 @@ import unittest import pymongo from datetime import datetime, timedelta -from mongoengine.queryset import (QuerySet, MultipleObjectsReturned, - DoesNotExist, QueryFieldList) +from mongoengine.queryset import (QuerySet, QuerySetManager, + MultipleObjectsReturned, DoesNotExist, + QueryFieldList) from mongoengine import * @@ -1737,6 +1738,53 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() + def test_custom_querysets_set_manager_directly(self): + """Ensure that custom QuerySet classes may be used. 
+ """ + + class CustomQuerySet(QuerySet): + def not_empty(self): + return len(self) > 0 + + class CustomQuerySetManager(QuerySetManager): + queryset_class = CustomQuerySet + + class Post(Document): + objects = CustomQuerySetManager() + + Post.drop_collection() + + self.assertTrue(isinstance(Post.objects, CustomQuerySet)) + self.assertFalse(Post.objects.not_empty()) + + Post().save() + self.assertTrue(Post.objects.not_empty()) + + Post.drop_collection() + + def test_custom_querysets_managers_directly(self): + """Ensure that custom QuerySet classes may be used. + """ + + class CustomQuerySetManager(QuerySetManager): + + @staticmethod + def get_queryset(doc_cls, queryset): + return queryset(is_published=True) + + class Post(Document): + is_published = BooleanField(default=False) + published = CustomQuerySetManager() + + Post.drop_collection() + + Post().save() + Post(is_published=True).save() + self.assertEquals(Post.objects.count(), 2) + self.assertEquals(Post.published.count(), 1) + + Post.drop_collection() + def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ From 118c0deb7a7bd48170eb49624f79f737419c5342 Mon Sep 17 00:00:00 2001 From: Alistair Roche Date: Tue, 24 May 2011 11:31:44 +0100 Subject: [PATCH 073/214] Fixed list-indexing syntax; created tests. --- mongoengine/queryset.py | 16 +++++++++++++- tests/queryset.py | 49 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 239e146b..e6c93353 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -524,6 +524,15 @@ class QuerySet(object): fields = [] field = None for field_name in parts: + # Handle ListField indexing: + if field_name.isdigit(): + try: + field = field.field + except AttributeError, err: + raise InvalidQueryError( + "Can't use index on unsubscriptable field (%s)" % err) + fields.append(field_name) + continue if field is None: # Look up first field from the document if field_name == 'pk': @@ -1072,7 +1081,12 @@ class QuerySet(object): if _doc_cls: # Switch field names to proper names [set in Field(name='foo')] fields = QuerySet._lookup_field(_doc_cls, parts) - parts = [field.db_field for field in fields] + parts = [] + for field in fields: + if isinstance(field, str): + parts.append(field) + else: + parts.append(field.db_field) # Convert value to proper value field = fields[-1] diff --git a/tests/queryset.py b/tests/queryset.py index 5a2c46cb..b0693f66 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -211,6 +211,55 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() + def test_update_array_position(self): + """Ensure that updating by array position works. + + Check update() and update_one() can take syntax like: + set__posts__1__comments__1__name="testc" + Check that it only works for ListFields. 
+ """ + class Comment(EmbeddedDocument): + name = StringField() + + class Post(EmbeddedDocument): + comments = ListField(EmbeddedDocumentField(Comment)) + + class Blog(Document): + tags = ListField(StringField()) + posts = ListField(EmbeddedDocumentField(Post)) + + Blog.drop_collection() + + comment1 = Comment(name='testa') + comment2 = Comment(name='testb') + post1 = Post(comments=[comment1, comment2]) + post2 = Post(comments=[comment2, comment2]) + blog1 = Blog.objects.create(posts=[post1, post2]) + blog2 = Blog.objects.create(posts=[post2, post1]) + + # Update all of the first comments of second posts of all blogs + blog = Blog.objects().update(set__posts__1__comments__0__name="testc") + testc_blogs = Blog.objects(posts__1__comments__0__name="testc") + self.assertEqual(len(testc_blogs), 2) + + Blog.drop_collection() + + blog1 = Blog.objects.create(posts=[post1, post2]) + blog2 = Blog.objects.create(posts=[post2, post1]) + + # Update only the first blog returned by the query + blog = Blog.objects().update_one( + set__posts__1__comments__1__name="testc") + testc_blogs = Blog.objects(posts__1__comments__1__name="testc") + self.assertEqual(len(testc_blogs), 1) + + # Check that using this indexing syntax on a non-list fails + def non_list_indexing(): + Blog.objects().update(set__posts__1__comments__0__name__1="asdf") + self.assertRaises(InvalidQueryError, non_list_indexing) + + Blog.drop_collection() + def test_get_or_create(self): """Ensure that ``get_or_create`` returns one result or creates a new document. From 305fd4b232e480f4aaa85999d87e6a645e4e5c43 Mon Sep 17 00:00:00 2001 From: Alistair Roche Date: Tue, 24 May 2011 11:44:43 +0100 Subject: [PATCH 074/214] Fixed whitespace --- mongoengine/base.py | 1 + mongoengine/queryset.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/mongoengine/base.py b/mongoengine/base.py index 5188c310..77c2d7d1 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -200,6 +200,7 @@ class DocumentMetaclass(type): attrs['_class_name'] = '.'.join(reversed(class_name)) attrs['_superclasses'] = superclasses + # Add the document's fields to the _fields attribute for attr_name, attr_value in attrs.items(): if hasattr(attr_value, "__class__") and \ diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index d7d349ad..087ee487 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -628,6 +628,7 @@ class QuerySet(object): mongo_query[key] = value elif key in mongo_query and isinstance(mongo_query[key], dict): mongo_query[key].update(value) + return mongo_query def get(self, *q_objs, **query): @@ -1055,6 +1056,7 @@ class QuerySet(object): """ operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all', 'pull', 'pull_all', 'add_to_set'] + mongo_update = {} for key, value in update.items(): parts = key.split('__') From 088c40f9f2d296f65fd493fd5d98baf02f9125eb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 24 May 2011 12:30:12 +0100 Subject: [PATCH 075/214] Added Abstract Base Classes Thanks to @theojulienne for the code :) #108 --- mongoengine/base.py | 12 +++++++++++- tests/document.py | 34 +++++++++++++++++++++++++++++++--- 2 files changed, 42 insertions(+), 4 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 77c2d7d1..ffceb794 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -253,7 +253,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # __metaclass__ is only set on the class with the __metaclass__ # attribute (i.e. it is not set on subclasses). 
This differentiates # 'real' documents from the 'Document' class - if attrs.get('__metaclass__') == TopLevelDocumentMetaclass: + # + # Also assume a class is abstract if it has abstract set to True in + # its meta dictionary. This allows custom Document superclasses. + if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or + ('meta' in attrs and attrs['meta'].get('abstract', False))): + # Make sure no base class was non-abstract + non_abstract_bases = [b for b in bases + if hasattr(b,'_meta') and not b._meta.get('abstract', False)] + if non_abstract_bases: + raise ValueError("Abstract document cannot have non-abstract base") return super_new(cls, name, bases, attrs) collection = name.lower() @@ -276,6 +285,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): base_indexes += base._meta.get('indexes', []) meta = { + 'abstract': False, 'collection': collection, 'max_documents': None, 'max_size': None, diff --git a/tests/document.py b/tests/document.py index 8f47ec3c..fe67312e 100644 --- a/tests/document.py +++ b/tests/document.py @@ -29,6 +29,9 @@ class DocumentTest(unittest.TestCase): age = IntField() self.Person = Person + def tearDown(self): + self.Person.drop_collection() + def test_drop_collection(self): """Ensure that the collection may be dropped from the database. """ @@ -188,6 +191,34 @@ class DocumentTest(unittest.TestCase): self.assertFalse('_cls' in comment.to_mongo()) self.assertFalse('_types' in comment.to_mongo()) + def test_abstract_documents(self): + """Ensure that a document superclass can be marked as abstract + thereby not using it as the name for the collection.""" + + class Animal(Document): + name = StringField() + meta = {'abstract': True} + + class Fish(Animal): pass + class Guppy(Fish): pass + + class Mammal(Animal): + meta = {'abstract': True} + class Human(Mammal): pass + + self.assertFalse('collection' in Animal._meta) + self.assertFalse('collection' in Mammal._meta) + + self.assertEqual(Fish._meta['collection'], 'fish') + self.assertEqual(Guppy._meta['collection'], 'fish') + self.assertEqual(Human._meta['collection'], 'human') + + def create_bad_abstract(): + class EvilHuman(Human): + evil = BooleanField(default=True) + meta = {'abstract': True} + self.assertRaises(ValueError, create_bad_abstract) + def test_collection_name(self): """Ensure that a collection with a specified name may be used. """ @@ -907,9 +938,6 @@ class DocumentTest(unittest.TestCase): A.drop_collection() B.drop_collection() - def tearDown(self): - self.Person.drop_collection() - def test_document_hash(self): """Test document in list, dict, set """ From 32bab13a8acc0e091094468be0a6d890719bfec5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 24 May 2011 12:50:48 +0100 Subject: [PATCH 076/214] Added MapField, similar to DictField Similar to DictField except the value of each entry is always of a certain (declared) field type. 
Thanks again to @theojulienne for the code #108 --- mongoengine/fields.py | 93 ++++++++++++++++++++++++++++++++++++++++++- tests/fields.py | 61 ++++++++++++++++++++++++++++ 2 files changed, 153 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 0cc8219b..d1f9b665 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -17,7 +17,7 @@ import warnings __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', 'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', - 'ObjectIdField', 'ReferenceField', 'ValidationError', + 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', 'DecimalField', 'URLField', 'GenericReferenceField', 'FileField', 'BinaryField', 'SortedListField', 'EmailField', 'GeoPointField'] @@ -451,6 +451,97 @@ class DictField(BaseField): def lookup_member(self, member_name): return self.basecls(db_field=member_name) + +class MapField(BaseField): + """A field that maps a name to a specified field type. Similar to + a DictField, except the 'value' of each item must match the specified + field type. + + .. versionadded:: 0.5 + """ + + def __init__(self, field=None, *args, **kwargs): + if not isinstance(field, BaseField): + raise ValidationError('Argument to MapField constructor must be ' + 'a valid field') + self.field = field + kwargs.setdefault('default', lambda: {}) + super(MapField, self).__init__(*args, **kwargs) + + def validate(self, value): + """Make sure that a list of valid fields is being used. + """ + if not isinstance(value, dict): + raise ValidationError('Only dictionaries may be used in a ' + 'DictField') + + if any(('.' in k or '$' in k) for k in value): + raise ValidationError('Invalid dictionary key name - keys may not ' + 'contain "." or "$" characters') + + try: + [self.field.validate(item) for item in value.values()] + except Exception, err: + raise ValidationError('Invalid MapField item (%s)' % str(item)) + + def __get__(self, instance, owner): + """Descriptor to automatically dereference references. 
+ """ + if instance is None: + # Document class being used rather than a document object + return self + + if isinstance(self.field, ReferenceField): + referenced_type = self.field.document_type + # Get value from document instance if available + value_dict = instance._data.get(self.name) + if value_dict: + deref_dict = [] + for key,value in value_dict.iteritems(): + # Dereference DBRefs + if isinstance(value, (pymongo.dbref.DBRef)): + value = _get_db().dereference(value) + deref_dict[key] = referenced_type._from_son(value) + else: + deref_dict[key] = value + instance._data[self.name] = deref_dict + + if isinstance(self.field, GenericReferenceField): + value_dict = instance._data.get(self.name) + if value_dict: + deref_dict = [] + for key,value in value_dict.iteritems(): + # Dereference DBRefs + if isinstance(value, (dict, pymongo.son.SON)): + deref_dict[key] = self.field.dereference(value) + else: + deref_dict[key] = value + instance._data[self.name] = deref_dict + + return super(MapField, self).__get__(instance, owner) + + def to_python(self, value): + return dict( [(key,self.field.to_python(item)) for key,item in value.iteritems()] ) + + def to_mongo(self, value): + return dict( [(key,self.field.to_mongo(item)) for key,item in value.iteritems()] ) + + def prepare_query_value(self, op, value): + return self.field.prepare_query_value(op, value) + + def lookup_member(self, member_name): + return self.field.lookup_member(member_name) + + def _set_owner_document(self, owner_document): + self.field.owner_document = owner_document + self._owner_document = owner_document + + def _get_owner_document(self, owner_document): + self._owner_document = owner_document + + owner_document = property(_get_owner_document, _set_owner_document) + + class ReferenceField(BaseField): """A reference to a document that will be automatically dereferenced on access (lazily). 
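
To make the MapField validation rules added in this patch concrete, here is a minimal usage sketch. The `Game` document and its field names are invented for illustration, and it assumes `MapField` and `ValidationError` can be imported from the `mongoengine` package once this patch is applied:

```python
from mongoengine import Document, IntField, MapField, ValidationError

class Game(Document):
    # Every value in the map must validate against the declared IntField
    scores = MapField(IntField())

game = Game(scores={'level_one': 10, 'level_two': 25})
game.validate()                      # passes: all values are integers

game.scores['level_three'] = 'lots'
try:
    game.validate()                  # fails: 'lots' is not a valid IntField value
except ValidationError:
    print 'map values must be integers'

game.scores = {'bad.key': 1}
try:
    game.validate()                  # fails: keys may not contain '.' or '$'
except ValidationError:
    print 'invalid key name'
```

As with other fields, nothing is checked at assignment time; validation only runs when `validate()` is called directly or via `save()`.
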
diff --git a/tests/fields.py b/tests/fields.py index 38409b6a..62bd3a1f 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -825,5 +825,66 @@ class FieldTest(unittest.TestCase): self.assertEqual(d2.data, {}) self.assertEqual(d2.data2, {}) + def test_mapfield(self): + """Ensure that the MapField handles the declared type.""" + + class Simple(Document): + mapping = MapField(IntField()) + + Simple.drop_collection() + + e = Simple() + e.mapping['someint'] = 1 + e.save() + + def create_invalid_mapping(): + e.mapping['somestring'] = "abc" + e.save() + + self.assertRaises(ValidationError, create_invalid_mapping) + + def create_invalid_class(): + class NoDeclaredType(Document): + mapping = MapField() + + self.assertRaises(ValidationError, create_invalid_class) + + Simple.drop_collection() + + def test_complex_mapfield(self): + """Ensure that the MapField can handle complex declared types.""" + + class SettingBase(EmbeddedDocument): + pass + + class StringSetting(SettingBase): + value = StringField() + + class IntegerSetting(SettingBase): + value = IntField() + + class Extensible(Document): + mapping = MapField(EmbeddedDocumentField(SettingBase)) + + Extensible.drop_collection() + + e = Extensible() + e.mapping['somestring'] = StringSetting(value='foo') + e.mapping['someint'] = IntegerSetting(value=42) + e.save() + + e2 = Extensible.objects.get(id=e.id) + self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) + self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) + + def create_invalid_mapping(): + e.mapping['someint'] = 123 + e.save() + + self.assertRaises(ValidationError, create_invalid_mapping) + + Extensible.drop_collection() + + if __name__ == '__main__': unittest.main() From 7ecf84395a698818440b70104dc2f4d5984a4386 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 24 May 2011 14:07:58 +0100 Subject: [PATCH 077/214] Improved DictFields Allow searching multiple levels deep in DictFields Allow DictField entries containing strings to use matching operators Thanks again to @theojulien for the initial code #108 --- mongoengine/fields.py | 12 +++++++++++- tests/fields.py | 23 +++++++++++++++++++++-- tests/queryset.py | 2 +- 3 files changed, 33 insertions(+), 4 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index d1f9b665..11366dd0 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -449,7 +449,17 @@ class DictField(BaseField): 'contain "." or "$" characters') def lookup_member(self, member_name): - return self.basecls(db_field=member_name) + return DictField(basecls=self.basecls, db_field=member_name) + + def prepare_query_value(self, op, value): + match_operators = ['contains', 'icontains', 'startswith', + 'istartswith', 'endswith', 'iendswith', + 'exact', 'iexact'] + + if op in match_operators and isinstance(value, basestring): + return StringField().prepare_query_value(op, value) + + return super(DictField,self).prepare_query_value(op, value) class MapField(BaseField): diff --git a/tests/fields.py b/tests/fields.py index 62bd3a1f..00b1c886 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -262,12 +262,14 @@ class FieldTest(unittest.TestCase): BlogPost.drop_collection() - def test_dict_validation(self): + def test_dict_field(self): """Ensure that dict types work as expected. 
""" class BlogPost(Document): info = DictField() + BlogPost.drop_collection() + post = BlogPost() post.info = 'my post' self.assertRaises(ValidationError, post.validate) @@ -282,7 +284,24 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, post.validate) post.info = {'title': 'test'} - post.validate() + post.save() + + post = BlogPost() + post.info = {'details': {'test': 'test'}} + post.save() + + post = BlogPost() + post.info = {'details': {'test': 3}} + post.save() + + self.assertEquals(BlogPost.objects.count(), 3) + self.assertEquals(BlogPost.objects.filter(info__title__exact='test').count(), 1) + self.assertEquals(BlogPost.objects.filter(info__details__test__exact='test').count(), 1) + + # Confirm handles non strings or non existing keys + self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0) + self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) + BlogPost.drop_collection() def test_embedded_document_validation(self): """Ensure that invalid embedded documents cannot be assigned to diff --git a/tests/queryset.py b/tests/queryset.py index 777f9e36..6c0b686f 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1539,7 +1539,7 @@ class QuerySetTest(unittest.TestCase): t = Test(testdict={'f': 'Value'}) t.save() - self.assertEqual(len(Test.objects(testdict__f__startswith='Val')), 0) + self.assertEqual(len(Test.objects(testdict__f__startswith='Val')), 1) self.assertEqual(len(Test.objects(testdict__f='Value')), 1) Test.drop_collection() From c3a88404356c40702ca0204193ede1b1bd21e0ad Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 24 May 2011 20:27:19 +0100 Subject: [PATCH 078/214] Blinker signals added --- mongoengine/base.py | 6 ++ mongoengine/document.py | 10 ++++ mongoengine/signals.py | 41 +++++++++++++ setup.py | 6 +- tests/signals.py | 130 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 190 insertions(+), 3 deletions(-) create mode 100644 mongoengine/signals.py create mode 100644 tests/signals.py diff --git a/mongoengine/base.py b/mongoengine/base.py index ffceb794..101bb73f 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -2,6 +2,8 @@ from queryset import QuerySet, QuerySetManager from queryset import DoesNotExist, MultipleObjectsReturned from queryset import DO_NOTHING +from mongoengine import signals + import sys import pymongo import pymongo.objectid @@ -382,6 +384,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): class BaseDocument(object): def __init__(self, **values): + signals.pre_init.send(self, values=values) + self._data = {} # Assign default values to instance for attr_name in self._fields.keys(): @@ -395,6 +399,8 @@ class BaseDocument(object): except AttributeError: pass + signals.post_init.send(self) + def validate(self): """Ensure that all fields' values are valid and that required fields are present. diff --git a/mongoengine/document.py b/mongoengine/document.py index 771b9229..b563f427 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,3 +1,4 @@ +from mongoengine import signals from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, ValidationError) from queryset import OperationError @@ -75,6 +76,8 @@ class Document(BaseDocument): For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. 
""" + signals.pre_save.send(self) + if validate: self.validate() @@ -82,6 +85,7 @@ class Document(BaseDocument): write_options = {} doc = self.to_mongo() + created = '_id' not in doc try: collection = self.__class__.objects._collection if force_insert: @@ -96,12 +100,16 @@ class Document(BaseDocument): id_field = self._meta['id_field'] self[id_field] = self._fields[id_field].to_python(object_id) + signals.post_save.send(self, created=created) + def delete(self, safe=False): """Delete the :class:`~mongoengine.Document` from the database. This will only take effect if the document has been previously saved. :param safe: check if the operation succeeded before returning """ + signals.pre_delete.send(self) + id_field = self._meta['id_field'] object_id = self._fields[id_field].to_mongo(self[id_field]) try: @@ -110,6 +118,8 @@ class Document(BaseDocument): message = u'Could not delete document (%s)' % err.message raise OperationError(message) + signals.post_delete.send(self) + @classmethod def register_delete_rule(cls, document_cls, field_name, rule): """This method registers the delete rules to apply when removing this diff --git a/mongoengine/signals.py b/mongoengine/signals.py new file mode 100644 index 00000000..4caa5530 --- /dev/null +++ b/mongoengine/signals.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- + +signals_available = False +try: + from blinker import Namespace + signals_available = True +except ImportError: + class Namespace(object): + def signal(self, name, doc=None): + return _FakeSignal(name, doc) + + class _FakeSignal(object): + """If blinker is unavailable, create a fake class with the same + interface that allows sending of signals but will fail with an + error on anything else. Instead of doing anything on send, it + will just ignore the arguments and do nothing instead. + """ + + def __init__(self, name, doc=None): + self.name = name + self.__doc__ = doc + + def _fail(self, *args, **kwargs): + raise RuntimeError('signalling support is unavailable ' + 'because the blinker library is ' + 'not installed.') + send = lambda *a, **kw: None + connect = disconnect = has_receivers_for = receivers_for = \ + temporarily_connected_to = _fail + del _fail + +# the namespace for code signals. If you are not mongoengine code, do +# not put signals in here. Create your own namespace instead. 
+_signals = Namespace() + +pre_init = _signals.signal('pre_init') +post_init = _signals.signal('post_init') +pre_save = _signals.signal('pre_save') +post_save = _signals.signal('post_save') +pre_delete = _signals.signal('pre_delete') +post_delete = _signals.signal('post_delete') diff --git a/setup.py b/setup.py index e0585b7c..01a201d5 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ def get_version(version_tuple): version = '%s.%s' % (version, version_tuple[2]) return version -# Dirty hack to get version number from monogengine/__init__.py - we can't +# Dirty hack to get version number from monogengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') @@ -45,6 +45,6 @@ setup(name='mongoengine', long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, - install_requires=['pymongo'], - test_suite='tests', + install_requires=['pymongo', 'blinker'], + test_suite='tests.signals', ) diff --git a/tests/signals.py b/tests/signals.py new file mode 100644 index 00000000..fff2d398 --- /dev/null +++ b/tests/signals.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +import unittest + +from mongoengine import * +from mongoengine import signals + +signal_output = [] + + +class SignalTests(unittest.TestCase): + """ + Testing signals before/after saving and deleting. + """ + + def get_signal_output(self, fn, *args, **kwargs): + # Flush any existing signal output + global signal_output + signal_output = [] + fn(*args, **kwargs) + return signal_output + + def setUp(self): + connect(db='mongoenginetest') + class Author(Document): + name = StringField() + + def __unicode__(self): + return self.name + + @classmethod + def pre_init(cls, instance, **kwargs): + signal_output.append('pre_init signal, %s' % cls.__name__) + signal_output.append(str(kwargs['values'])) + + @classmethod + def post_init(cls, instance, **kwargs): + signal_output.append('post_init signal, %s' % instance) + + @classmethod + def pre_save(cls, instance, **kwargs): + signal_output.append('pre_save signal, %s' % instance) + + @classmethod + def post_save(cls, instance, **kwargs): + signal_output.append('post_save signal, %s' % instance) + if 'created' in kwargs: + if kwargs['created']: + signal_output.append('Is created') + else: + signal_output.append('Is updated') + + @classmethod + def pre_delete(cls, instance, **kwargs): + signal_output.append('pre_delete signal, %s' % instance) + + @classmethod + def post_delete(cls, instance, **kwargs): + signal_output.append('post_delete signal, %s' % instance) + + self.Author = Author + + # Save up the number of connected signals so that we can check at the end + # that all the signals we register get properly unregistered + self.pre_signals = ( + len(signals.pre_init.receivers), + len(signals.post_init.receivers), + len(signals.pre_save.receivers), + len(signals.post_save.receivers), + len(signals.pre_delete.receivers), + len(signals.post_delete.receivers) + ) + + signals.pre_init.connect(Author.pre_init) + signals.post_init.connect(Author.post_init) + signals.pre_save.connect(Author.pre_save) + signals.post_save.connect(Author.post_save) + signals.pre_delete.connect(Author.pre_delete) + signals.post_delete.connect(Author.post_delete) + + def tearDown(self): + signals.pre_init.disconnect(self.Author.pre_init) + signals.post_init.disconnect(self.Author.post_init) + signals.post_delete.disconnect(self.Author.post_delete) + 
signals.pre_delete.disconnect(self.Author.pre_delete) + signals.post_save.disconnect(self.Author.post_save) + signals.pre_save.disconnect(self.Author.pre_save) + + # Check that all our signals got disconnected properly. + post_signals = ( + len(signals.pre_init.receivers), + len(signals.post_init.receivers), + len(signals.pre_save.receivers), + len(signals.post_save.receivers), + len(signals.pre_delete.receivers), + len(signals.post_delete.receivers) + ) + + self.assertEqual(self.pre_signals, post_signals) + + def test_model_signals(self): + """ Model saves should throw some signals. """ + + def create_author(): + a1 = self.Author(name='Bill Shakespeare') + + self.assertEqual(self.get_signal_output(create_author), [ + "pre_init signal, Author", + "{'name': 'Bill Shakespeare'}", + "post_init signal, Bill Shakespeare", + ]) + + a1 = self.Author(name='Bill Shakespeare') + self.assertEqual(self.get_signal_output(a1.save), [ + "pre_save signal, Bill Shakespeare", + "post_save signal, Bill Shakespeare", + "Is created" + ]) + + a1.reload() + a1.name='William Shakespeare' + self.assertEqual(self.get_signal_output(a1.save), [ + "pre_save signal, William Shakespeare", + "post_save signal, William Shakespeare", + "Is updated" + ]) + + self.assertEqual(self.get_signal_output(a1.delete), [ + 'pre_delete signal, William Shakespeare', + 'post_delete signal, William Shakespeare', + ]) \ No newline at end of file From 0708d1bedc53d933750e2b871a1c16626627b1f7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 25 May 2011 09:34:50 +0100 Subject: [PATCH 079/214] Run all tests... --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 01a201d5..d3be64b3 100644 --- a/setup.py +++ b/setup.py @@ -46,5 +46,5 @@ setup(name='mongoengine', platforms=['any'], classifiers=CLASSIFIERS, install_requires=['pymongo', 'blinker'], - test_suite='tests.signals', + test_suite='tests', ) From 3861103585beba17b886d3da044fe49017b638cb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 25 May 2011 09:36:25 +0100 Subject: [PATCH 080/214] Updated connection exception to provide more info on the cause. Fixes #178 --- mongoengine/connection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index fc6c7680..7b5cd210 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -31,8 +31,8 @@ def _get_connection(reconnect=False): if _connection.get(identity) is None or reconnect: try: _connection[identity] = Connection(**_connection_settings) - except: - raise ConnectionError('Cannot connect to the database') + except Exception, e: + raise ConnectionError("Cannot connect to the database:\n%s" % e) return _connection[identity] def _get_db(reconnect=False): From 60c8254f58986f43a25f9190bb97fb07b0742b64 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 25 May 2011 11:10:42 +0100 Subject: [PATCH 081/214] Tweaks to item_frequencies Updated to use a ternary statement and added tests Refs #124 #122 Thanks to @nickvlku for the code. 
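
For readers unfamiliar with the method being tweaked, a minimal usage sketch follows; the `Article` document and its data are invented for illustration. `item_frequencies` builds its counts with the server-side JavaScript that the diff below simplifies:

```python
from mongoengine import Document, ListField, StringField, connect

connect(db='mongoenginetest')

class Article(Document):
    tags = ListField(StringField())

Article.drop_collection()
Article(tags=['music', 'film']).save()
Article(tags=['music']).save()

# Frequency of each tag across the queried set, e.g. music -> 2, film -> 1
freqs = Article.objects.item_frequencies('tags')

# With normalize=True the counts are divided by the total, so they sum to 1.0
ratios = Article.objects.item_frequencies('tags', normalize=True)
```
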
--- mongoengine/queryset.py | 12 ++---------- tests/queryset.py | 13 ++++++++----- 2 files changed, 10 insertions(+), 15 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 54d4845e..f5020ab8 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1298,19 +1298,11 @@ class QuerySet(object): db[collection].find(query).forEach(function(doc) { if (doc[field].constructor == Array) { doc[field].forEach(function(item) { - var preValue = 0; - if (!isNaN(frequencies[item])) { - preValue = frequencies[item]; - } - frequencies[item] = inc + preValue; + frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); }); } else { var item = doc[field]; - var preValue = 0; - if (!isNaN(frequencies[item])) { - preValue = frequencies[item]; - } - frequencies[item] = inc + preValue; + frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); } }); return frequencies; diff --git a/tests/queryset.py b/tests/queryset.py index 5cf08957..9b711406 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1339,7 +1339,7 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() BlogPost(hits=1, tags=['music', 'film', 'actors', 'watch']).save() - BlogPost(hits=2, tags=['music']).save() + BlogPost(hits=2, tags=['music', 'watch']).save() BlogPost(hits=2, tags=['music', 'actors']).save() f = BlogPost.objects.item_frequencies('tags') @@ -1347,20 +1347,23 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(set(['music', 'film', 'actors', 'watch']), set(f.keys())) self.assertEqual(f['music'], 3) self.assertEqual(f['actors'], 2) + self.assertEqual(f['watch'], 2) self.assertEqual(f['film'], 1) # Ensure query is taken into account f = BlogPost.objects(hits__gt=1).item_frequencies('tags') f = dict((key, int(val)) for key, val in f.items()) - self.assertEqual(set(['music', 'actors']), set(f.keys())) + self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) self.assertEqual(f['music'], 2) self.assertEqual(f['actors'], 1) + self.assertEqual(f['watch'], 1) # Check that normalization works f = BlogPost.objects.item_frequencies('tags', normalize=True) - self.assertAlmostEqual(f['music'], 3.0/7.0) - self.assertAlmostEqual(f['actors'], 2.0/7.0) - self.assertAlmostEqual(f['film'], 1.0/7.0) + self.assertAlmostEqual(f['music'], 3.0/8.0) + self.assertAlmostEqual(f['actors'], 2.0/8.0) + self.assertAlmostEqual(f['watch'], 2.0/8.0) + self.assertAlmostEqual(f['film'], 1.0/8.0) # Check item_frequencies works for non-list fields f = BlogPost.objects.item_frequencies('hits') From b1cdd1eb268680aa0425aead0f1080560f080e5c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 25 May 2011 12:01:41 +0100 Subject: [PATCH 082/214] Updated docs regarding ReferenceFields Closes #149 --- docs/tutorial.rst | 15 +++++++++++++++ mongoengine/fields.py | 15 +++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 5db2c4df..63f8fe9b 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -152,6 +152,21 @@ We can then store a list of comment documents in our post document:: tags = ListField(StringField(max_length=30)) comments = ListField(EmbeddedDocumentField(Comment)) +Handling deletions of references +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The :class:`~mongoengine.ReferenceField` object takes a keyword +`reverse_delete_rule` for handling deletion rules if the reference is deleted. 
+To delete all the posts if a user is deleted set the rule:: + + class Post(Document): + title = StringField(max_length=120, required=True) + author = ReferenceField(User, reverse_delete_rule=CASCADE) + tags = ListField(StringField(max_length=30)) + comments = ListField(EmbeddedDocumentField(Comment)) + +See :class:`~mongoengine.ReferenceField` for more information. + Adding data to our Tumblelog ============================ Now that we've defined how our documents will be structured, let's start adding diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 11366dd0..b12c507f 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -555,9 +555,24 @@ class MapField(BaseField): class ReferenceField(BaseField): """A reference to a document that will be automatically dereferenced on access (lazily). + + Use the `reverse_delete_rule` to handle what should happen if the document + the field is referencing is deleted. + + The options are: + + * DO_NOTHING - don't do anything (default). + * NULLIFY - Updates the reference to null. + * CASCADE - Deletes the documents associated with the reference. + * DENY - Prevent the deletion of the reference object. """ def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs): + """Initialises the Reference Field. + + :param reverse_delete_rule: Determines what to do when the referring + object is deleted + """ if not isinstance(document_type, basestring): if not issubclass(document_type, (Document, basestring)): raise ValidationError('Argument to ReferenceField constructor ' From fac3f038a84fb768894f0885f85f3cedaf7ac1dc Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 25 May 2011 12:20:56 +0100 Subject: [PATCH 083/214] Added regression test for issue with unset and pop Closes #118 --- tests/queryset.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index 9b711406..a45aaf3e 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1105,6 +1105,32 @@ class QuerySetTest(unittest.TestCase): self.assertTrue('code' not in post.tags) self.assertEqual(len(post.tags), 1) + def test_update_one_pop_generic_reference(self): + + class BlogTag(Document): + name = StringField(required=True) + + class BlogPost(Document): + slug = StringField() + tags = ListField(ReferenceField(BlogTag), required=True) + + tag_1 = BlogTag(name='code') + tag_1.save() + tag_2 = BlogTag(name='mongodb') + tag_2.save() + + post = BlogPost(slug="test", tags=[tag_1]) + post.save() + + post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) + post.save() + self.assertEqual(len(post.tags), 2) + + BlogPost.objects(slug="test-2").update_one(pop__tags=-1) + + post.reload() + self.assertEqual(len(post.tags), 1) + def test_order_by(self): """Ensure that QuerySets may be ordered. 
""" From eb892241ee3bd1f98d2d8c0fe420841ddb21037c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 25 May 2011 13:31:01 +0100 Subject: [PATCH 084/214] Added regression test for editting embedded documents Closes #35 --- tests/queryset.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index a45aaf3e..296377f3 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1114,6 +1114,9 @@ class QuerySetTest(unittest.TestCase): slug = StringField() tags = ListField(ReferenceField(BlogTag), required=True) + BlogPost.drop_collection() + BlogTag.drop_collection() + tag_1 = BlogTag(name='code') tag_1.save() tag_2 = BlogTag(name='mongodb') @@ -1131,6 +1134,40 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(len(post.tags), 1) + BlogPost.drop_collection() + BlogTag.drop_collection() + + def test_editting_embedded_objects(self): + + class BlogTag(EmbeddedDocument): + name = StringField(required=True) + + class BlogPost(Document): + slug = StringField() + tags = ListField(EmbeddedDocumentField(BlogTag), required=True) + + BlogPost.drop_collection() + + tag_1 = BlogTag(name='code') + tag_2 = BlogTag(name='mongodb') + + post = BlogPost(slug="test", tags=[tag_1]) + post.save() + + post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) + post.save() + self.assertEqual(len(post.tags), 2) + + BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python") + post.reload() + self.assertEquals(post.tags[0].name, 'python') + + BlogPost.objects(slug="test-2").update_one(pop__tags=-1) + post.reload() + self.assertEqual(len(post.tags), 1) + + BlogPost.drop_collection() + def test_order_by(self): """Ensure that QuerySets may be ordered. """ From 5ab13518dbecc826965232a34744bd6ce1cba31e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 25 May 2011 13:50:52 +0100 Subject: [PATCH 085/214] Added test confirming order_by reference field doesnt work --- tests/queryset.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index 296377f3..a2d78d72 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1187,6 +1187,29 @@ class QuerySetTest(unittest.TestCase): ages = [p.age for p in self.Person.objects.order_by('-name')] self.assertEqual(ages, [30, 40, 20]) + def test_confirm_order_by_reference_wont_work(self): + """Ordering by reference is not possible. Use map / reduce.. or + denormalise""" + + class Author(Document): + author = ReferenceField(self.Person) + + Author.drop_collection() + + person_a = self.Person(name="User A", age=20) + person_a.save() + person_b = self.Person(name="User B", age=40) + person_b.save() + person_c = self.Person(name="User C", age=30) + person_c.save() + + Author(author=person_a).save() + Author(author=person_b).save() + Author(author=person_c).save() + + names = [a.author.name for a in Author.objects.order_by('-author__age')] + self.assertEqual(names, ['User A', 'User B', 'User C']) + def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. 
""" From bf6f03a4129ead4975c16822f7516fbae3696f85 Mon Sep 17 00:00:00 2001 From: Alistair Roche Date: Wed, 25 May 2011 17:25:39 +0100 Subject: [PATCH 086/214] Improved MapFields setting --- mongoengine/fields.py | 6 +++++- tests/queryset.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index b12c507f..b2aab5a4 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -537,7 +537,11 @@ class MapField(BaseField): return dict( [(key,self.field.to_mongo(item)) for key,item in value.iteritems()] ) def prepare_query_value(self, op, value): - return self.field.prepare_query_value(op, value) + if op not in ('set', 'unset'): + return self.field.prepare_query_value(op, value) + for key in value: + value[key] = self.field.prepare_query_value(op, value[key]) + return value def lookup_member(self, member_name): return self.field.lookup_member(member_name) diff --git a/tests/queryset.py b/tests/queryset.py index a2d78d72..82cd870d 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -261,6 +261,44 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() + def test_mapfield_update(self): + """Ensure that the MapField can be updated.""" + class Member(EmbeddedDocument): + gender = StringField() + age = IntField() + + class Club(Document): + members = MapField(EmbeddedDocumentField(Member)) + + Club.drop_collection() + + club = Club() + club.members['John'] = Member(gender="M", age=13) + club.save() + + Club.objects().update( + set__members={"John": Member(gender="F", age=14)}) + + club = Club.objects().first() + self.assertEqual(club.members['John'].gender, "F") + self.assertEqual(club.members['John'].age, 14) + + def test_dictfield_update(self): + """Ensure that the MapField can be updated.""" + class Club(Document): + members = DictField() + + club = Club() + club.members['John'] = dict(gender="M", age=13) + club.save() + + Club.objects().update( + set__members={"John": dict(gender="F", age=14)}) + + club = Club.objects().first() + self.assertEqual(club.members['John']['gender'], "F") + self.assertEqual(club.members['John']['age'], 14) + def test_get_or_create(self): """Ensure that ``get_or_create`` returns one result or creates a new document. From 97a13103441b61ebde77f1ca510b0bf556feafd1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 26 May 2011 11:11:00 +0100 Subject: [PATCH 087/214] Tweakin test --- tests/queryset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/queryset.py b/tests/queryset.py index 82cd870d..d5611fdf 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -284,7 +284,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(club.members['John'].age, 14) def test_dictfield_update(self): - """Ensure that the MapField can be updated.""" + """Ensure that the DictField can be updated.""" class Club(Document): members = DictField() From 9dd3504765fe209030213164fe6e3f37a7c4bbb2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 26 May 2011 11:56:56 +0100 Subject: [PATCH 088/214] Updated changelog --- docs/changelog.rst | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index d7c6fe85..29d03bc2 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,6 +2,42 @@ Changelog ========= +Changes in dev +============== + +- Updated connection exception so it provides more info on the cause. 
+- Added searching multiple levels deep in ``DictField``
+- Added ``DictField`` entries containing strings to use matching operators
+- Added ``MapField``, similar to ``DictField``
+- Added Abstract Base Classes
+- Added Custom Objects Managers
+- Added sliced subfields updating
+- Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry
+- Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create``
+- Added slicing / subarray fetching controls
+- Fixed various unique index and other index issues
+- Fixed threaded connection issues
+- Added spherical geospatial query operators
+- Updated queryset to handle latest version of pymongo
+  map_reduce now requires an output.
+- Added ``Document`` __hash__, __ne__ for pickling
+- Added ``FileField`` optional size arg for read method
+- Fixed ``FileField`` seek and tell methods for reading files
+- Added ``QuerySet.clone`` to support copying querysets
+- Fixed item_frequencies when using a name that's the same as a native js function
+- Added reverse delete rules
+- Fixed issue with unset operation
+- Fixed Q-object bug
+- Added ``QuerySet.all_fields`` which resets previous .only() and .exclude()
+- Added ``QuerySet.exclude``
+- Added django style choices
+- Fixed order and filter issue
+- Added ``QuerySet.only`` subfield support
+- Added creation_counter to ``BaseField`` allowing fields to be sorted in the
+  way the user has specified them
+- Fixed various errors
+- Added many tests
+
 Changes in v0.4
 ===============
 - Added ``GridFSStorage`` Django storage backend

From c903af032fd65614585e9b9a377abf14f046fdfc Mon Sep 17 00:00:00 2001
From: Ross Lawley
Date: Thu, 26 May 2011 15:44:43 +0100
Subject: [PATCH 089/214] Added inline_map_reduce functionality

Also added map_reduce method for calculating item_frequencies

Closes #183
---
 docs/changelog.rst      |  2 ++
 mongoengine/queryset.py | 63 ++++++++++++++++++++++++++++++++++++---
 setup.py                |  2 +-
 tests/queryset.py       | 65 ++++++++++++++++++++++----------------------
 4 files changed, 104 insertions(+), 28 deletions(-)

diff --git a/docs/changelog.rst b/docs/changelog.rst
index 29d03bc2..686b326f 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -5,6 +5,8 @@ Changelog
 Changes in dev
 ==============
 
+- Added optional map_reduce method to item_frequencies
+- Added inline_map_reduce option to map_reduce
 - Updated connection exception so it provides more info on the cause.
 - Added searching multiple levels deep in ``DictField``
 
diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py
index f5020ab8..17a1b0da 100644
--- a/mongoengine/queryset.py
+++ b/mongoengine/queryset.py
@@ -774,7 +774,8 @@ class QuerySet(object):
         :param map_f: map function, as :class:`~pymongo.code.Code` or string
         :param reduce_f: reduce function, as :class:`~pymongo.code.Code` or string
-        :param output: output collection name
+        :param output: output collection name, if set to 'inline' will try to
+           use :class:`~pymongo.collection.Collection.inline_map_reduce`
         :param finalize_f: finalize function, an optional function that
             performs any post-reduction processing.
         :param scope: values to insert into map/reduce global scope. Optional.
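
The docstring change above adds an 'inline' output mode, which the hunk that follows routes to pymongo's `inline_map_reduce`. A minimal usage sketch, again with an invented `Article` document and with map/reduce functions written in the same style as the module's own helpers:

```python
from mongoengine import Document, ListField, StringField, connect

connect(db='mongoenginetest')

class Article(Document):
    tags = ListField(StringField())

Article.drop_collection()
Article(tags=['music', 'film']).save()
Article(tags=['music']).save()

map_f = """
    function() {
        this.tags.forEach(function(tag) {
            emit(tag, 1);
        });
    }
"""

reduce_f = """
    function(key, values) {
        var total = 0;
        for (var i = 0; i < values.length; i++) {
            total += values[i];
        }
        return total;
    }
"""

# 'inline' returns the results directly rather than writing them to a
# temporary collection; each result exposes .key and .value
for doc in Article.objects.map_reduce(map_f, reduce_f, 'inline'):
    print doc.key, doc.value
```

Passing a real collection name as the output instead stores the results in that collection and reads them back with a normal `find()`.
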
@@ -824,8 +825,17 @@ class QuerySet(object): if limit: mr_args['limit'] = limit - results = self._collection.map_reduce(map_f, reduce_f, output, **mr_args) - results = results.find() + + if output == 'inline' or (not keep_temp and not self._ordering): + map_reduce_function = 'inline_map_reduce' + else: + map_reduce_function = 'map_reduce' + mr_args['out'] = output + + results = getattr(self._collection, map_reduce_function)(map_f, reduce_f, **mr_args) + + if map_reduce_function == 'map_reduce': + results = results.find() if self._ordering: results = results.sort(self._ordering) @@ -1266,7 +1276,7 @@ class QuerySet(object): """ return self.exec_js(average_func, field) - def item_frequencies(self, field, normalize=False): + def item_frequencies(self, field, normalize=False, map_reduce=False): """Returns a dictionary of all items present in a field across the whole queried set of documents, and their corresponding frequency. This is useful for generating tag clouds, or searching documents. @@ -1276,7 +1286,52 @@ class QuerySet(object): :param field: the field to use :param normalize: normalize the results so they add to 1.0 + :param map_reduce: Use map_reduce over exec_js """ + if map_reduce: + return self._item_frequencies_map_reduce(field, normalize=normalize) + return self._item_frequencies_exec_js(field, normalize=normalize) + + def _item_frequencies_map_reduce(self, field, normalize=False): + map_func = """ + function() { + if (this[~%(field)s].constructor == Array) { + this[~%(field)s].forEach(function(item) { + emit(item, 1); + }); + } else { + emit(this[~%(field)s], 1); + } + } + """ % dict(field=field) + reduce_func = """ + function(key, values) { + var total = 0; + var valuesSize = values.length; + for (var i=0; i < valuesSize; i++) { + total += parseInt(values[i], 10); + } + return total; + } + """ + values = self.map_reduce(map_func, reduce_func, 'inline', keep_temp=False) + frequencies = {} + for f in values: + key = f.key + if isinstance(key, float): + if int(key) == key: + key = int(key) + key = str(key) + frequencies[key] = f.value + + if normalize: + count = sum(frequencies.values()) + frequencies = dict([(k, v/count) for k,v in frequencies.items()]) + + return frequencies + + def _item_frequencies_exec_js(self, field, normalize=False): + """Uses exec_js to execute""" freq_func = """ function(field) { if (options.normalize) { diff --git a/setup.py b/setup.py index e0585b7c..0c19d8d0 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ def get_version(version_tuple): version = '%s.%s' % (version, version_tuple[2]) return version -# Dirty hack to get version number from monogengine/__init__.py - we can't +# Dirty hack to get version number from monogengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') diff --git a/tests/queryset.py b/tests/queryset.py index d5611fdf..1f03fbd9 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1466,35 +1466,54 @@ class QuerySetTest(unittest.TestCase): BlogPost(hits=2, tags=['music', 'watch']).save() BlogPost(hits=2, tags=['music', 'actors']).save() - f = BlogPost.objects.item_frequencies('tags') - f = dict((key, int(val)) for key, val in f.items()) - self.assertEqual(set(['music', 'film', 'actors', 'watch']), set(f.keys())) - self.assertEqual(f['music'], 3) - self.assertEqual(f['actors'], 2) - self.assertEqual(f['watch'], 2) - self.assertEqual(f['film'], 1) + def test_assertions(f): + f = 
dict((key, int(val)) for key, val in f.items()) + self.assertEqual(set(['music', 'film', 'actors', 'watch']), set(f.keys())) + self.assertEqual(f['music'], 3) + self.assertEqual(f['actors'], 2) + self.assertEqual(f['watch'], 2) + self.assertEqual(f['film'], 1) + + exec_js = BlogPost.objects.item_frequencies('tags') + map_reduce = BlogPost.objects.item_frequencies('tags', map_reduce=True) + test_assertions(exec_js) + test_assertions(map_reduce) # Ensure query is taken into account - f = BlogPost.objects(hits__gt=1).item_frequencies('tags') - f = dict((key, int(val)) for key, val in f.items()) - self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) - self.assertEqual(f['music'], 2) - self.assertEqual(f['actors'], 1) - self.assertEqual(f['watch'], 1) + def test_assertions(f): + f = dict((key, int(val)) for key, val in f.items()) + self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) + self.assertEqual(f['music'], 2) + self.assertEqual(f['actors'], 1) + self.assertEqual(f['watch'], 1) + + exec_js = BlogPost.objects(hits__gt=1).item_frequencies('tags') + map_reduce = BlogPost.objects(hits__gt=1).item_frequencies('tags', map_reduce=True) + test_assertions(exec_js) + test_assertions(map_reduce) # Check that normalization works - f = BlogPost.objects.item_frequencies('tags', normalize=True) - self.assertAlmostEqual(f['music'], 3.0/8.0) - self.assertAlmostEqual(f['actors'], 2.0/8.0) - self.assertAlmostEqual(f['watch'], 2.0/8.0) - self.assertAlmostEqual(f['film'], 1.0/8.0) + def test_assertions(f): + self.assertAlmostEqual(f['music'], 3.0/8.0) + self.assertAlmostEqual(f['actors'], 2.0/8.0) + self.assertAlmostEqual(f['watch'], 2.0/8.0) + self.assertAlmostEqual(f['film'], 1.0/8.0) + + exec_js = BlogPost.objects.item_frequencies('tags', normalize=True) + map_reduce = BlogPost.objects.item_frequencies('tags', normalize=True, map_reduce=True) + test_assertions(exec_js) + test_assertions(map_reduce) # Check item_frequencies works for non-list fields - f = BlogPost.objects.item_frequencies('hits') - f = dict((key, int(val)) for key, val in f.items()) - self.assertEqual(set(['1', '2']), set(f.keys())) - self.assertEqual(f['1'], 1) - self.assertEqual(f['2'], 2) + def test_assertions(f): + self.assertEqual(set(['1', '2']), set(f.keys())) + self.assertEqual(f['1'], 1) + self.assertEqual(f['2'], 2) + + exec_js = BlogPost.objects.item_frequencies('hits') + map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) + test_assertions(exec_js) + test_assertions(map_reduce) BlogPost.drop_collection() From 6f5bd7b0b90eb33760896ba907634013d404b4c8 Mon Sep 17 00:00:00 2001 From: Colin Howe Date: Thu, 26 May 2011 18:54:52 +0100 Subject: [PATCH 090/214] Test needs a connection... --- tests/queryset.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index 1f03fbd9..081ffb32 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -2101,6 +2101,9 @@ class QuerySetTest(unittest.TestCase): class QTest(unittest.TestCase): + def setUp(self): + connect(db='mongoenginetest') + def test_empty_q(self): """Ensure that empty Q objects won't hurt. 
""" From 1fa47206aa817dc4556e703de9121d69fb8b064c Mon Sep 17 00:00:00 2001 From: Colin Howe Date: Thu, 26 May 2011 19:39:41 +0100 Subject: [PATCH 091/214] Support for sparse indexes and omitting types from indexes --- mongoengine/queryset.py | 31 ++++++++++++++++++++++--------- tests/document.py | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 9 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 17a1b0da..68afefca 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -382,15 +382,17 @@ class QuerySet(object): return self @classmethod - def _build_index_spec(cls, doc_cls, key_or_list): + def _build_index_spec(cls, doc_cls, spec): """Build a PyMongo index spec from a MongoEngine index spec. """ - if isinstance(key_or_list, basestring): - key_or_list = [key_or_list] + if isinstance(spec, basestring): + spec = {'fields': [spec]} + if isinstance(spec, (list, tuple)): + spec = {'fields': spec} index_list = [] use_types = doc_cls._meta.get('allow_inheritance', True) - for key in key_or_list: + for key in spec['fields']: # Get direction from + or - direction = pymongo.ASCENDING if key.startswith("-"): @@ -411,10 +413,18 @@ class QuerySet(object): use_types = False # If _types is being used, prepend it to every specified index - if doc_cls._meta.get('allow_inheritance') and use_types: + if (spec.get('types', True) and doc_cls._meta.get('allow_inheritance') + and use_types): index_list.insert(0, ('_types', 1)) - return index_list + spec['fields'] = index_list + + if spec.get('sparse', False) and len(spec['fields']) > 1: + raise ValueError( + 'Sparse indexes can only have one field in them. ' + 'See https://jira.mongodb.org/browse/SERVER-2193') + + return spec def __call__(self, q_obj=None, class_check=True, **query): """Filter the selected documents by calling the @@ -465,9 +475,12 @@ class QuerySet(object): # Ensure document-defined indexes are created if self._document._meta['indexes']: - for key_or_list in self._document._meta['indexes']: - self._collection.ensure_index(key_or_list, - background=background, **index_opts) + for spec in self._document._meta['indexes']: + opts = index_opts.copy() + opts['unique'] = spec.get('unique', False) + opts['sparse'] = spec.get('sparse', False) + self._collection.ensure_index(spec['fields'], + background=background, **opts) # If _types is being used (for polymorphism), it needs an index if '_types' in self._query: diff --git a/tests/document.py b/tests/document.py index fe67312e..a8120469 100644 --- a/tests/document.py +++ b/tests/document.py @@ -377,6 +377,40 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + + def test_dictionary_indexes(self): + """Ensure that indexes are used when meta[indexes] contains dictionaries + instead of lists. 
+ """ + class BlogPost(Document): + date = DateTimeField(db_field='addDate', default=datetime.now) + category = StringField() + tags = ListField(StringField()) + meta = { + 'indexes': [ + { 'fields': ['-date'], 'unique': True, + 'sparse': True, 'types': False }, + ], + } + + BlogPost.drop_collection() + + info = BlogPost.objects._collection.index_information() + # _id, '-date' + self.assertEqual(len(info), 3) + + # Indexes are lazy so use list() to perform query + list(BlogPost.objects) + info = BlogPost.objects._collection.index_information() + info = [(value['key'], + value.get('unique', False), + value.get('sparse', False)) + for key, value in info.iteritems()] + self.assertTrue(([('addDate', -1)], True, True) in info) + + BlogPost.drop_collection() + + def test_unique(self): """Ensure that uniqueness constraints are applied to fields. """ From 5d778648e697651ca681d5347051e6071cfe8487 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 27 May 2011 11:33:40 +0100 Subject: [PATCH 092/214] Inital tests for dereferencing improvements --- mongoengine/base.py | 1 + mongoengine/fields.py | 215 +++++++++++++++++++++++-------- mongoengine/tests.py | 58 +++++++++ tests/dereference.py | 288 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 512 insertions(+), 50 deletions(-) create mode 100644 mongoengine/tests.py create mode 100644 tests/dereference.py diff --git a/mongoengine/base.py b/mongoengine/base.py index ffceb794..4e3154fd 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -126,6 +126,7 @@ class BaseField(object): self.validate(value) + class ObjectIdField(BaseField): """An field wrapper around MongoDB's ObjectIds. """ diff --git a/mongoengine/fields.py b/mongoengine/fields.py index b2aab5a4..c21829c9 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -337,33 +337,54 @@ class ListField(BaseField): # Document class being used rather than a document object return self - if isinstance(self.field, ReferenceField): - referenced_type = self.field.document_type - # Get value from document instance if available - value_list = instance._data.get(self.name) - if value_list: - deref_list = [] - for value in value_list: - # Dereference DBRefs - if isinstance(value, (pymongo.dbref.DBRef)): - value = _get_db().dereference(value) - deref_list.append(referenced_type._from_son(value)) - else: - deref_list.append(value) - instance._data[self.name] = deref_list + # Get value from document instance if available + value_list = instance._data.get(self.name) + if isinstance(self.field, ReferenceField) and value_list: + db = _get_db() + value_list = [(k,v) for k,v in enumerate(value_list)] + deref_list = [] + collections = {} - if isinstance(self.field, GenericReferenceField): - value_list = instance._data.get(self.name) - if value_list: - deref_list = [] - for value in value_list: - # Dereference DBRefs - if isinstance(value, (dict, pymongo.son.SON)): - deref_list.append(self.field.dereference(value)) - else: - deref_list.append(value) - instance._data[self.name] = deref_list + for k, v in value_list: + deref_list.append(v) + # Save any DBRefs + if isinstance(v, (pymongo.dbref.DBRef)): + collections.setdefault(v.collection, []).append((k, v)) + # For each collection get the references + for collection, dbrefs in collections.items(): + id_map = dict([(v.id, k) for k, v in dbrefs]) + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + for ref in references: + key = id_map[ref['_id']] + deref_list[key] = get_document(ref['_cls'])._from_son(ref) + 
instance._data[self.name] = deref_list + + # Get value from document instance if available + if isinstance(self.field, GenericReferenceField) and value_list: + + db = _get_db() + value_list = [(k,v) for k,v in enumerate(value_list)] + deref_list = [] + classes = {} + + for k, v in value_list: + deref_list.append(v) + # Save any DBRefs + if isinstance(v, (dict, pymongo.son.SON)): + classes.setdefault(v['_cls'], []).append((k, v)) + + # For each collection get the references + for doc_cls, dbrefs in classes.items(): + id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) + doc_cls = get_document(doc_cls) + collection = doc_cls._meta['collection'] + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + + for ref in references: + key = id_map[ref['_id']] + deref_list[key] = doc_cls._from_son(ref) + instance._data[self.name] = deref_list return super(ListField, self).__get__(instance, owner) def to_python(self, value): @@ -501,32 +522,53 @@ class MapField(BaseField): # Document class being used rather than a document object return self - if isinstance(self.field, ReferenceField): - referenced_type = self.field.document_type - # Get value from document instance if available - value_dict = instance._data.get(self.name) - if value_dict: - deref_dict = [] - for key,value in value_dict.iteritems(): - # Dereference DBRefs - if isinstance(value, (pymongo.dbref.DBRef)): - value = _get_db().dereference(value) - deref_dict[key] = referenced_type._from_son(value) - else: - deref_dict[key] = value - instance._data[self.name] = deref_dict + # Get value from document instance if available + value_list = instance._data.get(self.name) + if isinstance(self.field, ReferenceField) and value_list: + db = _get_db() + deref_dict = {} + collections = {} - if isinstance(self.field, GenericReferenceField): - value_dict = instance._data.get(self.name) - if value_dict: - deref_dict = [] - for key,value in value_dict.iteritems(): - # Dereference DBRefs - if isinstance(value, (dict, pymongo.son.SON)): - deref_dict[key] = self.field.dereference(value) - else: - deref_dict[key] = value - instance._data[self.name] = deref_dict + for k, v in value_list.items(): + deref_dict[k] = v + # Save any DBRefs + if isinstance(v, (pymongo.dbref.DBRef)): + collections.setdefault(v.collection, []).append((k, v)) + + # For each collection get the references + for collection, dbrefs in collections.items(): + id_map = dict([(v.id, k) for k, v in dbrefs]) + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + for ref in references: + key = id_map[ref['_id']] + deref_dict[key] = get_document(ref['_cls'])._from_son(ref) + instance._data[self.name] = deref_dict + + # Get value from document instance if available + if isinstance(self.field, GenericReferenceField) and value_list: + + db = _get_db() + value_list = [(k,v) for k,v in value_list.items()] + deref_dict = {} + classes = {} + + for k, v in value_list: + deref_dict[k] = v + # Save any DBRefs + if isinstance(v, (dict, pymongo.son.SON)): + classes.setdefault(v['_cls'], []).append((k, v)) + + # For each collection get the references + for doc_cls, dbrefs in classes.items(): + id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) + doc_cls = get_document(doc_cls) + collection = doc_cls._meta['collection'] + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + + for ref in references: + key = id_map[ref['_id']] + deref_dict[key] = doc_cls._from_son(ref) + instance._data[self.name] = deref_dict return super(MapField, self).__get__(instance, owner) @@ 
-869,3 +911,76 @@ class GeoPointField(BaseField): if (not isinstance(value[0], (float, int)) and not isinstance(value[1], (float, int))): raise ValidationError('Both values in point must be float or int.') + + + +class DereferenceMixin(object): + """ WORK IN PROGRESS""" + + def __get__(self, instance, owner): + """Descriptor to automatically dereference references. + """ + if instance is None: + # Document class being used rather than a document object + return self + + # Get value from document instance if available + value_list = instance._data.get(self.name) + if not value_list: + return super(MapField, self).__get__(instance, owner) + + is_dict = True + if not hasattr(value_list, 'items'): + is_dict = False + value_list = dict([(k,v) for k,v in enumerate(value_list)]) + + if isinstance(self.field, ReferenceField) and value_list: + db = _get_db() + dbref = {} + if not is_dict: + dbref = [] + collections = {} + + for k, v in value_list.items(): + dbref[k] = v + # Save any DBRefs + if isinstance(v, (pymongo.dbref.DBRef)): + collections.setdefault(v.collection, []).append((k, v)) + + # For each collection get the references + for collection, dbrefs in collections.items(): + id_map = dict([(v.id, k) for k, v in dbrefs]) + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + for ref in references: + key = id_map[ref['_id']] + dbref[key] = get_document(ref['_cls'])._from_son(ref) + + instance._data[self.name] = dbref + + # Get value from document instance if available + if isinstance(self.field, GenericReferenceField) and value_list: + + db = _get_db() + value_list = [(k,v) for k,v in value_list.items()] + dbref = {} + classes = {} + + for k, v in value_list: + dbref[k] = v + # Save any DBRefs + if isinstance(v, (dict, pymongo.son.SON)): + classes.setdefault(v['_cls'], []).append((k, v)) + + # For each collection get the references + for doc_cls, dbrefs in classes.items(): + id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) + doc_cls = get_document(doc_cls) + collection = doc_cls._meta['collection'] + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + + for ref in references: + key = id_map[ref['_id']] + dbref[key] = doc_cls._from_son(ref) + instance._data[self.name] = dbref + + return super(DereferenceField, self).__get__(instance, owner) \ No newline at end of file diff --git a/mongoengine/tests.py b/mongoengine/tests.py new file mode 100644 index 00000000..4932bc2c --- /dev/null +++ b/mongoengine/tests.py @@ -0,0 +1,58 @@ +from mongoengine.connection import _get_db + +class query_counter(object): + """ Query_counter contextmanager to get the number of queries. """ + + def __init__(self): + """ Construct the query_counter. """ + self.counter = 0 + self.db = _get_db() + + def __enter__(self): + """ On every with block we need to drop the profile collection. """ + self.db.set_profiling_level(0) + self.db.system.profile.drop() + self.db.set_profiling_level(2) + return self + + def __exit__(self, t, value, traceback): + """ Reset the profiling level. """ + self.db.set_profiling_level(0) + + def __eq__(self, value): + """ == Compare querycounter. """ + return value == self._get_count() + + def __ne__(self, value): + """ != Compare querycounter. """ + return not self.__eq__(value) + + def __lt__(self, value): + """ < Compare querycounter. """ + return self._get_count() < value + + def __le__(self, value): + """ <= Compare querycounter. """ + return self._get_count() <= value + + def __gt__(self, value): + """ > Compare querycounter. 
""" + return self._get_count() > value + + def __ge__(self, value): + """ >= Compare querycounter. """ + return self._get_count() >= value + + def __int__(self): + """ int representation. """ + return self._get_count() + + def __repr__(self): + """ repr query_counter as the number of queries. """ + return u"%s" % self._get_count() + + def _get_count(self): + """ Get the number of queries. """ + count = self.db.system.profile.find().count() - self.counter + self.counter += 1 + return count diff --git a/tests/dereference.py b/tests/dereference.py new file mode 100644 index 00000000..2764ee72 --- /dev/null +++ b/tests/dereference.py @@ -0,0 +1,288 @@ +import unittest + +from mongoengine import * +from mongoengine.connection import _get_db +from mongoengine.tests import query_counter + + +class FieldTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + self.db = _get_db() + + def ztest_list_item_dereference(self): + """Ensure that DBRef items in ListFields are dereferenced. + """ + class User(Document): + name = StringField() + + class Group(Document): + members = ListField(ReferenceField(User)) + + User.drop_collection() + Group.drop_collection() + + for i in xrange(1, 51): + user = User(name='user %s' % i) + user.save() + + group = Group(members=User.objects) + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + User.drop_collection() + Group.drop_collection() + + def ztest_recursive_reference(self): + """Ensure that ReferenceFields can reference their own documents. + """ + class Employee(Document): + name = StringField() + boss = ReferenceField('self') + friends = ListField(ReferenceField('self')) + + bill = Employee(name='Bill Lumbergh') + bill.save() + + michael = Employee(name='Michael Bolton') + michael.save() + + samir = Employee(name='Samir Nagheenanajar') + samir.save() + + friends = [michael, samir] + peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) + peter.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + peter = Employee.objects.with_id(peter.id) + self.assertEqual(q, 1) + + peter.boss + self.assertEqual(q, 2) + + peter.friends + self.assertEqual(q, 3) + + def ztest_generic_reference(self): + + class UserA(Document): + name = StringField() + + class UserB(Document): + name = StringField() + + class UserC(Document): + name = StringField() + + class Group(Document): + members = ListField(GenericReferenceField()) + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + members = [] + for i in xrange(1, 51): + a = UserA(name='User A %s' % i) + a.save() + + b = UserB(name='User B %s' % i) + b.save() + + c = UserC(name='User C %s' % i) + c.save() + + members += [a, b, c] + + group = Group(members=members) + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + def test_map_field_reference(self): + + class User(Document): + name = StringField() + + class Group(Document): + members = MapField(ReferenceField(User)) + + User.drop_collection() + Group.drop_collection() + + members = [] + for i in xrange(1, 51): + user = User(name='user 
%s' % i) + user.save() + members.append(user) + + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + User.drop_collection() + Group.drop_collection() + + def ztest_generic_reference_dict_field(self): + + class UserA(Document): + name = StringField() + + class UserB(Document): + name = StringField() + + class UserC(Document): + name = StringField() + + class Group(Document): + members = DictField() + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + members = [] + for i in xrange(1, 51): + a = UserA(name='User A %s' % i) + a.save() + + b = UserB(name='User B %s' % i) + b.save() + + c = UserC(name='User C %s' % i) + c.save() + + members += [a, b, c] + + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + group.members = {} + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 1) + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + def test_generic_reference_map_field(self): + + class UserA(Document): + name = StringField() + + class UserB(Document): + name = StringField() + + class UserC(Document): + name = StringField() + + class Group(Document): + members = MapField(GenericReferenceField()) + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + members = [] + for i in xrange(1, 51): + a = UserA(name='User A %s' % i) + a.save() + + b = UserB(name='User B %s' % i) + b.save() + + c = UserC(name='User C %s' % i) + c.save() + + members += [a, b, c] + + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + group.members = {} + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 1) + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() \ No newline at end of file From 40df08c74c44546fd04f23f1cba4da0f5f162d0e Mon Sep 17 00:00:00 2001 From: Colin Howe Date: Sun, 29 May 2011 13:33:00 +0100 Subject: [PATCH 093/214] Fix QuerySet.ensure_index for new index specs --- mongoengine/queryset.py | 10 +++++++--- tests/queryset.py | 16 ++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 68afefca..2de15ed4 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -376,9 +376,13 @@ class QuerySet(object): construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering """ - index_list = QuerySet._build_index_spec(self._document, key_or_list) - 
self._collection.ensure_index(index_list, drop_dups=drop_dups,
-                                      background=background)
+        index_spec = QuerySet._build_index_spec(self._document, key_or_list)
+        self._collection.ensure_index(
+            index_spec['fields'],
+            drop_dups=drop_dups,
+            background=background,
+            sparse=index_spec.get('sparse', False),
+            unique=index_spec.get('unique', False))
         return self
 
     @classmethod
diff --git a/tests/queryset.py b/tests/queryset.py
index 081ffb32..8d046902 100644
--- a/tests/queryset.py
+++ b/tests/queryset.py
@@ -2099,6 +2099,22 @@ class QuerySetTest(unittest.TestCase):
 
         Number.drop_collection()
 
+    def test_ensure_index(self):
+        """Ensure that manual creation of indexes works.
+        """
+        class Comment(Document):
+            message = StringField()
+
+        Comment.objects.ensure_index('message')
+
+        info = Comment.objects._collection.index_information()
+        info = [(value['key'],
+                 value.get('unique', False),
+                 value.get('sparse', False))
+                for key, value in info.iteritems()]
+        self.assertTrue(([('_types', 1), ('message', 1)], False, False) in info)
+
+
 class QTest(unittest.TestCase):
 
     def setUp(self):
From 9a2cf206b22f7e9697b5e2d7ea47d37230f68206 Mon Sep 17 00:00:00 2001
From: Colin Howe
Date: Sun, 29 May 2011 13:38:54 +0100
Subject: [PATCH 094/214] Documentation for new-style indices

---
 docs/guide/defining-documents.rst | 22 +++++++++++++++++++---
 1 file changed, 19 insertions(+), 3 deletions(-)

diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst
index e333674e..a524520c 100644
--- a/docs/guide/defining-documents.rst
+++ b/docs/guide/defining-documents.rst
@@ -341,9 +341,10 @@ Indexes
 You can specify indexes on collections to make querying faster. This is done
 by creating a list of index specifications called :attr:`indexes` in the
 :attr:`~mongoengine.Document.meta` dictionary, where an index specification may
-either be a single field name, or a tuple containing multiple field names. A
-direction may be specified on fields by prefixing the field name with a **+**
-or a **-** sign. Note that direction only matters on multi-field indexes. ::
+either be a single field name, a tuple containing multiple field names, or a
+dictionary containing a full index definition. A direction may be specified on
+fields by prefixing the field name with a **+** or a **-** sign. Note that
+direction only matters on multi-field indexes. ::
 
     class Page(Document):
         title = StringField()
@@ -352,6 +353,21 @@ or a **-** sign. Note that direction only matters on multi-field indexes. ::
             'indexes': ['title', ('title', '-rating')]
         }
 
+If a dictionary is passed then the following options are available:
+
+:attr:`fields` (Default: None)
+    The fields to index. Specified in the same format as described above.
+
+:attr:`types` (Default: True)
+    Whether the index should have the :attr:`_types` field added automatically
+    to the start of the index.
+
+:attr:`sparse` (Default: False)
+    Whether the index should be sparse.
+
+:attr:`unique` (Default: False)
+    Whether the index should be unique.
+
 .. note::
     Geospatial indexes will be automatically created for all
    :class:`~mongoengine.GeoPointField`\ s
From ec7effa0ef8c3a71d1f8dd0695639f60763b9858 Mon Sep 17 00:00:00 2001
From: Ross Lawley
Date: Mon, 6 Jun 2011 11:33:40 +0100
Subject: [PATCH 095/214] Added DereferenceBaseField class

Handles the lazy dereferencing of all items in a list / dict. Improves
query efficiency by an order of magnitude.
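The batched lookup that this patch introduces can be sketched in isolation with plain
pymongo (a rough illustration only, not the patch's implementation: it assumes a local
``mongoenginetest`` database and returns raw documents rather than MongoEngine
instances)::

    from collections import defaultdict

    from pymongo import Connection
    from pymongo.dbref import DBRef

    db = Connection()['mongoenginetest']

    def dereference_all(dbrefs):
        """Resolve a list of DBRefs using one $in query per collection."""
        resolved = list(dbrefs)                 # keep the original ordering
        by_collection = defaultdict(list)
        for position, ref in enumerate(dbrefs):
            if isinstance(ref, DBRef):
                by_collection[ref.collection].append((position, ref))

        # One find() per referenced collection instead of one per item
        for collection, refs in by_collection.items():
            id_map = dict((ref.id, position) for position, ref in refs)
            for doc in db[collection].find({'_id': {'$in': id_map.keys()}}):
                resolved[id_map[doc['_id']]] = doc
        return resolved

This is why a fifty-item list of references costs a single extra query in the
dereference tests rather than fifty.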
--- mongoengine/base.py | 82 ++++++++++++++++ mongoengine/fields.py | 223 ++++-------------------------------------- tests/dereference.py | 6 +- 3 files changed, 104 insertions(+), 207 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 4e3154fd..ce61547e 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -5,6 +5,7 @@ from queryset import DO_NOTHING import sys import pymongo import pymongo.objectid +from operator import itemgetter class NotRegistered(Exception): @@ -127,6 +128,87 @@ class BaseField(object): self.validate(value) +class DereferenceBaseField(BaseField): + """Handles the lazy dereferencing of a queryset. Will dereference all + items in a list / dict rather than one at a time. + """ + + def __get__(self, instance, owner): + """Descriptor to automatically dereference references. + """ + from fields import ReferenceField, GenericReferenceField + from connection import _get_db + + if instance is None: + # Document class being used rather than a document object + return self + + # Get value from document instance if available + value_list = instance._data.get(self.name) + if not value_list: + return super(DereferenceBaseField, self).__get__(instance, owner) + + is_list = False + if not hasattr(value_list, 'items'): + is_list = True + value_list = dict([(k,v) for k,v in enumerate(value_list)]) + + if isinstance(self.field, ReferenceField) and value_list: + db = _get_db() + dbref = {} + collections = {} + + for k, v in value_list.items(): + dbref[k] = v + # Save any DBRefs + if isinstance(v, (pymongo.dbref.DBRef)): + collections.setdefault(v.collection, []).append((k, v)) + + # For each collection get the references + for collection, dbrefs in collections.items(): + id_map = dict([(v.id, k) for k, v in dbrefs]) + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + for ref in references: + key = id_map[ref['_id']] + dbref[key] = get_document(ref['_cls'])._from_son(ref) + + if is_list: + dbref = [v for k,v in sorted(dbref.items(), key=itemgetter(0))] + instance._data[self.name] = dbref + + # Get value from document instance if available + if isinstance(self.field, GenericReferenceField) and value_list: + db = _get_db() + value_list = [(k,v) for k,v in value_list.items()] + dbref = {} + classes = {} + + for k, v in value_list: + dbref[k] = v + # Save any DBRefs + if isinstance(v, (dict, pymongo.son.SON)): + classes.setdefault(v['_cls'], []).append((k, v)) + + # For each collection get the references + for doc_cls, dbrefs in classes.items(): + id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) + doc_cls = get_document(doc_cls) + collection = doc_cls._meta['collection'] + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + + for ref in references: + key = id_map[ref['_id']] + dbref[key] = doc_cls._from_son(ref) + + if is_list: + dbref = [v for k,v in sorted(dbref.items(), key=itemgetter(0))] + + instance._data[self.name] = dbref + + return super(DereferenceBaseField, self).__get__(instance, owner) + + + class ObjectIdField(BaseField): """An field wrapper around MongoDB's ObjectIds. 
""" diff --git a/mongoengine/fields.py b/mongoengine/fields.py index c21829c9..dc03fc05 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1,4 +1,5 @@ -from base import BaseField, ObjectIdField, ValidationError, get_document +from base import (BaseField, DereferenceBaseField, ObjectIdField, + ValidationError, get_document) from queryset import DO_NOTHING from document import Document, EmbeddedDocument from connection import _get_db @@ -12,7 +13,6 @@ import pymongo.binary import datetime, time import decimal import gridfs -import warnings __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', @@ -118,8 +118,8 @@ class EmailField(StringField): EMAIL_REGEX = re.compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom - r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string - r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE # domain + r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string + r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE # domain ) def validate(self, value): @@ -153,6 +153,7 @@ class IntField(BaseField): def prepare_query_value(self, op, value): return int(value) + class FloatField(BaseField): """An floating point number field. """ @@ -178,6 +179,7 @@ class FloatField(BaseField): def prepare_query_value(self, op, value): return float(value) + class DecimalField(BaseField): """A fixed-point decimal number field. @@ -252,21 +254,21 @@ class DateTimeField(BaseField): else: usecs = 0 kwargs = {'microsecond': usecs} - try: # Seconds are optional, so try converting seconds first. + try: # Seconds are optional, so try converting seconds first. return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6], **kwargs) - except ValueError: - try: # Try without seconds. + try: # Try without seconds. return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5], **kwargs) - except ValueError: # Try without hour/minutes/seconds. + except ValueError: # Try without hour/minutes/seconds. try: return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3], **kwargs) except ValueError: return None + class EmbeddedDocumentField(BaseField): """An embedded document field. Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. @@ -314,7 +316,7 @@ class EmbeddedDocumentField(BaseField): return self.to_mongo(value) -class ListField(BaseField): +class ListField(DereferenceBaseField): """A list field that wraps a standard field, allowing multiple instances of the field to be used as a list in the database. """ @@ -330,63 +332,6 @@ class ListField(BaseField): kwargs.setdefault('default', lambda: []) super(ListField, self).__init__(**kwargs) - def __get__(self, instance, owner): - """Descriptor to automatically dereference references. 
- """ - if instance is None: - # Document class being used rather than a document object - return self - - # Get value from document instance if available - value_list = instance._data.get(self.name) - if isinstance(self.field, ReferenceField) and value_list: - db = _get_db() - value_list = [(k,v) for k,v in enumerate(value_list)] - deref_list = [] - collections = {} - - for k, v in value_list: - deref_list.append(v) - # Save any DBRefs - if isinstance(v, (pymongo.dbref.DBRef)): - collections.setdefault(v.collection, []).append((k, v)) - - # For each collection get the references - for collection, dbrefs in collections.items(): - id_map = dict([(v.id, k) for k, v in dbrefs]) - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - for ref in references: - key = id_map[ref['_id']] - deref_list[key] = get_document(ref['_cls'])._from_son(ref) - instance._data[self.name] = deref_list - - # Get value from document instance if available - if isinstance(self.field, GenericReferenceField) and value_list: - - db = _get_db() - value_list = [(k,v) for k,v in enumerate(value_list)] - deref_list = [] - classes = {} - - for k, v in value_list: - deref_list.append(v) - # Save any DBRefs - if isinstance(v, (dict, pymongo.son.SON)): - classes.setdefault(v['_cls'], []).append((k, v)) - - # For each collection get the references - for doc_cls, dbrefs in classes.items(): - id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) - doc_cls = get_document(doc_cls) - collection = doc_cls._meta['collection'] - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - - for ref in references: - key = id_map[ref['_id']] - deref_list[key] = doc_cls._from_son(ref) - instance._data[self.name] = deref_list - return super(ListField, self).__get__(instance, owner) - def to_python(self, value): return [self.field.to_python(item) for item in value] @@ -480,10 +425,10 @@ class DictField(BaseField): if op in match_operators and isinstance(value, basestring): return StringField().prepare_query_value(op, value) - return super(DictField,self).prepare_query_value(op, value) + return super(DictField, self).prepare_query_value(op, value) -class MapField(BaseField): +class MapField(DereferenceBaseField): """A field that maps a name to a specified field type. Similar to a DictField, except the 'value' of each item must match the specified field type. @@ -515,68 +460,11 @@ class MapField(BaseField): except Exception, err: raise ValidationError('Invalid MapField item (%s)' % str(item)) - def __get__(self, instance, owner): - """Descriptor to automatically dereference references. 
- """ - if instance is None: - # Document class being used rather than a document object - return self - - # Get value from document instance if available - value_list = instance._data.get(self.name) - if isinstance(self.field, ReferenceField) and value_list: - db = _get_db() - deref_dict = {} - collections = {} - - for k, v in value_list.items(): - deref_dict[k] = v - # Save any DBRefs - if isinstance(v, (pymongo.dbref.DBRef)): - collections.setdefault(v.collection, []).append((k, v)) - - # For each collection get the references - for collection, dbrefs in collections.items(): - id_map = dict([(v.id, k) for k, v in dbrefs]) - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - for ref in references: - key = id_map[ref['_id']] - deref_dict[key] = get_document(ref['_cls'])._from_son(ref) - instance._data[self.name] = deref_dict - - # Get value from document instance if available - if isinstance(self.field, GenericReferenceField) and value_list: - - db = _get_db() - value_list = [(k,v) for k,v in value_list.items()] - deref_dict = {} - classes = {} - - for k, v in value_list: - deref_dict[k] = v - # Save any DBRefs - if isinstance(v, (dict, pymongo.son.SON)): - classes.setdefault(v['_cls'], []).append((k, v)) - - # For each collection get the references - for doc_cls, dbrefs in classes.items(): - id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) - doc_cls = get_document(doc_cls) - collection = doc_cls._meta['collection'] - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - - for ref in references: - key = id_map[ref['_id']] - deref_dict[key] = doc_cls._from_son(ref) - instance._data[self.name] = deref_dict - - return super(MapField, self).__get__(instance, owner) - def to_python(self, value): - return dict( [(key,self.field.to_python(item)) for key,item in value.iteritems()] ) + return dict([(key, self.field.to_python(item)) for key, item in value.iteritems()]) def to_mongo(self, value): - return dict( [(key,self.field.to_mongo(item)) for key,item in value.iteritems()] ) + return dict([(key, self.field.to_mongo(item)) for key, item in value.iteritems()]) def prepare_query_value(self, op, value): if op not in ('set', 'unset'): @@ -794,11 +682,11 @@ class GridFSProxy(object): self.newfile = self.fs.new_file(**kwargs) self.grid_id = self.newfile._id - def put(self, file, **kwargs): + def put(self, file_obj, **kwargs): if self.grid_id: raise GridFSError('This document already has a file. Either delete ' 'it or call replace to overwrite it') - self.grid_id = self.fs.put(file, **kwargs) + self.grid_id = self.fs.put(file_obj, **kwargs) def write(self, string): if self.grid_id: @@ -827,9 +715,9 @@ class GridFSProxy(object): self.grid_id = None self.gridout = None - def replace(self, file, **kwargs): + def replace(self, file_obj, **kwargs): self.delete() - self.put(file, **kwargs) + self.put(file_obj, **kwargs) def close(self): if self.newfile: @@ -911,76 +799,3 @@ class GeoPointField(BaseField): if (not isinstance(value[0], (float, int)) and not isinstance(value[1], (float, int))): raise ValidationError('Both values in point must be float or int.') - - - -class DereferenceMixin(object): - """ WORK IN PROGRESS""" - - def __get__(self, instance, owner): - """Descriptor to automatically dereference references. 
- """ - if instance is None: - # Document class being used rather than a document object - return self - - # Get value from document instance if available - value_list = instance._data.get(self.name) - if not value_list: - return super(MapField, self).__get__(instance, owner) - - is_dict = True - if not hasattr(value_list, 'items'): - is_dict = False - value_list = dict([(k,v) for k,v in enumerate(value_list)]) - - if isinstance(self.field, ReferenceField) and value_list: - db = _get_db() - dbref = {} - if not is_dict: - dbref = [] - collections = {} - - for k, v in value_list.items(): - dbref[k] = v - # Save any DBRefs - if isinstance(v, (pymongo.dbref.DBRef)): - collections.setdefault(v.collection, []).append((k, v)) - - # For each collection get the references - for collection, dbrefs in collections.items(): - id_map = dict([(v.id, k) for k, v in dbrefs]) - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - for ref in references: - key = id_map[ref['_id']] - dbref[key] = get_document(ref['_cls'])._from_son(ref) - - instance._data[self.name] = dbref - - # Get value from document instance if available - if isinstance(self.field, GenericReferenceField) and value_list: - - db = _get_db() - value_list = [(k,v) for k,v in value_list.items()] - dbref = {} - classes = {} - - for k, v in value_list: - dbref[k] = v - # Save any DBRefs - if isinstance(v, (dict, pymongo.son.SON)): - classes.setdefault(v['_cls'], []).append((k, v)) - - # For each collection get the references - for doc_cls, dbrefs in classes.items(): - id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) - doc_cls = get_document(doc_cls) - collection = doc_cls._meta['collection'] - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - - for ref in references: - key = id_map[ref['_id']] - dbref[key] = doc_cls._from_son(ref) - instance._data[self.name] = dbref - - return super(DereferenceField, self).__get__(instance, owner) \ No newline at end of file diff --git a/tests/dereference.py b/tests/dereference.py index 2764ee72..b6cee89e 100644 --- a/tests/dereference.py +++ b/tests/dereference.py @@ -11,7 +11,7 @@ class FieldTest(unittest.TestCase): connect(db='mongoenginetest') self.db = _get_db() - def ztest_list_item_dereference(self): + def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. """ class User(Document): @@ -42,7 +42,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - def ztest_recursive_reference(self): + def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. """ class Employee(Document): @@ -75,7 +75,7 @@ class FieldTest(unittest.TestCase): peter.friends self.assertEqual(q, 3) - def ztest_generic_reference(self): + def test_generic_reference(self): class UserA(Document): name = StringField() From 7312db5c252bf3c395357cba3b7254cdccd1c6c0 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 6 Jun 2011 11:07:27 +0100 Subject: [PATCH 096/214] Updated docs / authors. Thanks @jorgebastida for the awesome query_counter test context manager. 
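The query_counter context manager mentioned here turns on MongoDB profiling for the
duration of a ``with`` block and compares directly against integers. A minimal usage
sketch (the ``Person`` document is illustrative only, not part of the patch)::

    from mongoengine import Document, StringField, connect
    from mongoengine.tests import query_counter

    connect(db='mongoenginetest')

    class Person(Document):
        name = StringField()

    Person.drop_collection()
    Person(name='Ross').save()

    with query_counter() as q:
        assert q == 0                    # profiling data was just reset
        people = list(Person.objects)    # issues a single find()
        assert q == 1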
--- AUTHORS | 1 + docs/changelog.rst | 2 ++ mongoengine/tests.py | 1 + 3 files changed, 4 insertions(+) diff --git a/AUTHORS b/AUTHORS index 93fe819e..aecdcaa9 100644 --- a/AUTHORS +++ b/AUTHORS @@ -3,3 +3,4 @@ Matt Dennewitz Deepak Thukral Florian Schlachter Steve Challis +Ross Lawley diff --git a/docs/changelog.rst b/docs/changelog.rst index 686b326f..58da0d94 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,8 @@ Changelog Changes in dev ============== +- Added query_counter context manager for tests +- Added DereferenceBaseField - for improved performance in field dereferencing - Added optional map_reduce method item_frequencies - Added inline_map_reduce option to map_reduce - Updated connection exception so it provides more info on the cause. diff --git a/mongoengine/tests.py b/mongoengine/tests.py index 4932bc2c..9584bc7c 100644 --- a/mongoengine/tests.py +++ b/mongoengine/tests.py @@ -1,5 +1,6 @@ from mongoengine.connection import _get_db + class query_counter(object): """ Query_counter contextmanager to get the number of queries. """ From 0e4507811611b80be6529f2376c5e3e9b4d5bdef Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 6 Jun 2011 11:34:43 +0100 Subject: [PATCH 097/214] Added Blinker signal support --- docs/changelog.rst | 1 + docs/guide/index.rst | 1 + mongoengine/__init__.py | 4 +++- mongoengine/signals.py | 3 +++ 4 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 58da0d94..659bdb4e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added blinker signal support - Added query_counter context manager for tests - Added DereferenceBaseField - for improved performance in field dereferencing - Added optional map_reduce method item_frequencies diff --git a/docs/guide/index.rst b/docs/guide/index.rst index aac72469..d56e7479 100644 --- a/docs/guide/index.rst +++ b/docs/guide/index.rst @@ -11,3 +11,4 @@ User Guide document-instances querying gridfs + signals diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 6d18ffe7..de635f96 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -6,9 +6,11 @@ import connection from connection import * import queryset from queryset import * +import signals +from signals import * __all__ = (document.__all__ + fields.__all__ + connection.__all__ + - queryset.__all__) + queryset.__all__ + signals.__all__) __author__ = 'Harry Marr' diff --git a/mongoengine/signals.py b/mongoengine/signals.py index 4caa5530..0a697534 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -1,5 +1,8 @@ # -*- coding: utf-8 -*- +__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', + 'pre_delete', 'post_delete'] + signals_available = False try: from blinker import Namespace From 74b5043ef9441938c6668af6eb510adccc8e531a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 6 Jun 2011 11:39:58 +0100 Subject: [PATCH 098/214] Added signals documentation --- docs/guide/signals.rst | 49 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 docs/guide/signals.rst diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst new file mode 100644 index 00000000..d80a421b --- /dev/null +++ b/docs/guide/signals.rst @@ -0,0 +1,49 @@ +.. _signals: + +Signals +======= + +.. versionadded:: 0.5 + +Signal support is provided by the excellent `blinker`_ library and +will gracefully fall back if it is not available. 
+ + +The following document signals exist in MongoEngine and are pretty self explaintary: + + * `mongoengine.signals.pre_init` + * `mongoengine.signals.post_init` + * `mongoengine.signals.pre_save` + * `mongoengine.signals.post_save` + * `mongoengine.signals.pre_delete` + * `mongoengine.signals.post_delete` + +Example usage:: + + from mongoengine import * + from mongoengine import signals + + class Author(Document): + name = StringField() + + def __unicode__(self): + return self.name + + @classmethod + def pre_save(cls, instance, **kwargs): + logging.debug("Pre Save: %s" % instance.name) + + @classmethod + def post_save(cls, instance, **kwargs): + logging.debug("Post Save: %s" % instance.name) + if 'created' in kwargs: + if kwargs['created']: + logging.debug("Created") + else: + logging.debug("Updated") + + signals.pre_save.connect(Author.pre_save) + signals.post_save.connect(Author.post_save) + + +.. _blinker: http://pypi.python.org/pypi/blinker \ No newline at end of file From 56f00a64d77655bee2d00ebd783d07655a6900ff Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 6 Jun 2011 12:37:06 +0100 Subject: [PATCH 099/214] Added bulk insert method. Updated changelog and added tests / query_counter tests --- docs/changelog.rst | 1 + mongoengine/queryset.py | 42 ++++++++++++++++++++- tests/queryset.py | 83 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 125 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 659bdb4e..29ecdf7a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added insert method for bulk inserts - Added blinker signal support - Added query_counter context manager for tests - Added DereferenceBaseField - for improved performance in field dereferencing diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 2de15ed4..0e87db7a 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -378,7 +378,7 @@ class QuerySet(object): """ index_spec = QuerySet._build_index_spec(self._document, key_or_list) self._collection.ensure_index( - index_spec['fields'], + index_spec['fields'], drop_dups=drop_dups, background=background, sparse=index_spec.get('sparse', False), @@ -719,6 +719,46 @@ class QuerySet(object): result = None return result + def insert(self, doc_or_docs, load_bulk=True): + """bulk insert documents + + :param docs_or_doc: a document or list of documents to be inserted + :param load_bulk (optional): If True returns the list of document instances + + By default returns document instances, set ``load_bulk`` to False to + return just ``ObjectIds`` + + .. versionadded:: 0.5 + """ + from document import Document + + docs = doc_or_docs + return_one = False + if isinstance(docs, Document) or issubclass(docs.__class__, Document): + return_one = True + docs = [docs] + + raw = [] + for doc in docs: + if not isinstance(doc, self._document): + msg = "Some documents inserted aren't instances of %s" % str(self._document) + raise OperationError(msg) + if doc.pk: + msg = "Some documents have ObjectIds use doc.update() instead" + raise OperationError(msg) + raw.append(doc.to_mongo()) + + ids = self._collection.insert(raw) + + if not load_bulk: + return return_one and ids[0] or ids + + documents = self.in_bulk(ids) + results = [] + for obj_id in ids: + results.append(documents.get(obj_id)) + return return_one and results[0] or results + def with_id(self, object_id): """Retrieve the object matching the id provided. 
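A usage sketch for the new ``insert`` method (illustrative only; the ``Post`` document
below is not part of the patch)::

    from mongoengine import Document, StringField, connect

    connect(db='mongoenginetest')

    class Post(Document):
        title = StringField()

    Post.drop_collection()

    # One insert for the whole batch; by default the documents are reloaded
    # via in_bulk and returned as Post instances
    posts = Post.objects.insert([Post(title='one'), Post(title='two')])
    assert all(isinstance(p, Post) for p in posts)

    # load_bulk=False skips the reload and returns the new ObjectIds instead
    ids = Post.objects.insert([Post(title='three'), Post(title='four')],
                              load_bulk=False)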
diff --git a/tests/queryset.py b/tests/queryset.py
index 8d046902..0b64e3e9 100644
--- a/tests/queryset.py
+++ b/tests/queryset.py
@@ -9,6 +9,7 @@ from mongoengine.queryset import (QuerySet, QuerySetManager,
                                    MultipleObjectsReturned, DoesNotExist,
                                    QueryFieldList)
 from mongoengine import *
+from mongoengine.tests import query_counter
 
 
 class QuerySetTest(unittest.TestCase):
@@ -331,6 +332,88 @@ class QuerySetTest(unittest.TestCase):
         person = self.Person.objects.get(age=50)
         self.assertEqual(person.name, "User C")
 
+    def test_bulk_insert(self):
+        """Ensure that bulk insert works.
+        """
+
+        class Comment(EmbeddedDocument):
+            name = StringField()
+
+        class Post(EmbeddedDocument):
+            comments = ListField(EmbeddedDocumentField(Comment))
+
+        class Blog(Document):
+            title = StringField()
+            tags = ListField(StringField())
+            posts = ListField(EmbeddedDocumentField(Post))
+
+        Blog.drop_collection()
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            comment1 = Comment(name='testa')
+            comment2 = Comment(name='testb')
+            post1 = Post(comments=[comment1, comment2])
+            post2 = Post(comments=[comment2, comment2])
+
+            blogs = []
+            for i in xrange(1, 100):
+                blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))
+
+            Blog.objects.insert(blogs, load_bulk=False)
+            self.assertEqual(q, 2) # 1 for the initial connection and 1 for the insert
+
+            Blog.objects.insert(blogs)
+            self.assertEqual(q, 4) # 1 for insert, and 1 for in bulk
+
+        Blog.drop_collection()
+
+        comment1 = Comment(name='testa')
+        comment2 = Comment(name='testb')
+        post1 = Post(comments=[comment1, comment2])
+        post2 = Post(comments=[comment2, comment2])
+        blog1 = Blog(title="code", posts=[post1, post2])
+        blog2 = Blog(title="mongodb", posts=[post2, post1])
+        blog1, blog2 = Blog.objects.insert([blog1, blog2])
+        self.assertEqual(blog1.title, "code")
+        self.assertEqual(blog2.title, "mongodb")
+
+        self.assertEqual(Blog.objects.count(), 2)
+
+        # test handles people trying to upsert
+        def throw_operation_error():
+            blogs = Blog.objects
+            Blog.objects.insert(blogs)
+
+        self.assertRaises(OperationError, throw_operation_error)
+
+        # test handles other classes being inserted
+        def throw_operation_error_wrong_doc():
+            class Author(Document):
+                pass
+            Blog.objects.insert(Author())
+
+        self.assertRaises(OperationError, throw_operation_error_wrong_doc)
+
+        def throw_operation_error_not_a_document():
+            Blog.objects.insert("HELLO WORLD")
+
+        self.assertRaises(OperationError, throw_operation_error_not_a_document)
+
+        Blog.drop_collection()
+
+        blog1 = Blog(title="code", posts=[post1, post2])
+        blog1 = Blog.objects.insert(blog1)
+        self.assertEqual(blog1.title, "code")
+        self.assertEqual(Blog.objects.count(), 1)
+
+        Blog.drop_collection()
+        blog1 = Blog(title="code", posts=[post1, post2])
+        obj_id = Blog.objects.insert(blog1, load_bulk=False)
+        self.assertEquals(obj_id.__class__.__name__, 'ObjectId')
+
+
     def test_repeated_iteration(self):
         """Ensure that QuerySet rewinds itself one iteration finishes.
         """
From 55e20bda12ea6ee7a39d6d5ebdf124bfb5cc4689 Mon Sep 17 00:00:00 2001
From: Ross Lawley
Date: Mon, 6 Jun 2011 14:35:46 +0100
Subject: [PATCH 100/214] Added slave_okay syntax to querysets.

* slave_okay (optional): if True, allows this query to be run against a
  replica secondary.
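In this first form the flag is passed as a keyword when calling the queryset, as the
test added below shows. A self-contained sketch (the ``Person`` document mirrors the
test suite's and is illustrative only)::

    from mongoengine import Document, StringField, IntField, connect

    connect(db='mongoenginetest')

    class Person(Document):
        name = StringField()
        age = IntField()

    Person(name='User A', age=20).save()

    # slave_okay=True allows the find() to be served by a replica secondary
    person = Person.objects(slave_okay=True).first()

A later patch in this series reworks this into a chainable ``slave_okay(enabled)``
method alongside ``snapshot()`` and ``timeout()``.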
--- mongoengine/queryset.py | 7 ++++++- tests/queryset.py | 13 +++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 0e87db7a..7b4fef35 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -336,6 +336,7 @@ class QuerySet(object): self._snapshot = False self._timeout = True self._class_check = True + self._slave_okay = False # If inheritance is allowed, only return instances and instances of # subclasses of the class being used @@ -430,7 +431,7 @@ class QuerySet(object): return spec - def __call__(self, q_obj=None, class_check=True, **query): + def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. @@ -440,6 +441,8 @@ class QuerySet(object): objects, only the last one will be used :param class_check: If set to False bypass class name check when querying collection + :param slave_okay: if True, allows this query to be run against a + replica secondary. :param query: Django-style query keyword arguments """ query = Q(**query) @@ -449,6 +452,7 @@ class QuerySet(object): self._mongo_query = None self._cursor_obj = None self._class_check = class_check + self._slave_okay = slave_okay return self def filter(self, *q_objs, **query): @@ -506,6 +510,7 @@ class QuerySet(object): cursor_args = { 'snapshot': self._snapshot, 'timeout': self._timeout, + 'slave_okay': self._slave_okay } if self._loaded_fields: cursor_args['fields'] = self._loaded_fields.as_dict() diff --git a/tests/queryset.py b/tests/queryset.py index 0b64e3e9..28d44861 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -413,6 +413,19 @@ class QuerySetTest(unittest.TestCase): obj_id = Blog.objects.insert(blog1, load_bulk=False) self.assertEquals(obj_id.__class__.__name__, 'ObjectId') + def test_slave_okay(self): + """Ensures that a query can take slave_okay syntax + """ + person1 = self.Person(name="User A", age=20) + person1.save() + person2 = self.Person(name="User B", age=30) + person2.save() + + # Retrieve the first person from the database + person = self.Person.objects(slave_okay=True).first() + self.assertTrue(isinstance(person, self.Person)) + self.assertEqual(person.name, "User A") + self.assertEqual(person.age, 20) def test_repeated_iteration(self): """Ensure that QuerySet rewinds itself one iteration finishes. From 711db45c022cae092069432d42e9267411f80008 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 6 Jun 2011 14:36:44 +0100 Subject: [PATCH 101/214] Changelist updated --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 29ecdf7a..ed877ebb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added slave_okay kwarg to queryset - Added insert method for bulk inserts - Added blinker signal support - Added query_counter context manager for tests From cfcd77b193da1eb03ef5632f88cd2189f58b2974 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 8 Jun 2011 10:33:56 +0100 Subject: [PATCH 102/214] Added tests displaying datetime behaviour. 
Updated datetimefield documentation --- mongoengine/fields.py | 4 +++ tests/fields.py | 60 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index dc03fc05..1995d345 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -229,6 +229,10 @@ class BooleanField(BaseField): class DateTimeField(BaseField): """A datetime field. + + Note: Microseconds are rounded to the nearest millisecond. + Pre UTC microsecond support is effecively broken see + `tests.field.test_datetime` for more information. """ def validate(self, value): diff --git a/tests/fields.py b/tests/fields.py index 00b1c886..320e33db 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -187,6 +187,66 @@ class FieldTest(unittest.TestCase): log.time = '1pm' self.assertRaises(ValidationError, log.validate) + def test_datetime(self): + """Tests showing pymongo datetime fields handling of microseconds. + Microseconds are rounded to the nearest millisecond and pre UTC + handling is wonky. + + See: http://api.mongodb.org/python/current/api/bson/son.html#dt + """ + class LogEntry(Document): + date = DateTimeField() + + LogEntry.drop_collection() + + # Post UTC - microseconds are rounded (down) nearest millisecond and dropped + d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) + d2 = datetime.datetime(1970, 01, 01, 00, 00, 01) + log = LogEntry() + log.date = d1 + log.save() + log.reload() + self.assertNotEquals(log.date, d1) + self.assertEquals(log.date, d2) + + # Post UTC - microseconds are rounded (down) nearest millisecond + d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) + d2 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9000) + log.date = d1 + log.save() + log.reload() + self.assertNotEquals(log.date, d1) + self.assertEquals(log.date, d2) + + # Pre UTC dates microseconds below 1000 are dropped + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) + d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) + log.date = d1 + log.save() + log.reload() + self.assertNotEquals(log.date, d1) + self.assertEquals(log.date, d2) + + # Pre UTC microseconds above 1000 is wonky. + # log.date has an invalid microsecond value so I can't construct + # a date to compare. + # + # However, the timedelta is predicable with pre UTC timestamps + # It always adds 16 seconds and [777216-776217] microseconds + for i in xrange(1001, 3113, 33): + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) + log.date = d1 + log.save() + log.reload() + self.assertNotEquals(log.date, d1) + + delta = log.date - d1 + self.assertEquals(delta.seconds, 16) + microseconds = 777216 - (i % 1000) + self.assertEquals(delta.microseconds, microseconds) + + LogEntry.drop_collection() + def test_list_validation(self): """Ensure that a list field only accepts lists with valid elements. 
""" From d15f5ccbf43e31557c43eb238028537e9a59c089 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 8 Jun 2011 10:41:08 +0100 Subject: [PATCH 103/214] Added _slave_okay to clone --- mongoengine/queryset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 7b4fef35..a1e1245f 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -353,7 +353,7 @@ class QuerySet(object): copy_props = ('_initial_query', '_query_obj', '_where_clause', '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_limit', '_skip') + '_timeout', '_limit', '_skip', '_slave_okay') for prop in copy_props: val = getattr(self, prop) From 3c88faa889e01071c6953992307112f20140f2f7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 8 Jun 2011 12:06:26 +0100 Subject: [PATCH 104/214] Updated slave_okay syntax Now inline with .timeout() and .snapshot(). Made them chainable - so its easier to use and added tests for cursor_args --- mongoengine/queryset.py | 37 ++++++++++++++++++++++++++----------- tests/queryset.py | 26 +++++++++++++++++++++++++- 2 files changed, 51 insertions(+), 12 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index a1e1245f..f542cc87 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -452,7 +452,6 @@ class QuerySet(object): self._mongo_query = None self._cursor_obj = None self._class_check = class_check - self._slave_okay = slave_okay return self def filter(self, *q_objs, **query): @@ -504,18 +503,23 @@ class QuerySet(object): return self._collection_obj + @property + def _cursor_args(self): + cursor_args = { + 'snapshot': self._snapshot, + 'timeout': self._timeout, + 'slave_okay': self._slave_okay + } + if self._loaded_fields: + cursor_args['fields'] = self._loaded_fields.as_dict() + return cursor_args + @property def _cursor(self): if self._cursor_obj is None: - cursor_args = { - 'snapshot': self._snapshot, - 'timeout': self._timeout, - 'slave_okay': self._slave_okay - } - if self._loaded_fields: - cursor_args['fields'] = self._loaded_fields.as_dict() + self._cursor_obj = self._collection.find(self._query, - **cursor_args) + **self._cursor_args) # Apply where clauses to cursor if self._where_clause: self._cursor_obj.where(self._where_clause) @@ -772,7 +776,7 @@ class QuerySet(object): id_field = self._document._meta['id_field'] object_id = self._document._fields[id_field].to_mongo(object_id) - result = self._collection.find_one({'_id': object_id}) + result = self._collection.find_one({'_id': object_id}, **self._cursor_args) if result is not None: result = self._document._from_son(result) return result @@ -788,7 +792,8 @@ class QuerySet(object): """ doc_map = {} - docs = self._collection.find({'_id': {'$in': object_ids}}) + docs = self._collection.find({'_id': {'$in': object_ids}}, + **self._cursor_args) for doc in docs: doc_map[doc['_id']] = self._document._from_son(doc) @@ -1085,6 +1090,7 @@ class QuerySet(object): :param enabled: whether or not snapshot mode is enabled """ self._snapshot = enabled + return self def timeout(self, enabled): """Enable or disable the default mongod timeout when querying. @@ -1092,6 +1098,15 @@ class QuerySet(object): :param enabled: whether or not the timeout is used """ self._timeout = enabled + return self + + def slave_okay(self, enabled): + """Enable or disable the slave_okay when querying. 
+ + :param enabled: whether or not the slave_okay is enabled + """ + self._slave_okay = enabled + return self def delete(self, safe=False): """Delete the documents matched by the query. diff --git a/tests/queryset.py b/tests/queryset.py index 28d44861..1947254b 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -422,11 +422,35 @@ class QuerySetTest(unittest.TestCase): person2.save() # Retrieve the first person from the database - person = self.Person.objects(slave_okay=True).first() + person = self.Person.objects.slave_okay(True).first() self.assertTrue(isinstance(person, self.Person)) self.assertEqual(person.name, "User A") self.assertEqual(person.age, 20) + def test_cursor_args(self): + """Ensures the cursor args can be set as expected + """ + p = self.Person.objects + # Check default + self.assertEqual(p._cursor_args, + {'snapshot': False, 'slave_okay': False, 'timeout': True}) + + p.snapshot(False).slave_okay(False).timeout(False) + self.assertEqual(p._cursor_args, + {'snapshot': False, 'slave_okay': False, 'timeout': False}) + + p.snapshot(True).slave_okay(False).timeout(False) + self.assertEqual(p._cursor_args, + {'snapshot': True, 'slave_okay': False, 'timeout': False}) + + p.snapshot(True).slave_okay(True).timeout(False) + self.assertEqual(p._cursor_args, + {'snapshot': True, 'slave_okay': True, 'timeout': False}) + + p.snapshot(True).slave_okay(True).timeout(True) + self.assertEqual(p._cursor_args, + {'snapshot': True, 'slave_okay': True, 'timeout': True}) + def test_repeated_iteration(self): """Ensure that QuerySet rewinds itself one iteration finishes. """ From 7c62fdc0b82f13bae0796b0d749ecb87002240a7 Mon Sep 17 00:00:00 2001 From: Colin Howe Date: Wed, 8 Jun 2011 12:20:58 +0100 Subject: [PATCH 105/214] Allow for types to never be auto-prepended to indices --- mongoengine/queryset.py | 9 ++++++--- tests/queryset.py | 16 ++++++++++++++++ 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 17a1b0da..303afb6a 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -410,8 +410,10 @@ class QuerySet(object): if use_types and not all(f._index_with_types for f in fields): use_types = False - # If _types is being used, prepend it to every specified index - if doc_cls._meta.get('allow_inheritance') and use_types: + # If _types is being used, create an index for it + index_types = doc_cls._meta.get('index_types', True) + allow_inheritance = doc_cls._meta.get('allow_inheritance') + if index_types and allow_inheritance and use_types: index_list.insert(0, ('_types', 1)) return index_list @@ -457,6 +459,7 @@ class QuerySet(object): background = self._document._meta.get('index_background', False) drop_dups = self._document._meta.get('index_drop_dups', False) index_opts = self._document._meta.get('index_options', {}) + index_types = self._document._meta.get('index_types', True) # Ensure indexes created by uniqueness constraints for index in self._document._meta['unique_indexes']: @@ -470,7 +473,7 @@ class QuerySet(object): background=background, **index_opts) # If _types is being used (for polymorphism), it needs an index - if '_types' in self._query: + if index_types and '_types' in self._query: self._collection.ensure_index('_types', background=background, **index_opts) diff --git a/tests/queryset.py b/tests/queryset.py index 1f03fbd9..1e5e7a5a 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1710,6 +1710,22 @@ class QuerySetTest(unittest.TestCase): self.assertTrue([('_types', 1)] in info) 
self.assertTrue([('_types', 1), ('date', -1)] in info) + def test_dont_index_types(self): + """Ensure that index_types will, when disabled, prevent _types + being added to all indices. + """ + class BlogPost(Document): + date = DateTimeField() + meta = {'index_types': False, + 'indexes': ['-date']} + + # Indexes are lazy so use list() to perform query + list(BlogPost.objects) + info = BlogPost.objects._collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertTrue([('_types', 1)] not in info) + self.assertTrue([('date', -1)] in info) + BlogPost.drop_collection() class BlogPost(Document): From aa32d4301479a7cd45071ca3e5607ebe319f225e Mon Sep 17 00:00:00 2001 From: Colin Howe Date: Wed, 8 Jun 2011 12:36:32 +0100 Subject: [PATCH 106/214] Pydoc update --- mongoengine/document.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mongoengine/document.py b/mongoengine/document.py index b563f427..cae8343d 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -53,6 +53,11 @@ class Document(BaseDocument): dictionary. The value should be a list of field names or tuples of field names. Index direction may be specified by prefixing the field names with a **+** or **-** sign. + + By default, _types will be added to the start of every index (that + doesn't contain a list) if allow_inheritence is True. This can be + disabled by either setting types to False on the specific index or + by setting index_types to False on the meta dictionary for the document. """ __metaclass__ = TopLevelDocumentMetaclass From 6dc2672dbab4d0914e838c4df867daa911a33dcf Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 8 Jun 2011 13:03:42 +0100 Subject: [PATCH 107/214] Updated changelog --- docs/changelog.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ed877ebb..0a2a273f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,7 +5,8 @@ Changelog Changes in dev ============== -- Added slave_okay kwarg to queryset +- Added queryset.slave_okay(enabled) method +- Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable - Added insert method for bulk inserts - Added blinker signal support - Added query_counter context manager for tests From d32dd9ff62c0984af5062a4b52f974bb009b22a3 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 8 Jun 2011 13:07:08 +0100 Subject: [PATCH 108/214] Added _get_FIELD_display() for handy choice field display lookups closes #188 --- docs/changelog.rst | 1 + mongoengine/base.py | 12 +++++++++++- tests/fields.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 41 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0a2a273f..c76b1154 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added get_FIELD_display() method for easy choice field displaying. 
- Added queryset.slave_okay(enabled) method - Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable - Added insert method for bulk inserts diff --git a/mongoengine/base.py b/mongoengine/base.py index 76bb1ab7..3875fea5 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -8,6 +8,7 @@ import sys import pymongo import pymongo.objectid from operator import itemgetter +from functools import partial class NotRegistered(Exception): @@ -61,6 +62,7 @@ class BaseField(object): self.primary_key = primary_key self.validation = validation self.choices = choices + # Adjust the appropriate creation counter, and save our local copy. if self.db_field == '_id': self.creation_counter = BaseField.auto_creation_counter @@ -471,7 +473,10 @@ class BaseDocument(object): self._data = {} # Assign default values to instance - for attr_name in self._fields.keys(): + for attr_name, field in self._fields.items(): + if field.choices: # dynamically adds a way to get the display value for a field with choices + setattr(self, 'get_%s_display' % attr_name, partial(self._get_FIELD_display, field=field)) + # Use default value if present value = getattr(self, attr_name, None) setattr(self, attr_name, value) @@ -484,6 +489,11 @@ class BaseDocument(object): signals.post_init.send(self) + def _get_FIELD_display(self, field): + """Returns the display value for a choice field""" + value = getattr(self, field.name) + return dict(field.choices).get(value, value) + def validate(self): """Ensure that all fields' values are valid and that required fields are present. diff --git a/tests/fields.py b/tests/fields.py index 320e33db..d8970043 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -773,6 +773,35 @@ class FieldTest(unittest.TestCase): Shirt.drop_collection() + def test_choices_get_field_display(self): + """Test dynamic helper for returning the display value of a choices field. + """ + class Shirt(Document): + size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), + ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) + style = StringField(max_length=3, choices=(('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S') + + Shirt.drop_collection() + + shirt = Shirt() + + self.assertEqual(shirt.get_size_display(), None) + self.assertEqual(shirt.get_style_display(), 'Small') + + shirt.size = "XXL" + shirt.style = "B" + self.assertEqual(shirt.get_size_display(), 'Extra Extra Large') + self.assertEqual(shirt.get_style_display(), 'Baggy') + + # Set as Z - an invalid choice + shirt.size = "Z" + shirt.style = "Z" + self.assertEqual(shirt.get_size_display(), 'Z') + self.assertEqual(shirt.get_style_display(), 'Z') + self.assertRaises(ValidationError, shirt.validate) + + Shirt.drop_collection() + def test_file_fields(self): """Ensure that file fields can be written to and their data retrieved """ From 602d7dad0020937364f7076a1930d46209d6009d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 8 Jun 2011 17:10:26 +0100 Subject: [PATCH 109/214] Improvements to Abstract Base Classes Added test example highlighting what to do to migrate a class from complex (allows inheritance) to simple. 
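
As an illustration of the new behaviour, a minimal sketch in the spirit of the
test added by this patch (it assumes a running MongoDB instance and the
'mongoenginetest' database name used elsewhere in the test suite):

    from mongoengine import Document, StringField, connect

    connect(db='mongoenginetest')

    class FinalDocument(Document):
        # abstract: no collection is created for this class itself
        # allow_inheritance=False: concrete subclasses may not be subclassed
        # further and are stored without the _cls / _types fields
        meta = {'abstract': True, 'allow_inheritance': False}

    class Animal(FinalDocument):
        name = StringField()

    Animal(name='dog').save()
    # the stored document contains only _id and name - no _cls, no _types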
--- mongoengine/base.py | 13 +++++-- tests/document.py | 90 ++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 97 insertions(+), 6 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 3875fea5..8a0a1f23 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -263,7 +263,7 @@ class DocumentMetaclass(type): superclasses[base._class_name] = base superclasses.update(base._superclasses) - if hasattr(base, '_meta'): + if hasattr(base, '_meta') and not base._meta.get('abstract'): # Ensure that the Document class may be subclassed - # inheritance may be disabled to remove dependency on # additional fields _cls and _types @@ -280,7 +280,7 @@ class DocumentMetaclass(type): # Only simple classes - direct subclasses of Document - may set # allow_inheritance to False - if not simple_class and not meta['allow_inheritance']: + if not simple_class and not meta['allow_inheritance'] and not meta['abstract']: raise ValueError('Only direct subclasses of Document may set ' '"allow_inheritance" to False') attrs['_meta'] = meta @@ -360,8 +360,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # Subclassed documents inherit collection from superclass for base in bases: - if hasattr(base, '_meta') and 'collection' in base._meta: - collection = base._meta['collection'] + if hasattr(base, '_meta'): + if 'collection' in base._meta: + collection = base._meta['collection'] # Propagate index options. for key in ('index_background', 'index_drop_dups', 'index_opts'): @@ -370,6 +371,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): id_field = id_field or base._meta.get('id_field') base_indexes += base._meta.get('indexes', []) + # Propagate 'allow_inheritance' + if 'allow_inheritance' in base._meta: + base_meta['allow_inheritance'] = base._meta['allow_inheritance'] meta = { 'abstract': False, @@ -384,6 +388,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): 'index_opts': {}, 'queryset_class': QuerySet, 'delete_rules': {}, + 'allow_inheritance': True } meta.update(base_meta) diff --git a/tests/document.py b/tests/document.py index a8120469..14541469 100644 --- a/tests/document.py +++ b/tests/document.py @@ -151,12 +151,12 @@ class DocumentTest(unittest.TestCase): """Ensure that inheritance may be disabled on simple classes and that _cls and _types will not be used. """ + class Animal(Document): - meta = {'allow_inheritance': False} name = StringField() + meta = {'allow_inheritance': False} Animal.drop_collection() - def create_dog_class(): class Dog(Animal): pass @@ -191,6 +191,92 @@ class DocumentTest(unittest.TestCase): self.assertFalse('_cls' in comment.to_mongo()) self.assertFalse('_types' in comment.to_mongo()) + def test_allow_inheritance_abstract_document(self): + """Ensure that abstract documents can set inheritance rules and that + _cls and _types will not be used. + """ + class FinalDocument(Document): + meta = {'abstract': True, + 'allow_inheritance': False} + + class Animal(FinalDocument): + name = StringField() + + Animal.drop_collection() + def create_dog_class(): + class Dog(Animal): + pass + self.assertRaises(ValueError, create_dog_class) + + # Check that _cls etc aren't present on simple documents + dog = Animal(name='dog') + dog.save() + collection = self.db[Animal._meta['collection']] + obj = collection.find_one() + self.assertFalse('_cls' in obj) + self.assertFalse('_types' in obj) + + Animal.drop_collection() + + def test_how_to_turn_off_inheritance(self): + """Demonstrates migrating from allow_inheritance = True to False. 
+ """ + class Animal(Document): + name = StringField() + meta = { + 'indexes': ['name'] + } + + Animal.drop_collection() + + dog = Animal(name='dog') + dog.save() + + collection = self.db[Animal._meta['collection']] + obj = collection.find_one() + self.assertTrue('_cls' in obj) + self.assertTrue('_types' in obj) + + info = collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertEquals([[(u'_id', 1)], [(u'_types', 1)], [(u'_types', 1), (u'name', 1)]], info) + + # Turn off inheritance + class Animal(Document): + name = StringField() + meta = { + 'allow_inheritance': False, + 'indexes': ['name'] + } + collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, False, True) + + # Confirm extra data is removed + obj = collection.find_one() + self.assertFalse('_cls' in obj) + self.assertFalse('_types' in obj) + + info = collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertEquals([[(u'_id', 1)], [(u'_types', 1)], [(u'_types', 1), (u'name', 1)]], info) + + info = collection.index_information() + indexes_to_drop = [key for key, value in info.iteritems() if '_types' in dict(value['key'])] + for index in indexes_to_drop: + collection.drop_index(index) + + info = collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertEquals([[(u'_id', 1)]], info) + + # Recreate indexes + dog = Animal.objects.first() + dog.save() + info = collection.index_information() + info = [value['key'] for key, value in info.iteritems()] + self.assertEquals([[(u'_id', 1)], [(u'name', 1),]], info) + + Animal.drop_collection() + def test_abstract_documents(self): """Ensure that a document superclass can be marked as abstract thereby not using it as the name for the collection.""" From 4b9bacf7316275d2c0c1efa7b5850b98374679cc Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 6 Jun 2011 17:21:54 +0100 Subject: [PATCH 110/214] Added ComplexBaseField * Handles the efficient lazy dereferencing of DBrefs. * Handles complex nested values in ListFields and DictFields * Allows for both strictly declared ListFields and DictFields where the embedded value must be of a field type or no restrictions where the values can be a mix of field types / values. * Handles DBrefences of documents where allow_inheritance = False. --- mongoengine/base.py | 206 +++++++++++++++++++++------- mongoengine/fields.py | 102 +++----------- mongoengine/queryset.py | 47 +++++-- tests/dereference.py | 112 +++++++++++++++- tests/fields.py | 287 +++++++++++++++++++++++++++++++--------- 5 files changed, 555 insertions(+), 199 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 8a0a1f23..a22795c7 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -132,15 +132,19 @@ class BaseField(object): self.validate(value) -class DereferenceBaseField(BaseField): - """Handles the lazy dereferencing of a queryset. Will dereference all +class ComplexBaseField(BaseField): + """Handles complex fields, such as lists / dictionaries. + + Allows for nesting of embedded documents inside complex types. + Handles the lazy dereferencing of a queryset by lazily dereferencing all items in a list / dict rather than one at a time. """ + field = None + def __get__(self, instance, owner): """Descriptor to automatically dereference references. 
""" - from fields import ReferenceField, GenericReferenceField from connection import _get_db if instance is None: @@ -149,68 +153,175 @@ class DereferenceBaseField(BaseField): # Get value from document instance if available value_list = instance._data.get(self.name) - if not value_list: - return super(DereferenceBaseField, self).__get__(instance, owner) + if not value_list or isinstance(value_list, basestring): + return super(ComplexBaseField, self).__get__(instance, owner) is_list = False if not hasattr(value_list, 'items'): is_list = True value_list = dict([(k,v) for k,v in enumerate(value_list)]) - if isinstance(self.field, ReferenceField) and value_list: - db = _get_db() - dbref = {} - collections = {} + for k,v in value_list.items(): + if isinstance(v, dict) and '_cls' in v and '_ref' not in v: + value_list[k] = get_document(v['_cls'].split('.')[-1])._from_son(v) - for k, v in value_list.items(): - dbref[k] = v - # Save any DBRefs + # Handle all dereferencing + db = _get_db() + dbref = {} + collections = {} + for k, v in value_list.items(): + dbref[k] = v + # Save any DBRefs + if isinstance(v, (pymongo.dbref.DBRef)): + # direct reference (DBRef) + collections.setdefault(v.collection, []).append((k, v)) + elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: + # generic reference + collection = get_document(v['_cls'])._meta['collection'] + collections.setdefault(collection, []).append((k, v)) + + # For each collection get the references + for collection, dbrefs in collections.items(): + id_map = {} + for k, v in dbrefs: if isinstance(v, (pymongo.dbref.DBRef)): - collections.setdefault(v.collection, []).append((k, v)) + # direct reference (DBRef), has no _cls information + id_map[v.id] = (k, None) + elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: + # generic reference - includes _cls information + id_map[v['_ref'].id] = (k, get_document(v['_cls'])) - # For each collection get the references - for collection, dbrefs in collections.items(): - id_map = dict([(v.id, k) for k, v in dbrefs]) - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - for ref in references: - key = id_map[ref['_id']] - dbref[key] = get_document(ref['_cls'])._from_son(ref) + references = db[collection].find({'_id': {'$in': id_map.keys()}}) + for ref in references: + key, doc_cls = id_map[ref['_id']] + if not doc_cls: # If no doc_cls get it from the referenced doc + doc_cls = get_document(ref['_cls']) + dbref[key] = doc_cls._from_son(ref) - if is_list: - dbref = [v for k,v in sorted(dbref.items(), key=itemgetter(0))] - instance._data[self.name] = dbref + if is_list: + dbref = [v for k,v in sorted(dbref.items(), key=itemgetter(0))] + instance._data[self.name] = dbref + return super(ComplexBaseField, self).__get__(instance, owner) - # Get value from document instance if available - if isinstance(self.field, GenericReferenceField) and value_list: - db = _get_db() - value_list = [(k,v) for k,v in value_list.items()] - dbref = {} - classes = {} + def to_python(self, value): + """Convert a MongoDB-compatible type to a Python type. 
+ """ + from mongoengine import Document - for k, v in value_list: - dbref[k] = v - # Save any DBRefs - if isinstance(v, (dict, pymongo.son.SON)): - classes.setdefault(v['_cls'], []).append((k, v)) + if isinstance(value, basestring): + return value - # For each collection get the references - for doc_cls, dbrefs in classes.items(): - id_map = dict([(v['_ref'].id, k) for k, v in dbrefs]) - doc_cls = get_document(doc_cls) - collection = doc_cls._meta['collection'] - references = db[collection].find({'_id': {'$in': id_map.keys()}}) + if hasattr(value, 'to_python'): + return value.to_python() - for ref in references: - key = id_map[ref['_id']] - dbref[key] = doc_cls._from_son(ref) + is_list = False + if not hasattr(value, 'items'): + try: + is_list = True + value = dict([(k,v) for k,v in enumerate(value)]) + except TypeError: # Not iterable return the value + return value - if is_list: - dbref = [v for k,v in sorted(dbref.items(), key=itemgetter(0))] + if self.field: + value_dict = dict([(key, self.field.to_python(item)) for key, item in value.items()]) + else: + value_dict = {} + for k,v in value.items(): + if isinstance(v, Document): + # We need the id from the saved object to create the DBRef + if v.pk is None: + raise ValidationError('You can only reference documents once ' + 'they have been saved to the database') + collection = v._meta['collection'] + value_dict[k] = pymongo.dbref.DBRef(collection, v.pk) + elif hasattr(v, 'to_python'): + value_dict[k] = v.to_python() + else: + value_dict[k] = self.to_python(v) - instance._data[self.name] = dbref + if is_list: # Convert back to a list + return [v for k,v in sorted(value_dict.items(), key=itemgetter(0))] + return value_dict - return super(DereferenceBaseField, self).__get__(instance, owner) + def to_mongo(self, value): + """Convert a Python type to a MongoDB-compatible type. + """ + from mongoengine import Document + if isinstance(value, basestring): + return value + + if hasattr(value, 'to_mongo'): + return value.to_mongo() + + is_list = False + if not hasattr(value, 'items'): + try: + is_list = True + value = dict([(k,v) for k,v in enumerate(value)]) + except TypeError: # Not iterable return the value + return value + + if self.field: + value_dict = dict([(key, self.field.to_mongo(item)) for key, item in value.items()]) + else: + value_dict = {} + for k,v in value.items(): + if isinstance(v, Document): + # We need the id from the saved object to create the DBRef + if v.pk is None: + raise ValidationError('You can only reference documents once ' + 'they have been saved to the database') + + # If its a document that is not inheritable it won't have + # _types / _cls data so make it a generic reference allows + # us to dereference + meta = getattr(v, 'meta', getattr(v, '_meta', {})) + if meta and not meta['allow_inheritance'] and not self.field: + from fields import GenericReferenceField + value_dict[k] = GenericReferenceField().to_mongo(v) + else: + collection = v._meta['collection'] + value_dict[k] = pymongo.dbref.DBRef(collection, v.pk) + elif hasattr(v, 'to_mongo'): + value_dict[k] = v.to_mongo() + else: + value_dict[k] = self.to_mongo(v) + + if is_list: # Convert back to a list + return [v for k,v in sorted(value_dict.items(), key=itemgetter(0))] + return value_dict + + def validate(self, value): + """If field provided ensure the value is valid. 
+ """ + if self.field: + try: + if hasattr(value, 'iteritems'): + [self.field.validate(v) for k,v in value.iteritems()] + else: + [self.field.validate(v) for v in value] + except Exception, err: + raise ValidationError('Invalid %s item (%s)' % ( + self.field.__class__.__name__, str(v))) + + def prepare_query_value(self, op, value): + return self.to_mongo(value) + + def lookup_member(self, member_name): + if self.field: + return self.field.lookup_member(member_name) + return None + + def _set_owner_document(self, owner_document): + if self.field: + self.field.owner_document = owner_document + self._owner_document = owner_document + + def _get_owner_document(self, owner_document): + self._owner_document = owner_document + + owner_document = property(_get_owner_document, _set_owner_document) class ObjectIdField(BaseField): @@ -219,7 +330,6 @@ class ObjectIdField(BaseField): def to_python(self, value): return value - # return unicode(value) def to_mongo(self, value): if not isinstance(value, pymongo.objectid.ObjectId): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 1995d345..f9b2580b 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -1,4 +1,4 @@ -from base import (BaseField, DereferenceBaseField, ObjectIdField, +from base import (BaseField, ComplexBaseField, ObjectIdField, ValidationError, get_document) from queryset import DO_NOTHING from document import Document, EmbeddedDocument @@ -301,6 +301,8 @@ class EmbeddedDocumentField(BaseField): return value def to_mongo(self, value): + if isinstance(value, basestring): + return value return self.document_type.to_mongo(value) def validate(self, value): @@ -320,7 +322,7 @@ class EmbeddedDocumentField(BaseField): return self.to_mongo(value) -class ListField(DereferenceBaseField): +class ListField(ComplexBaseField): """A list field that wraps a standard field, allowing multiple instances of the field to be used as a list in the database. """ @@ -328,48 +330,25 @@ class ListField(DereferenceBaseField): # ListFields cannot be indexed with _types - MongoDB doesn't support this _index_with_types = False - def __init__(self, field, **kwargs): - if not isinstance(field, BaseField): - raise ValidationError('Argument to ListField constructor must be ' - 'a valid field') + def __init__(self, field=None, **kwargs): self.field = field kwargs.setdefault('default', lambda: []) super(ListField, self).__init__(**kwargs) - def to_python(self, value): - return [self.field.to_python(item) for item in value] - - def to_mongo(self, value): - return [self.field.to_mongo(item) for item in value] - def validate(self, value): """Make sure that a list of valid fields is being used. 
""" if not isinstance(value, (list, tuple)): raise ValidationError('Only lists and tuples may be used in a ' 'list field') - - try: - [self.field.validate(item) for item in value] - except Exception, err: - raise ValidationError('Invalid ListField item (%s)' % str(item)) + super(ListField, self).validate(value) def prepare_query_value(self, op, value): - if op in ('set', 'unset'): - return [self.field.prepare_query_value(op, v) for v in value] - return self.field.prepare_query_value(op, value) - - def lookup_member(self, member_name): - return self.field.lookup_member(member_name) - - def _set_owner_document(self, owner_document): - self.field.owner_document = owner_document - self._owner_document = owner_document - - def _get_owner_document(self, owner_document): - self._owner_document = owner_document - - owner_document = property(_get_owner_document, _set_owner_document) + if self.field: + if op in ('set', 'unset') and not isinstance(value, basestring): + return [self.field.prepare_query_value(op, v) for v in value] + return self.field.prepare_query_value(op, value) + return super(ListField, self).prepare_query_value(op, value) class SortedListField(ListField): @@ -388,20 +367,21 @@ class SortedListField(ListField): super(SortedListField, self).__init__(field, **kwargs) def to_mongo(self, value): + value = super(SortedListField, self).to_mongo(value) if self._ordering is not None: - return sorted([self.field.to_mongo(item) for item in value], - key=itemgetter(self._ordering)) - return sorted([self.field.to_mongo(item) for item in value]) + return sorted(value, key=itemgetter(self._ordering)) + return sorted(value) -class DictField(BaseField): +class DictField(ComplexBaseField): """A dictionary field that wraps a standard Python dictionary. This is similar to an embedded document, but the structure is not defined. .. versionadded:: 0.3 """ - def __init__(self, basecls=None, *args, **kwargs): + def __init__(self, basecls=None, field=None, *args, **kwargs): + self.field = field self.basecls = basecls or BaseField assert issubclass(self.basecls, BaseField) kwargs.setdefault('default', lambda: {}) @@ -417,6 +397,7 @@ class DictField(BaseField): if any(('.' in k or '$' in k) for k in value): raise ValidationError('Invalid dictionary key name - keys may not ' 'contain "." or "$" characters') + super(DictField, self).validate(value) def lookup_member(self, member_name): return DictField(basecls=self.basecls, db_field=member_name) @@ -432,7 +413,7 @@ class DictField(BaseField): return super(DictField, self).prepare_query_value(op, value) -class MapField(DereferenceBaseField): +class MapField(DictField): """A field that maps a name to a specified field type. Similar to a DictField, except the 'value' of each item must match the specified field type. @@ -444,50 +425,7 @@ class MapField(DereferenceBaseField): if not isinstance(field, BaseField): raise ValidationError('Argument to MapField constructor must be ' 'a valid field') - self.field = field - kwargs.setdefault('default', lambda: {}) - super(MapField, self).__init__(*args, **kwargs) - - def validate(self, value): - """Make sure that a list of valid fields is being used. - """ - if not isinstance(value, dict): - raise ValidationError('Only dictionaries may be used in a ' - 'DictField') - - if any(('.' in k or '$' in k) for k in value): - raise ValidationError('Invalid dictionary key name - keys may not ' - 'contain "." 
or "$" characters') - - try: - [self.field.validate(item) for item in value.values()] - except Exception, err: - raise ValidationError('Invalid MapField item (%s)' % str(item)) - - def to_python(self, value): - return dict([(key, self.field.to_python(item)) for key, item in value.iteritems()]) - - def to_mongo(self, value): - return dict([(key, self.field.to_mongo(item)) for key, item in value.iteritems()]) - - def prepare_query_value(self, op, value): - if op not in ('set', 'unset'): - return self.field.prepare_query_value(op, value) - for key in value: - value[key] = self.field.prepare_query_value(op, value[key]) - return value - - def lookup_member(self, member_name): - return self.field.lookup_member(member_name) - - def _set_owner_document(self, owner_document): - self.field.owner_document = owner_document - self._owner_document = owner_document - - def _get_owner_document(self, owner_document): - self._owner_document = owner_document - - owner_document = property(_get_owner_document, _set_owner_document) + super(MapField, self).__init__(field=field, *args, **kwargs) class ReferenceField(BaseField): diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 1dfe55af..666567e2 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -549,11 +549,12 @@ class QuerySet(object): parts = [parts] fields = [] field = None + for field_name in parts: # Handle ListField indexing: if field_name.isdigit(): try: - field = field.field + new_field = field.field except AttributeError, err: raise InvalidQueryError( "Can't use index on unsubscriptable field (%s)" % err) @@ -567,11 +568,17 @@ class QuerySet(object): field = document._fields[field_name] else: # Look up subfield on the previous field - field = field.lookup_member(field_name) - if field is None: + new_field = field.lookup_member(field_name) + from base import ComplexBaseField + if not new_field and isinstance(field, ComplexBaseField): + fields.append(field_name) + continue + elif not new_field: raise InvalidQueryError('Cannot resolve field "%s"' - % field_name) + % field_name) + field = new_field # update field to the new field type fields.append(field) + return fields @classmethod @@ -615,14 +622,33 @@ class QuerySet(object): if _doc_cls: # Switch field names to proper names [set in Field(name='foo')] fields = QuerySet._lookup_field(_doc_cls, parts) - parts = [field.db_field for field in fields] + parts = [] + + cleaned_fields = [] + append_field = True + for field in fields: + if isinstance(field, str): + parts.append(field) + append_field = False + else: + parts.append(field.db_field) + if append_field: + cleaned_fields.append(field) # Convert value to proper value - field = fields[-1] + field = cleaned_fields[-1] + singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] singular_ops += match_operators if op in singular_ops: - value = field.prepare_query_value(op, value) + if isinstance(field, basestring): + if op in match_operators and isinstance(value, basestring): + from mongoengine import StringField + value = StringField().prepare_query_value(op, value) + else: + value = field + else: + value = field.prepare_query_value(op, value) elif op in ('in', 'nin', 'all', 'near'): # 'in', 'nin' and 'all' require a list of values value = [field.prepare_query_value(op, v) for v in value] @@ -1170,14 +1196,19 @@ class QuerySet(object): fields = QuerySet._lookup_field(_doc_cls, parts) parts = [] + cleaned_fields = [] + append_field = True for field in fields: if isinstance(field, str): parts.append(field) + 
append_field = False else: parts.append(field.db_field) + if append_field: + cleaned_fields.append(field) # Convert value to proper value - field = fields[-1] + field = cleaned_fields[-1] if op in (None, 'set', 'push', 'pull', 'addToSet'): value = field.prepare_query_value(op, value) diff --git a/tests/dereference.py b/tests/dereference.py index b6cee89e..68792721 100644 --- a/tests/dereference.py +++ b/tests/dereference.py @@ -122,6 +122,64 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 4) + for m in group_obj.members: + self.assertTrue('User' in m.__class__.__name__) + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + def test_list_field_complex(self): + + class UserA(Document): + name = StringField() + + class UserB(Document): + name = StringField() + + class UserC(Document): + name = StringField() + + class Group(Document): + members = ListField() + + UserA.drop_collection() + UserB.drop_collection() + UserC.drop_collection() + Group.drop_collection() + + members = [] + for i in xrange(1, 51): + a = UserA(name='User A %s' % i) + a.save() + + b = UserB(name='User B %s' % i) + b.save() + + c = UserC(name='User C %s' % i) + c.save() + + members += [a, b, c] + + group = Group(members=members) + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for m in group_obj.members: + self.assertTrue('User' in m.__class__.__name__) + UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() @@ -156,10 +214,13 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 2) + for k, m in group_obj.members.iteritems(): + self.assertTrue(isinstance(m, User)) + User.drop_collection() Group.drop_collection() - def ztest_generic_reference_dict_field(self): + def test_dict_field(self): class UserA(Document): name = StringField() @@ -206,6 +267,9 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 4) + for k, m in group_obj.members.iteritems(): + self.assertTrue('User' in m.__class__.__name__) + group.members = {} group.save() @@ -218,11 +282,54 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 1) + for k, m in group_obj.members.iteritems(): + self.assertTrue('User' in m.__class__.__name__) + UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() + def test_dict_field_no_field_inheritance(self): + + class UserA(Document): + name = StringField() + meta = {'allow_inheritance': False} + + class Group(Document): + members = DictField() + + UserA.drop_collection() + Group.drop_collection() + + members = [] + for i in xrange(1, 51): + a = UserA(name='User A %s' % i) + a.save() + + members += [a] + + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() + + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first() + self.assertEqual(q, 1) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + for k, m in group_obj.members.iteritems(): + self.assertTrue(isinstance(m, UserA)) + + UserA.drop_collection() + Group.drop_collection() + def test_generic_reference_map_field(self): class UserA(Document): @@ -270,6 +377,9 @@ class 
FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 4) + for k, m in group_obj.members.iteritems(): + self.assertTrue('User' in m.__class__.__name__) + group.members = {} group.save() diff --git a/tests/fields.py b/tests/fields.py index d8970043..4d51ed51 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -322,6 +322,108 @@ class FieldTest(unittest.TestCase): BlogPost.drop_collection() + def test_list_field(self): + """Ensure that list types work as expected. + """ + class BlogPost(Document): + info = ListField() + + BlogPost.drop_collection() + + post = BlogPost() + post.info = 'my post' + self.assertRaises(ValidationError, post.validate) + + post.info = {'title': 'test'} + self.assertRaises(ValidationError, post.validate) + + post.info = ['test'] + post.save() + + post = BlogPost() + post.info = [{'test': 'test'}] + post.save() + + post = BlogPost() + post.info = [{'test': 3}] + post.save() + + + self.assertEquals(BlogPost.objects.count(), 3) + self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1) + self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1) + + # Confirm handles non strings or non existing keys + self.assertEquals(BlogPost.objects.filter(info__0__test__exact='5').count(), 0) + self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) + BlogPost.drop_collection() + + def test_list_field_Strict(self): + """Ensure that list field handles validation if provided a strict field type.""" + + class Simple(Document): + mapping = ListField(field=IntField()) + + Simple.drop_collection() + + e = Simple() + e.mapping = [1] + e.save() + + def create_invalid_mapping(): + e.mapping = ["abc"] + e.save() + + self.assertRaises(ValidationError, create_invalid_mapping) + + Simple.drop_collection() + + def test_list_field_complex(self): + """Ensure that the list fields can handle the complex types.""" + + class SettingBase(EmbeddedDocument): + pass + + class StringSetting(SettingBase): + value = StringField() + + class IntegerSetting(SettingBase): + value = IntField() + + class Simple(Document): + mapping = ListField() + + Simple.drop_collection() + e = Simple() + e.mapping.append(StringSetting(value='foo')) + e.mapping.append(IntegerSetting(value=42)) + e.mapping.append({'number': 1, 'string': 'Hi!', 'float': 1.001, + 'complex': IntegerSetting(value=42), 'list': + [IntegerSetting(value=42), StringSetting(value='foo')]}) + e.save() + + e2 = Simple.objects.get(id=e.id) + self.assertTrue(isinstance(e2.mapping[0], StringSetting)) + self.assertTrue(isinstance(e2.mapping[1], IntegerSetting)) + + # Test querying + self.assertEquals(Simple.objects.filter(mapping__1__value=42).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__2__number=1).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__2__complex__value=42).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1) + + # Confirm can update + Simple.objects().update(set__mapping__1=IntegerSetting(value=10)) + self.assertEquals(Simple.objects.filter(mapping__1__value=10).count(), 1) + + Simple.objects().update( + set__mapping__2__list__1=StringSetting(value='Boo')) + self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0) + self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1) + + Simple.drop_collection() + def test_dict_field(self): """Ensure 
that dict types work as expected. """ @@ -363,6 +465,131 @@ class FieldTest(unittest.TestCase): self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) BlogPost.drop_collection() + def test_dictfield_Strict(self): + """Ensure that dict field handles validation if provided a strict field type.""" + + class Simple(Document): + mapping = DictField(field=IntField()) + + Simple.drop_collection() + + e = Simple() + e.mapping['someint'] = 1 + e.save() + + def create_invalid_mapping(): + e.mapping['somestring'] = "abc" + e.save() + + self.assertRaises(ValidationError, create_invalid_mapping) + + Simple.drop_collection() + + def test_dictfield_complex(self): + """Ensure that the dict field can handle the complex types.""" + + class SettingBase(EmbeddedDocument): + pass + + class StringSetting(SettingBase): + value = StringField() + + class IntegerSetting(SettingBase): + value = IntField() + + class Simple(Document): + mapping = DictField() + + Simple.drop_collection() + e = Simple() + e.mapping['somestring'] = StringSetting(value='foo') + e.mapping['someint'] = IntegerSetting(value=42) + e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', 'float': 1.001, + 'complex': IntegerSetting(value=42), 'list': + [IntegerSetting(value=42), StringSetting(value='foo')]} + e.save() + + e2 = Simple.objects.get(id=e.id) + self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) + self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) + + # Test querying + self.assertEquals(Simple.objects.filter(mapping__someint__value=42).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1) + self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1) + + # Confirm can update + Simple.objects().update( + set__mapping={"someint": IntegerSetting(value=10)}) + Simple.objects().update( + set__mapping__nested_dict__list__1=StringSetting(value='Boo')) + self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0) + self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) + + Simple.drop_collection() + + def test_mapfield(self): + """Ensure that the MapField handles the declared type.""" + + class Simple(Document): + mapping = MapField(IntField()) + + Simple.drop_collection() + + e = Simple() + e.mapping['someint'] = 1 + e.save() + + def create_invalid_mapping(): + e.mapping['somestring'] = "abc" + e.save() + + self.assertRaises(ValidationError, create_invalid_mapping) + + def create_invalid_class(): + class NoDeclaredType(Document): + mapping = MapField() + + self.assertRaises(ValidationError, create_invalid_class) + + Simple.drop_collection() + + def test_complex_mapfield(self): + """Ensure that the MapField can handle complex declared types.""" + + class SettingBase(EmbeddedDocument): + pass + + class StringSetting(SettingBase): + value = StringField() + + class IntegerSetting(SettingBase): + value = IntField() + + class Extensible(Document): + mapping = MapField(EmbeddedDocumentField(SettingBase)) + + Extensible.drop_collection() + + e = Extensible() + e.mapping['somestring'] = StringSetting(value='foo') + e.mapping['someint'] = IntegerSetting(value=42) + e.save() + + e2 = Extensible.objects.get(id=e.id) + 
self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) + self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) + + def create_invalid_mapping(): + e.mapping['someint'] = 123 + e.save() + + self.assertRaises(ValidationError, create_invalid_mapping) + + Extensible.drop_collection() + def test_embedded_document_validation(self): """Ensure that invalid embedded documents cannot be assigned to embedded document fields. @@ -933,66 +1160,6 @@ class FieldTest(unittest.TestCase): self.assertEqual(d2.data, {}) self.assertEqual(d2.data2, {}) - def test_mapfield(self): - """Ensure that the MapField handles the declared type.""" - - class Simple(Document): - mapping = MapField(IntField()) - - Simple.drop_collection() - - e = Simple() - e.mapping['someint'] = 1 - e.save() - - def create_invalid_mapping(): - e.mapping['somestring'] = "abc" - e.save() - - self.assertRaises(ValidationError, create_invalid_mapping) - - def create_invalid_class(): - class NoDeclaredType(Document): - mapping = MapField() - - self.assertRaises(ValidationError, create_invalid_class) - - Simple.drop_collection() - - def test_complex_mapfield(self): - """Ensure that the MapField can handle complex declared types.""" - - class SettingBase(EmbeddedDocument): - pass - - class StringSetting(SettingBase): - value = StringField() - - class IntegerSetting(SettingBase): - value = IntField() - - class Extensible(Document): - mapping = MapField(EmbeddedDocumentField(SettingBase)) - - Extensible.drop_collection() - - e = Extensible() - e.mapping['somestring'] = StringSetting(value='foo') - e.mapping['someint'] = IntegerSetting(value=42) - e.save() - - e2 = Extensible.objects.get(id=e.id) - self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) - self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) - - def create_invalid_mapping(): - e.mapping['someint'] = 123 - e.save() - - self.assertRaises(ValidationError, create_invalid_mapping) - - Extensible.drop_collection() - if __name__ == '__main__': unittest.main() From b9255f73c381c820d13ec30fba499a3fe6868a3e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 9 Jun 2011 11:28:57 +0100 Subject: [PATCH 111/214] Updated docs --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c76b1154..f4be4ca6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,13 +5,13 @@ Changelog Changes in dev ============== +- Added ComplexBaseField - for improved flexibility and performance. - Added get_FIELD_display() method for easy choice field displaying. - Added queryset.slave_okay(enabled) method - Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable - Added insert method for bulk inserts - Added blinker signal support - Added query_counter context manager for tests -- Added DereferenceBaseField - for improved performance in field dereferencing - Added optional map_reduce method item_frequencies - Added inline_map_reduce option to map_reduce - Updated connection exception so it provides more info on the cause. 
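
A short usage sketch of what the ComplexBaseField changes above enable, based
on the new list/dict field tests in that patch (it assumes a running MongoDB
instance and the 'mongoenginetest' database used by the test suite): ListField
and DictField may now be declared without an inner field type and can hold
mixed values, including embedded documents, which are converted back to their
classes on load and remain queryable by position and attribute:

    from mongoengine import (connect, Document, EmbeddedDocument,
                             ListField, DictField, IntField)

    connect(db='mongoenginetest')

    class SettingBase(EmbeddedDocument):
        pass

    class IntegerSetting(SettingBase):
        value = IntField()

    class Simple(Document):
        mapping = ListField()   # no inner field type, so mixed values are allowed
        settings = DictField()

    e = Simple()
    e.mapping = ['a string', 42, {'nested': 'dict'}, IntegerSetting(value=42)]
    e.settings = {'answer': IntegerSetting(value=42)}
    e.save()

    e = Simple.objects.get(id=e.id)
    assert isinstance(e.mapping[3], IntegerSetting)   # embedded docs round-trip
    assert Simple.objects.filter(mapping__3__value=42).count() == 1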
From a66417e9d098b03b5dfaf04ab23fa8d185dd38e2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 9 Jun 2011 11:31:47 +0100 Subject: [PATCH 112/214] pep8 update --- tests/fields.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/fields.py b/tests/fields.py index 4d51ed51..1b199982 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -358,7 +358,7 @@ class FieldTest(unittest.TestCase): self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) BlogPost.drop_collection() - def test_list_field_Strict(self): + def test_list_field_strict(self): """Ensure that list field handles validation if provided a strict field type.""" class Simple(Document): @@ -465,7 +465,7 @@ class FieldTest(unittest.TestCase): self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) BlogPost.drop_collection() - def test_dictfield_Strict(self): + def test_dictfield_strict(self): """Ensure that dict field handles validation if provided a strict field type.""" class Simple(Document): From 199b4eb860a93c581c1ddfc915f7094fc28de678 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 9 Jun 2011 12:08:37 +0100 Subject: [PATCH 113/214] Added django_tests and regression test for order_by Refs #190 --- setup.py | 2 +- tests/django_tests.py | 44 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 tests/django_tests.py diff --git a/setup.py b/setup.py index d3be64b3..1f65ae5d 100644 --- a/setup.py +++ b/setup.py @@ -45,6 +45,6 @@ setup(name='mongoengine', long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, - install_requires=['pymongo', 'blinker'], + install_requires=['pymongo', 'blinker', 'django>=1.3'], test_suite='tests', ) diff --git a/tests/django_tests.py b/tests/django_tests.py new file mode 100644 index 00000000..e5e26022 --- /dev/null +++ b/tests/django_tests.py @@ -0,0 +1,44 @@ + +# -*- coding: utf-8 -*- + +import unittest + +from mongoengine import * + + +class QuerySetTest(unittest.TestCase): + + def setUp(self): + connect(db='mongoenginetest') + + class Person(Document): + name = StringField() + age = IntField() + self.Person = Person + + def test_order_by_in_django_template(self): + """Ensure that QuerySets are properly ordered in Django template. 
+ """ + self.Person.drop_collection() + + self.Person(name="A", age=20).save() + self.Person(name="D", age=10).save() + self.Person(name="B", age=40).save() + self.Person(name="C", age=30).save() + + from django.conf import settings + settings.configure() + from django.template import Context, Template + + t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") + + d = {"ol": self.Person.objects.order_by('-name')} + self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:') + d = {"ol": self.Person.objects.order_by('+name')} + self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:') + d = {"ol": self.Person.objects.order_by('-age')} + self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:') + d = {"ol": self.Person.objects.order_by('+age')} + self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:') + + self.Person.drop_collection() \ No newline at end of file From 417bb1b35d21c4bf02cb0acfd95f5b1ff6c49d70 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 9 Jun 2011 12:15:36 +0100 Subject: [PATCH 114/214] Added regression test for #185 --- tests/django_tests.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/tests/django_tests.py b/tests/django_tests.py index e5e26022..6be1ea25 100644 --- a/tests/django_tests.py +++ b/tests/django_tests.py @@ -5,6 +5,9 @@ import unittest from mongoengine import * +from django.template import Context, Template +from django.conf import settings +settings.configure() class QuerySetTest(unittest.TestCase): @@ -26,10 +29,6 @@ class QuerySetTest(unittest.TestCase): self.Person(name="B", age=40).save() self.Person(name="C", age=30).save() - from django.conf import settings - settings.configure() - from django.template import Context, Template - t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") d = {"ol": self.Person.objects.order_by('-name')} @@ -41,4 +40,18 @@ class QuerySetTest(unittest.TestCase): d = {"ol": self.Person.objects.order_by('+age')} self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:') - self.Person.drop_collection() \ No newline at end of file + self.Person.drop_collection() + + def test_q_object_filter_in_template(self): + + self.Person.drop_collection() + + self.Person(name="A", age=20).save() + self.Person(name="D", age=10).save() + self.Person(name="B", age=40).save() + self.Person(name="C", age=30).save() + + t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") + + d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))} + self.assertEqual(t.render(Context(d)), u'D-10:C-30:') \ No newline at end of file From b2848b85194dee2429d35036c96a6c800cef42bf Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 9 Jun 2011 14:20:21 +0100 Subject: [PATCH 115/214] Added ComplexDateTimeField Thanks to @pelletier for the code. Refs #187 --- docs/apireference.rst | 2 + mongoengine/fields.py | 97 +++++++++++++++++++++++++++++++++++++++- tests/fields.py | 101 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 198 insertions(+), 2 deletions(-) diff --git a/docs/apireference.rst b/docs/apireference.rst index a3d287ab..2442803d 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -53,6 +53,8 @@ Fields .. autoclass:: mongoengine.DateTimeField +.. autoclass:: mongoengine.ComplexDateTimeField + .. autoclass:: mongoengine.EmbeddedDocumentField .. 
autoclass:: mongoengine.DictField diff --git a/mongoengine/fields.py b/mongoengine/fields.py index f9b2580b..5d5304ae 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -18,8 +18,9 @@ import gridfs __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', 'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', - 'DecimalField', 'URLField', 'GenericReferenceField', 'FileField', - 'BinaryField', 'SortedListField', 'EmailField', 'GeoPointField'] + 'DecimalField', 'ComplexDateTimeField', 'URLField', + 'GenericReferenceField', 'FileField', 'BinaryField', + 'SortedListField', 'EmailField', 'GeoPointField'] RECURSIVE_REFERENCE_CONSTANT = 'self' @@ -273,6 +274,98 @@ class DateTimeField(BaseField): return None +class ComplexDateTimeField(StringField): + """ + ComplexDateTimeField handles microseconds exactly instead of rounding + like DateTimeField does. + + Derives from a StringField so you can do `gte` and `lte` filtering by + using lexicographical comparison when filtering / sorting strings. + + The stored string has the following format: + + YYYY,MM,DD,HH,MM,SS,NNNNNN + + Where NNNNNN is the number of microseconds of the represented `datetime`. + The `,` as the separator can be easily modified by passing the `separator` + keyword when initializing the field. + """ + + def __init__(self, separator=',', **kwargs): + self.names = ['year', 'month', 'day', 'hour', 'minute', 'second', + 'microsecond'] + self.separtor = separator + super(ComplexDateTimeField, self).__init__(**kwargs) + + def _leading_zero(self, number): + """ + Converts the given number to a string. + + If it has only one digit, a leading zero so as it has always at least + two digits. + """ + if int(number) < 10: + return "0%s" % number + else: + return str(number) + + def _convert_from_datetime(self, val): + """ + Convert a `datetime` object to a string representation (which will be + stored in MongoDB). This is the reverse function of + `_convert_from_string`. + + >>> a = datetime(2011, 6, 8, 20, 26, 24, 192284) + >>> RealDateTimeField()._convert_from_datetime(a) + '2011,06,08,20,26,24,192284' + """ + data = [] + for name in self.names: + data.append(self._leading_zero(getattr(val, name))) + return ','.join(data) + + def _convert_from_string(self, data): + """ + Convert a string representation to a `datetime` object (the object you + will manipulate). This is the reverse function of + `_convert_from_datetime`. 
+ + >>> a = '2011,06,08,20,26,24,192284' + >>> ComplexDateTimeField()._convert_from_string(a) + datetime.datetime(2011, 6, 8, 20, 26, 24, 192284) + """ + data = data.split(',') + data = map(int, data) + values = {} + for i in range(7): + values[self.names[i]] = data[i] + return datetime.datetime(**values) + + def __get__(self, instance, owner): + data = super(ComplexDateTimeField, self).__get__(instance, owner) + if data == None: + return datetime.datetime.now() + return self._convert_from_string(data) + + def __set__(self, obj, val): + data = self._convert_from_datetime(val) + return super(ComplexDateTimeField, self).__set__(obj, data) + + def validate(self, value): + if not isinstance(value, datetime.datetime): + raise ValidationError('Only datetime objects may used in a \ + ComplexDateTimeField') + + def to_python(self, value): + return self._convert_from_string(value) + + def to_mongo(self, value): + return self._convert_from_datetime(value) + + def prepare_query_value(self, op, value): + return self._convert_from_datetime(value) + + class EmbeddedDocumentField(BaseField): """An embedded document field. Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. diff --git a/tests/fields.py b/tests/fields.py index 1b199982..531167c8 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -247,6 +247,107 @@ class FieldTest(unittest.TestCase): LogEntry.drop_collection() + def test_complexdatetime_storage(self): + """Tests for complex datetime fields - which can handle microseconds + without rounding. + """ + class LogEntry(Document): + date = ComplexDateTimeField() + + LogEntry.drop_collection() + + # Post UTC - microseconds are rounded (down) nearest millisecond and dropped - with default datetimefields + d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) + log = LogEntry() + log.date = d1 + log.save() + log.reload() + self.assertEquals(log.date, d1) + + # Post UTC - microseconds are rounded (down) nearest millisecond - with default datetimefields + d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) + log.date = d1 + log.save() + log.reload() + self.assertEquals(log.date, d1) + + # Pre UTC dates microseconds below 1000 are dropped - with default datetimefields + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) + log.date = d1 + log.save() + log.reload() + self.assertEquals(log.date, d1) + + # Pre UTC microseconds above 1000 is wonky - with default datetimefields + # log.date has an invalid microsecond value so I can't construct + # a date to compare. + for i in xrange(1001, 3113, 33): + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) + log.date = d1 + log.save() + log.reload() + self.assertEquals(log.date, d1) + log1 = LogEntry.objects.get(date=d1) + self.assertEqual(log, log1) + + LogEntry.drop_collection() + + def test_complexdatetime_usage(self): + """Tests for complex datetime fields - which can handle microseconds + without rounding. 
+ """ + class LogEntry(Document): + date = ComplexDateTimeField() + + LogEntry.drop_collection() + + d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) + log = LogEntry() + log.date = d1 + log.save() + + log1 = LogEntry.objects.get(date=d1) + self.assertEquals(log, log1) + + LogEntry.drop_collection() + + # create 60 log entries + for i in xrange(1950, 2010): + d = datetime.datetime(i, 01, 01, 00, 00, 01, 999) + LogEntry(date=d).save() + + self.assertEqual(LogEntry.objects.count(), 60) + + # Test ordering + logs = LogEntry.objects.order_by("date") + count = logs.count() + i = 0 + while i == count-1: + self.assertTrue(logs[i].date <= logs[i+1].date) + i +=1 + + logs = LogEntry.objects.order_by("-date") + count = logs.count() + i = 0 + while i == count-1: + self.assertTrue(logs[i].date >= logs[i+1].date) + i +=1 + + # Test searching + logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980,1,1)) + self.assertEqual(logs.count(), 30) + + logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980,1,1)) + self.assertEqual(logs.count(), 30) + + logs = LogEntry.objects.filter( + date__lte=datetime.datetime(2011,1,1), + date__gte=datetime.datetime(2000,1,1), + ) + self.assertEqual(logs.count(), 10) + + LogEntry.drop_collection() + def test_list_validation(self): """Ensure that a list field only accepts lists with valid elements. """ From fb09fde2097bd557a9173c749f45a8688cf62050 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 9 Jun 2011 14:26:52 +0100 Subject: [PATCH 116/214] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index f4be4ca6..0bbb5b82 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added ComplexDateTimeField - Handles datetimes correctly with microseconds - Added ComplexBaseField - for improved flexibility and performance. - Added get_FIELD_display() method for easy choice field displaying. - Added queryset.slave_okay(enabled) method From fd7f882011ce548efd7ae5fcb0f59fd38d38e98b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 9 Jun 2011 16:09:06 +0100 Subject: [PATCH 117/214] Save no longer tramples over documents now sets or unsets explicit fields. Fixes #146, refs #18 Thanks @zhangcheng for the initial code --- docs/changelog.rst | 5 ++- mongoengine/base.py | 9 +++-- mongoengine/document.py | 10 +++++ setup.py | 2 +- tests/document.py | 84 +++++++++++++++++++++++++++++++++++++++++ 5 files changed, 104 insertions(+), 6 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0bbb5b82..ecd7ef57 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,9 +5,10 @@ Changelog Changes in dev ============== +- Fixed saving so sets updated values rather than overwrites - Added ComplexDateTimeField - Handles datetimes correctly with microseconds -- Added ComplexBaseField - for improved flexibility and performance. -- Added get_FIELD_display() method for easy choice field displaying. 
+- Added ComplexBaseField - for improved flexibility and performance +- Added get_FIELD_display() method for easy choice field displaying - Added queryset.slave_okay(enabled) method - Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable - Added insert method for bulk inserts diff --git a/mongoengine/base.py b/mongoengine/base.py index a22795c7..aed17bc3 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -92,6 +92,9 @@ class BaseField(object): """Descriptor for assigning a value to a field in a document. """ instance._data[self.name] = value + # If the field set is in the _present_fields list add it so we can track + if hasattr(instance, '_present_fields') and self.name not in instance._present_fields: + instance._present_fields.append(self.name) def to_python(self, value): """Convert a MongoDB-compatible type to a Python type. @@ -592,13 +595,14 @@ class BaseDocument(object): if field.choices: # dynamically adds a way to get the display value for a field with choices setattr(self, 'get_%s_display' % attr_name, partial(self._get_FIELD_display, field=field)) - # Use default value if present value = getattr(self, attr_name, None) setattr(self, attr_name, value) + # Assign initial values to instance for attr_name in values.keys(): try: - setattr(self, attr_name, values.pop(attr_name)) + value = values.pop(attr_name) + setattr(self, attr_name, value) except AttributeError: pass @@ -739,7 +743,6 @@ class BaseDocument(object): cls = subclasses[class_name] present_fields = data.keys() - for field_name, field in cls._fields.items(): if field.db_field in data: value = data[field.db_field] diff --git a/mongoengine/document.py b/mongoengine/document.py index cae8343d..e25bea06 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -95,6 +95,16 @@ class Document(BaseDocument): collection = self.__class__.objects._collection if force_insert: object_id = collection.insert(doc, safe=safe, **write_options) + elif '_id' in doc: + # Perform a set rather than a save - this will only save set fields + object_id = doc.pop('_id') + collection.update({'_id': object_id}, {"$set": doc}, upsert=True, safe=safe, **write_options) + + # Find and unset any fields explicitly set to None + if hasattr(self, '_present_fields'): + removals = dict([(k, 1) for k in self._present_fields if k not in doc and k != '_id']) + if removals: + collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options) else: object_id = collection.save(doc, safe=safe, **write_options) except pymongo.errors.OperationFailure, err: diff --git a/setup.py b/setup.py index 1f65ae5d..37ec4375 100644 --- a/setup.py +++ b/setup.py @@ -45,6 +45,6 @@ setup(name='mongoengine', long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, - install_requires=['pymongo', 'blinker', 'django>=1.3'], + install_requires=['pymongo', 'blinker', 'django==1.3'], test_suite='tests', ) diff --git a/tests/document.py b/tests/document.py index 14541469..f0af8f2d 100644 --- a/tests/document.py +++ b/tests/document.py @@ -789,6 +789,90 @@ class DocumentTest(unittest.TestCase): except ValidationError: self.fail() + def test_update(self): + """Ensure that an existing document is updated instead of be overwritten. 
+ """ + # Create person object and save it to the database + person = self.Person(name='Test User', age=30) + person.save() + + # Create same person object, with same id, without age + same_person = self.Person(name='Test') + same_person.id = person.id + same_person.save() + + # Confirm only one object + self.assertEquals(self.Person.objects.count(), 1) + + # reload + person.reload() + same_person.reload() + + # Confirm the same + self.assertEqual(person, same_person) + self.assertEqual(person.name, same_person.name) + self.assertEqual(person.age, same_person.age) + + # Confirm the saved values + self.assertEqual(person.name, 'Test') + self.assertEqual(person.age, 30) + + # Test only / exclude only updates included fields + person = self.Person.objects.only('name').get() + person.name = 'User' + person.save() + + person.reload() + self.assertEqual(person.name, 'User') + self.assertEqual(person.age, 30) + + # test exclude only updates set fields + person = self.Person.objects.exclude('name').get() + person.age = 21 + person.save() + + person.reload() + self.assertEqual(person.name, 'User') + self.assertEqual(person.age, 21) + + # Test only / exclude can set non excluded / included fields + person = self.Person.objects.only('name').get() + person.name = 'Test' + person.age = 30 + person.save() + + person.reload() + self.assertEqual(person.name, 'Test') + self.assertEqual(person.age, 30) + + # test exclude only updates set fields + person = self.Person.objects.exclude('name').get() + person.name = 'User' + person.age = 21 + person.save() + + person.reload() + self.assertEqual(person.name, 'User') + self.assertEqual(person.age, 21) + + # Confirm does remove unrequired fields + person = self.Person.objects.exclude('name').get() + person.age = None + person.save() + + person.reload() + self.assertEqual(person.name, 'User') + self.assertEqual(person.age, None) + + person = self.Person.objects.get() + person.name = None + person.age = None + person.save() + + person.reload() + self.assertEqual(person.name, None) + self.assertEqual(person.age, None) + def test_delete(self): """Ensure that document may be deleted using the delete method. 
""" From 82fbe7128f78d103f10b814d503717fe85b4cd0e Mon Sep 17 00:00:00 2001 From: Colin Howe Date: Fri, 10 Jun 2011 17:31:42 +0100 Subject: [PATCH 118/214] Improve validation warnings --- mongoengine/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index aed17bc3..43a21961 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -627,8 +627,8 @@ class BaseDocument(object): try: field._validate(value) except (ValueError, AttributeError, AssertionError), e: - raise ValidationError('Invalid value for field of type "%s": %s' - % (field.__class__.__name__, value)) + raise ValidationError('Invalid value for field named "%s" of type "%s": %s' + % (field.name, field.__class__.__name__, value)) elif field.required: raise ValidationError('Field "%s" is required' % field.name) From 7b293783191f24302a2b8dd0b579ae8b839aae79 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 13 Jun 2011 12:40:12 +0100 Subject: [PATCH 119/214] Fixes issue converting to mongo --- mongoengine/fields.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 5d5304ae..967ce834 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -394,7 +394,7 @@ class EmbeddedDocumentField(BaseField): return value def to_mongo(self, value): - if isinstance(value, basestring): + if not isinstance(value, self.document_type): return value return self.document_type.to_mongo(value) @@ -438,7 +438,8 @@ class ListField(ComplexBaseField): def prepare_query_value(self, op, value): if self.field: - if op in ('set', 'unset') and not isinstance(value, basestring): + if op in ('set', 'unset') and (not isinstance(value, basestring) + and hasattr(value, '__iter__')): return [self.field.prepare_query_value(op, v) for v in value] return self.field.prepare_query_value(op, value) return super(ListField, self).prepare_query_value(op, value) From ea35fb1c54d3f2deb76a31a21314d4b4cc29d177 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 13 Jun 2011 12:49:09 +0100 Subject: [PATCH 120/214] More robust _present_fields additions --- mongoengine/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index aed17bc3..592a6784 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -91,9 +91,10 @@ class BaseField(object): def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. """ - instance._data[self.name] = value + key = self.name + instance._data[key] = value # If the field set is in the _present_fields list add it so we can track - if hasattr(instance, '_present_fields') and self.name not in instance._present_fields: + if hasattr(instance, '_present_fields') and key and key not in instance._present_fields: instance._present_fields.append(self.name) def to_python(self, value): From 0ed79a839d78ad69dcd3c60f51acfe5e83348f44 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 10 Jun 2011 17:22:05 +0100 Subject: [PATCH 121/214] Added delta tracking to documents. All saves on exisiting items do set / unset operations only on changed fields. 
* Note lists and dicts generally do set operations for things like pop() del[key] As there is no easy map to unset and explicitly matches the new list / dict fixes #18 --- docs/changelog.rst | 1 + docs/guide/document-instances.rst | 19 ++- mongoengine/base.py | 209 ++++++++++++++++++++++--- mongoengine/document.py | 69 +++++--- mongoengine/fields.py | 31 +++- tests/dereference.py | 4 +- tests/django_tests.py | 1 - tests/document.py | 251 +++++++++++++++++++++++++++++- tests/fields.py | 32 +++- 9 files changed, 552 insertions(+), 65 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ecd7ef57..54efb4ff 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added delta tracking now only sets / unsets explicitly changed fields - Fixed saving so sets updated values rather than overwrites - Added ComplexDateTimeField - Handles datetimes correctly with microseconds - Added ComplexBaseField - for improved flexibility and performance diff --git a/docs/guide/document-instances.rst b/docs/guide/document-instances.rst index 7b5d165b..aeed7cdb 100644 --- a/docs/guide/document-instances.rst +++ b/docs/guide/document-instances.rst @@ -18,10 +18,21 @@ attribute syntax:: Saving and deleting documents ============================= -To save the document to the database, call the -:meth:`~mongoengine.Document.save` method. If the document does not exist in -the database, it will be created. If it does already exist, it will be -updated. +MongoEngine tracks changes to documents to provide efficient saving. To save +the document to the database, call the :meth:`~mongoengine.Document.save` method. +If the document does not exist in the database, it will be created. If it does +already exist, then any changes will be updated atomically. For example:: + + >>> page = Page(title="Test Page") + >>> page.save() # Performs an insert + >>> page.title = "My Page" + >>> page.save() # Performs an atomic set on the title field. + +.. note:: + Changes to documents are tracked and on the whole perform `set` operations. + + * ``list_field.pop(0)`` - *sets* the resulting list + * ``del(list_field)`` - *unsets* whole list To delete a document, call the :meth:`~mongoengine.Document.delete` method. Note that this will only work if the document exists in the database and has a diff --git a/mongoengine/base.py b/mongoengine/base.py index 592a6784..292184ef 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -4,6 +4,7 @@ from queryset import DO_NOTHING from mongoengine import signals +import weakref import sys import pymongo import pymongo.objectid @@ -86,16 +87,19 @@ class BaseField(object): # Allow callable default values if callable(value): value = value() + + # Convert lists / values so we can watch for any changes on them + if isinstance(value, (list, tuple)) and not isinstance(value, BaseList): + value = BaseList(value, instance=instance, name=self.name) + elif isinstance(value, dict) and not isinstance(value, BaseDict): + value = BaseDict(value, instance=instance, name=self.name) return value def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. 
""" - key = self.name - instance._data[key] = value - # If the field set is in the _present_fields list add it so we can track - if hasattr(instance, '_present_fields') and key and key not in instance._present_fields: - instance._present_fields.append(self.name) + instance._data[self.name] = value + instance._mark_as_changed(self.name) def to_python(self, value): """Convert a MongoDB-compatible type to a Python type. @@ -173,21 +177,27 @@ class ComplexBaseField(BaseField): db = _get_db() dbref = {} collections = {} - for k, v in value_list.items(): - dbref[k] = v + for k,v in value_list.items(): + # Save any DBRefs if isinstance(v, (pymongo.dbref.DBRef)): # direct reference (DBRef) - collections.setdefault(v.collection, []).append((k, v)) - elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: - # generic reference - collection = get_document(v['_cls'])._meta['collection'] - collections.setdefault(collection, []).append((k, v)) + collections.setdefault(v.collection, []).append((k,v)) + elif isinstance(v, (dict, pymongo.son.SON)): + if '_ref' in v: + # generic reference + collection = get_document(v['_cls'])._meta['collection'] + collections.setdefault(collection, []).append((k,v)) + else: + # Use BaseDict so can watch any changes + dbref[k] = BaseDict(v, instance=instance, name=self.name) + else: + dbref[k] = v # For each collection get the references for collection, dbrefs in collections.items(): id_map = {} - for k, v in dbrefs: + for k,v in dbrefs: if isinstance(v, (pymongo.dbref.DBRef)): # direct reference (DBRef), has no _cls information id_map[v.id] = (k, None) @@ -203,7 +213,9 @@ class ComplexBaseField(BaseField): dbref[key] = doc_cls._from_son(ref) if is_list: - dbref = [v for k,v in sorted(dbref.items(), key=itemgetter(0))] + dbref = BaseList([v for k,v in sorted(dbref.items(), key=itemgetter(0))], instance=instance, name=self.name) + else: + dbref = BaseDict(dbref, instance=instance, name=self.name) instance._data[self.name] = dbref return super(ComplexBaseField, self).__get__(instance, owner) @@ -304,7 +316,7 @@ class ComplexBaseField(BaseField): if hasattr(value, 'iteritems'): [self.field.validate(v) for k,v in value.iteritems()] else: - [self.field.validate(v) for v in value] + [self.field.validate(v) for v in value] except Exception, err: raise ValidationError('Invalid %s item (%s)' % ( self.field.__class__.__name__, str(v))) @@ -714,7 +726,7 @@ class BaseDocument(object): self._meta.get('allow_inheritance', True) == False): data['_cls'] = self._class_name data['_types'] = self._superclasses.keys() + [self._class_name] - if data.has_key('_id') and data['_id'] is None: + if '_id' in data and data['_id'] is None: del data['_id'] return data @@ -751,9 +763,71 @@ class BaseDocument(object): else field.to_python(value)) obj = cls(**data) - obj._present_fields = present_fields + obj._changed_fields = [] return obj + def _mark_as_changed(self, key): + """Marks a key as explicitly changed by the user + """ + if not key: + return + if hasattr(self, '_changed_fields') and key not in self._changed_fields: + self._changed_fields.append(key) + + def _get_changed_fields(self, key=''): + """Returns a list of all fields that have explicitly been changed. + """ + from mongoengine import EmbeddedDocument + _changed_fields = [] + _changed_fields += getattr(self, '_changed_fields', []) + + for field_name in self._fields: + key = '%s.' 
% field_name + field = getattr(self, field_name, None) + if isinstance(field, EmbeddedDocument): # Grab all embedded fields that have been changed + _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key) if k] + elif isinstance(field, (list, tuple)): # Loop list fields as they contain documents + for index, value in enumerate(field): + if not hasattr(value, '_get_changed_fields'): + continue + list_key = "%s%s." % (key, index) + _changed_fields += ["%s%s" % (list_key, k) for k in value._get_changed_fields(list_key) if k] + return _changed_fields + + def _delta(self): + """Returns the delta (set, unset) of the changes for a document. + Gets any values that have been explicitly changed. + """ + # Handles cases where not loaded from_son but has _id + doc = self.to_mongo() + set_fields = self._get_changed_fields() + set_data = {} + unset_data = {} + if hasattr(self, '_changed_fields'): + set_data = {} + # Fetch each set item from its path + for path in set_fields: + parts = path.split('.') + d = doc + for p in parts: + if hasattr(d, '__getattr__'): + d = getattr(p, d) + elif p.isdigit(): + d = d[int(p)] + else: + d = d.get(p) + set_data[path] = d + else: + set_data = doc + if '_id' in set_data: + del(set_data['_id']) + + for k,v in set_data.items(): + if not v: + del(set_data[k]) + unset_data[k] = 1 + return set_data, unset_data + def __eq__(self, other): if isinstance(other, self.__class__) and hasattr(other, 'id'): if self.id == other.id: @@ -764,13 +838,112 @@ class BaseDocument(object): return not self.__eq__(other) def __hash__(self): - """ For list, dic key """ + """ For list, dict key """ if self.pk is None: # For new object return super(BaseDocument,self).__hash__() else: return hash(self.pk) + +class BaseList(list): + """A special list so we can watch any changes + """ + + def __init__(self, list_items, instance, name): + self.instance = weakref.proxy(instance) + self.name = name + super(BaseList, self).__init__(list_items) + + def __setitem__(self, *args, **kwargs): + if hasattr(self, 'instance') and hasattr(self, 'name'): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).__setitem__(*args, **kwargs) + + def __delitem__(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + super(BaseList, self).__delitem__(*args, **kwargs) + + def __delete__(self, *args, **kwargs): + if hasattr(self, 'instance') and hasattr(self, 'name'): + import ipdb; ipdb.set_trace() + self.instance._mark_as_changed(self.name) + delattr(self, 'instance') + delattr(self, 'name') + super(BaseDict, self).__delete__(*args, **kwargs) + + def append(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + return super(BaseList, self).append(*args, **kwargs) + + def extend(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + return super(BaseList, self).extend(*args, **kwargs) + + def insert(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + return super(BaseList, self).insert(*args, **kwargs) + + def pop(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + return super(BaseList, self).pop(*args, **kwargs) + + def remove(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + return super(BaseList, self).remove(*args, **kwargs) + + def reverse(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + return super(BaseList, self).reverse(*args, **kwargs) + + def sort(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + return super(BaseList, self).sort(*args, 
**kwargs) + + +class BaseDict(dict): + """A special dict so we can watch any changes + """ + + def __init__(self, dict_items, instance, name): + self.instance = weakref.proxy(instance) + self.name = name + super(BaseDict, self).__init__(dict_items) + + def __setitem__(self, *args, **kwargs): + if hasattr(self, 'instance') and hasattr(self, 'name'): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).__setitem__(*args, **kwargs) + + def __setattr__(self, *args, **kwargs): + if hasattr(self, 'instance') and hasattr(self, 'name'): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).__setattr__(*args, **kwargs) + + def __delete__(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).__delete__(*args, **kwargs) + + def __delitem__(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).__delitem__(*args, **kwargs) + + def __delattr__(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).__delattr__(*args, **kwargs) + + def clear(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).clear(*args, **kwargs) + + def pop(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).clear(*args, **kwargs) + + def popitem(self, *args, **kwargs): + self.instance._mark_as_changed(self.name) + super(BaseDict, self).clear(*args, **kwargs) + if sys.version_info < (2, 5): # Prior to Python 2.5, Exception was an old-style class import types diff --git a/mongoengine/document.py b/mongoengine/document.py index e25bea06..2f40eec7 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,12 +1,11 @@ from mongoengine import signals from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, - ValidationError) + ValidationError, BaseDict, BaseList) from queryset import OperationError from connection import _get_db import pymongo - __all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError'] @@ -19,6 +18,18 @@ class EmbeddedDocument(BaseDocument): __metaclass__ = DocumentMetaclass + def __delattr__(self, *args, **kwargs): + """Handle deletions of fields""" + field_name = args[0] + if field_name in self._fields: + default = self._fields[field_name].default + if callable(default): + default = default() + setattr(self, field_name, default) + else: + super(EmbeddedDocument, self).__delattr__(*args, **kwargs) + + class Document(BaseDocument): """The base class used for defining the structure and properties of @@ -59,7 +70,6 @@ class Document(BaseDocument): disabled by either setting types to False on the specific index or by setting index_types to False on the meta dictionary for the document. 
""" - __metaclass__ = TopLevelDocumentMetaclass def save(self, safe=True, force_insert=False, validate=True, write_options=None): @@ -95,18 +105,15 @@ class Document(BaseDocument): collection = self.__class__.objects._collection if force_insert: object_id = collection.insert(doc, safe=safe, **write_options) - elif '_id' in doc: - # Perform a set rather than a save - this will only save set fields - object_id = doc.pop('_id') - collection.update({'_id': object_id}, {"$set": doc}, upsert=True, safe=safe, **write_options) - - # Find and unset any fields explicitly set to None - if hasattr(self, '_present_fields'): - removals = dict([(k, 1) for k in self._present_fields if k not in doc and k != '_id']) - if removals: - collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options) - else: + if created: object_id = collection.save(doc, safe=safe, **write_options) + else: + object_id = doc['_id'] + updates, removals = self._delta() + if updates: + collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options) + if removals: + collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options) except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' if u'duplicate key' in unicode(err): @@ -114,7 +121,7 @@ class Document(BaseDocument): raise OperationError(message % unicode(err)) id_field = self._meta['id_field'] self[id_field] = self._fields[id_field].to_python(object_id) - + self._changed_fields = [] signals.post_save.send(self, created=created) def delete(self, safe=False): @@ -135,14 +142,6 @@ class Document(BaseDocument): signals.post_delete.send(self) - @classmethod - def register_delete_rule(cls, document_cls, field_name, rule): - """This method registers the delete rules to apply when removing this - object. - """ - cls._meta['delete_rules'][(document_cls, field_name)] = rule - - def reload(self): """Reloads all attributes from the database. @@ -151,7 +150,29 @@ class Document(BaseDocument): id_field = self._meta['id_field'] obj = self.__class__.objects(**{id_field: self[id_field]}).first() for field in self._fields: - setattr(self, field, obj[field]) + setattr(self, field, self._reload(field, obj[field])) + self._changed_fields = [] + + def _reload(self, key, value): + """Used by :meth:`~mongoengine.Document.reload` to ensure the + correct instance is linked to self. + """ + if isinstance(value, BaseDict): + value = [(k, self._reload(k,v)) for k,v in value.items()] + value = BaseDict(value, instance=self, name=key) + elif isinstance(value, BaseList): + value = [self._reload(key, v) for v in value] + value = BaseList(value, instance=self, name=key) + elif isinstance(value, EmbeddedDocument): + value._changed_fields = [] + return value + + @classmethod + def register_delete_rule(cls, document_cls, field_name, rule): + """This method registers the delete rules to apply when removing this + object. 
+ """ + cls._meta['delete_rules'][(document_cls, field_name)] = rule @classmethod def drop_collection(cls): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 967ce834..eeb4c2c0 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -347,9 +347,9 @@ class ComplexDateTimeField(StringField): return datetime.datetime.now() return self._convert_from_string(data) - def __set__(self, obj, val): - data = self._convert_from_datetime(val) - return super(ComplexDateTimeField, self).__set__(obj, data) + def __set__(self, instance, value): + value = self._convert_from_datetime(value) + return super(ComplexDateTimeField, self).__set__(instance, value) def validate(self, value): if not isinstance(value, datetime.datetime): @@ -686,11 +686,13 @@ class GridFSProxy(object): .. versionadded:: 0.4 """ - def __init__(self, grid_id=None): + def __init__(self, grid_id=None, key=None, instance=None): self.fs = gridfs.GridFS(_get_db()) # Filesystem instance self.newfile = None # Used for partial writes self.grid_id = grid_id # Store GridFS id for file self.gridout = None + self.key = key + self.instance = instance def __getattr__(self, name): obj = self.get() @@ -723,6 +725,7 @@ class GridFSProxy(object): raise GridFSError('This document already has a file. Either delete ' 'it or call replace to overwrite it') self.grid_id = self.fs.put(file_obj, **kwargs) + self._mark_as_changed() def write(self, string): if self.grid_id: @@ -750,6 +753,12 @@ class GridFSProxy(object): self.fs.delete(self.grid_id) self.grid_id = None self.gridout = None + self._mark_as_changed() + + def _mark_as_changed(self): + """Inform the instance that `self.key` has been changed""" + if self.instance: + self.instance._mark_as_changed(self.key) def replace(self, file_obj, **kwargs): self.delete() @@ -777,10 +786,14 @@ class FileField(BaseField): grid_file = instance._data.get(self.name) self.grid_file = grid_file if self.grid_file: + if not self.grid_file.key: + self.grid_file.key = self.name + self.grid_file.instance = instance return self.grid_file - return GridFSProxy() + return GridFSProxy(key=self.name, instance=instance) def __set__(self, instance, value): + key = self.name if isinstance(value, file) or isinstance(value, str): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) @@ -794,10 +807,12 @@ class FileField(BaseField): grid_file.put(value) else: # Create a new proxy object as we don't already have one - instance._data[self.name] = GridFSProxy() - instance._data[self.name].put(value) + instance._data[key] = GridFSProxy(key=key, instance=instance) + instance._data[key].put(value) else: - instance._data[self.name] = value + instance._data[key] = value + + instance._mark_as_changed(key) def to_mongo(self, value): # Store the GridFS file id in MongoDB diff --git a/tests/dereference.py b/tests/dereference.py index 68792721..4040d5bd 100644 --- a/tests/dereference.py +++ b/tests/dereference.py @@ -281,9 +281,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 1) - - for k, m in group_obj.members.iteritems(): - self.assertTrue('User' in m.__class__.__name__) + self.assertEqual(group_obj.members, {}) UserA.drop_collection() UserB.drop_collection() diff --git a/tests/django_tests.py b/tests/django_tests.py index 6be1ea25..ee8084ce 100644 --- a/tests/django_tests.py +++ b/tests/django_tests.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- import unittest diff --git a/tests/document.py b/tests/document.py index f0af8f2d..4c890800 100644 --- 
a/tests/document.py +++ b/tests/document.py @@ -2,6 +2,7 @@ import unittest from datetime import datetime import pymongo import pickle +import weakref from mongoengine import * from mongoengine.base import BaseField @@ -11,6 +12,7 @@ from mongoengine.connection import _get_db class PickleEmbedded(EmbeddedDocument): date = DateTimeField(default=datetime.now) + class PickleTest(Document): number = IntField() string = StringField() @@ -717,6 +719,47 @@ class DocumentTest(unittest.TestCase): self.assertEqual(person.name, "Mr Test User") self.assertEqual(person.age, 21) + def test_reload_referencing(self): + """Ensures reloading updates weakrefs correctly + """ + class Embedded(EmbeddedDocument): + dict_field = DictField() + list_field = ListField() + + class Doc(Document): + dict_field = DictField() + list_field = ListField() + embedded_field = EmbeddedDocumentField(Embedded) + + Doc.drop_collection + doc = Doc() + doc.dict_field = {'hello': 'world'} + doc.list_field = ['1', 2, {'hello': 'world'}] + + embedded_1 = Embedded() + embedded_1.dict_field = {'hello': 'world'} + embedded_1.list_field = ['1', 2, {'hello': 'world'}] + doc.embedded_field = embedded_1 + doc.save() + + doc.reload() + doc.list_field.append(1) + doc.dict_field['woot'] = "woot" + doc.embedded_field.list_field.append(1) + doc.embedded_field.dict_field['woot'] = "woot" + + self.assertEquals(doc._get_changed_fields(), [ + 'list_field', 'dict_field', 'embedded_field.list_field', + 'embedded_field.dict_field']) + doc.save() + + doc.reload() + self.assertEquals(doc._get_changed_fields(), []) + self.assertEquals(len(doc.list_field), 4) + self.assertEquals(len(doc.dict_field), 2) + self.assertEquals(len(doc.embedded_field.list_field), 4) + self.assertEquals(len(doc.embedded_field.dict_field), 2) + def test_dictionary_access(self): """Ensure that dictionary-style field access works properly. 
""" @@ -873,6 +916,197 @@ class DocumentTest(unittest.TestCase): self.assertEqual(person.name, None) self.assertEqual(person.age, None) + def test_delta(self): + + class Doc(Document): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + + Doc.drop_collection + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEquals(doc._get_changed_fields(), []) + self.assertEquals(doc._delta(), ({}, {})) + + doc.string_field = 'hello' + self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {})) + + doc._changed_fields = [] + doc.int_field = 1 + self.assertEquals(doc._delta(), ({'int_field': 1}, {})) + + doc._changed_fields = [] + dict_value = {'hello': 'world', 'ping': 'pong'} + doc.dict_field = dict_value + self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) + + doc._changed_fields = [] + list_value = ['1', 2, {'hello': 'world'}] + doc.list_field = list_value + self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) + + # Test unsetting + doc._changed_fields = [] + doc._unset_fields = [] + doc.dict_field = {} + self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) + + doc._changed_fields = [] + doc._unset_fields = {} + doc.list_field = [] + self.assertEquals(doc._delta(), ({}, {'list_field': 1})) + + def test_delta_recursive(self): + + class Embedded(EmbeddedDocument): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + + class Doc(Document): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + embedded_field = EmbeddedDocumentField(Embedded) + + Doc.drop_collection + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEquals(doc._get_changed_fields(), []) + self.assertEquals(doc._delta(), ({}, {})) + + embedded_1 = Embedded() + embedded_1.string_field = 'hello' + embedded_1.int_field = 1 + embedded_1.dict_field = {'hello': 'world'} + embedded_1.list_field = ['1', 2, {'hello': 'world'}] + doc.embedded_field = embedded_1 + + embedded_delta = { + '_types': ['Embedded'], + '_cls': 'Embedded', + 'string_field': 'hello', + 'int_field': 1, + 'dict_field': {'hello': 'world'}, + 'list_field': ['1', 2, {'hello': 'world'}] + } + self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) + self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {})) + + doc.save() + doc.reload() + + doc.embedded_field.dict_field = {} + self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1})) + self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.dict_field, {}) + + doc.embedded_field.list_field = [] + self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1})) + self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field, []) + + embedded_2 = Embedded() + embedded_2.string_field = 'hello' + embedded_2.int_field = 1 + embedded_2.dict_field = {'hello': 'world'} + embedded_2.list_field = ['1', 2, {'hello': 'world'}] + + doc.embedded_field.list_field = ['1', 2, embedded_2] + self.assertEquals(doc.embedded_field._delta(), ({ + 'list_field': ['1', 2, { + '_cls': 'Embedded', + '_types': ['Embedded'], + 'string_field': 'hello', + 'dict_field': {'hello': 'world'}, + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + + self.assertEquals(doc._delta(), 
({ + 'embedded_field.list_field': ['1', 2, { + '_cls': 'Embedded', + '_types': ['Embedded'], + 'string_field': 'hello', + 'dict_field': {'hello': 'world'}, + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + doc.save() + doc.reload() + + self.assertEquals(doc.embedded_field.list_field[0], '1') + self.assertEquals(doc.embedded_field.list_field[1], 2) + for k in doc.embedded_field.list_field[2]._fields: + self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) + + doc.embedded_field.list_field[2].string_field = 'world' + self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) + self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') + + # Test list native methods + doc.embedded_field.list_field[2].list_field.pop(0) + self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) + doc.save() + doc.reload() + + doc.embedded_field.list_field[2].list_field.append(1) + self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) + + doc.embedded_field.list_field[2].list_field.sort() + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) + + del(doc.embedded_field.list_field[2].list_field[2]['hello']) + self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) + doc.save() + doc.reload() + + del(doc.embedded_field.list_field[2].list_field) + self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) + + def test_save_only_changed_fields(self): + """Ensure save only sets / unsets changed fields + """ + + # Create person object and save it to the database + person = self.Person(name='Test User', age=30) + person.save() + person.reload() + + same_person = self.Person.objects.get() + + person.age = 21 + same_person.name = 'User' + + person.save() + same_person.save() + + person = self.Person.objects.get() + self.assertEquals(person.name, 'User') + self.assertEquals(person.age, 21) + def test_delete(self): """Ensure that document may be deleted using the delete method. """ @@ -978,12 +1212,19 @@ class DocumentTest(unittest.TestCase): promoted_employee.details.position = 'Senior Developer' promoted_employee.save() - collection = self.db[self.Person._meta['collection']] - employee_obj = collection.find_one({'name': 'Test Employee'}) - self.assertEqual(employee_obj['name'], 'Test Employee') - self.assertEqual(employee_obj['age'], 50) + promoted_employee.reload() + self.assertEqual(promoted_employee.name, 'Test Employee') + self.assertEqual(promoted_employee.age, 50) # Ensure that the 'details' embedded object saved correctly - self.assertEqual(employee_obj['details']['position'], 'Senior Developer') + self.assertEqual(promoted_employee.details.position, 'Senior Developer') + + # Test removal + promoted_employee.details = None + promoted_employee.save() + + promoted_employee.reload() + self.assertEqual(promoted_employee.details, None) + def test_save_reference(self): """Ensure that a document reference field may be saved in the database. 
diff --git a/tests/fields.py b/tests/fields.py index 531167c8..79cd519c 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -843,6 +843,7 @@ class FieldTest(unittest.TestCase): name = StringField() children = ListField(EmbeddedDocumentField('self')) + Tree.drop_collection tree = Tree(name="Tree") first_child = TreeNode(name="Child 1") @@ -853,15 +854,42 @@ class FieldTest(unittest.TestCase): third_child = TreeNode(name="Child 3") first_child.children.append(third_child) - tree.save() - tree_obj = Tree.objects.first() self.assertEqual(len(tree.children), 1) self.assertEqual(tree.children[0].name, first_child.name) self.assertEqual(tree.children[0].children[0].name, second_child.name) self.assertEqual(tree.children[0].children[1].name, third_child.name) + # Test updating + tree.children[0].name = 'I am Child 1' + tree.children[0].children[0].name = 'I am Child 2' + tree.children[0].children[1].name = 'I am Child 3' + tree.save() + + self.assertEqual(tree.children[0].name, 'I am Child 1') + self.assertEqual(tree.children[0].children[0].name, 'I am Child 2') + self.assertEqual(tree.children[0].children[1].name, 'I am Child 3') + + # Test removal + self.assertEqual(len(tree.children[0].children), 2) + del(tree.children[0].children[1]) + + tree.save() + self.assertEqual(len(tree.children[0].children), 1) + + tree.children[0].children.pop(0) + tree.save() + self.assertEqual(len(tree.children[0].children), 0) + self.assertEqual(tree.children[0].children, []) + + tree.children[0].children.insert(0, third_child) + tree.children[0].children.insert(0, second_child) + tree.save() + self.assertEqual(len(tree.children[0].children), 2) + self.assertEqual(tree.children[0].children[0].name, second_child.name) + self.assertEqual(tree.children[0].children[1].name, third_child.name) + def test_undefined_reference(self): """Ensure that ReferenceFields may reference undefined Documents. """ From 4c2b83d9cae5323aa4b8f90132b51107c9a58db8 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 14 Jun 2011 15:00:26 +0100 Subject: [PATCH 122/214] Remove errant __delete__ method --- mongoengine/base.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 41794f9a..8a0ded51 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -864,14 +864,6 @@ class BaseList(list): self.instance._mark_as_changed(self.name) super(BaseList, self).__delitem__(*args, **kwargs) - def __delete__(self, *args, **kwargs): - if hasattr(self, 'instance') and hasattr(self, 'name'): - import ipdb; ipdb.set_trace() - self.instance._mark_as_changed(self.name) - delattr(self, 'instance') - delattr(self, 'name') - super(BaseDict, self).__delete__(*args, **kwargs) - def append(self, *args, **kwargs): self.instance._mark_as_changed(self.name) return super(BaseList, self).append(*args, **kwargs) From 576db9ca88bef73aedb107ce7edf2e502acaf7f5 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 14 Jun 2011 15:09:03 +0100 Subject: [PATCH 123/214] Fixes DateTimeField handling of date objects. Fixes #191 --- mongoengine/fields.py | 10 +++++++--- tests/fields.py | 12 ++++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index eeb4c2c0..ca18255c 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -232,12 +232,16 @@ class DateTimeField(BaseField): """A datetime field. Note: Microseconds are rounded to the nearest millisecond. 
- Pre UTC microsecond support is effecively broken see - `tests.field.test_datetime` for more information. + Pre UTC microsecond support is effecively broken. + Use :class:`~mongoengine.fields.ComplexDateTimeField` if you + need accurate microsecond support. """ def validate(self, value): - assert isinstance(value, datetime.datetime) + assert isinstance(value, (datetime.datetime, datetime.date)) + + def to_mongo(self, value): + return self.prepare_query_value(None, value) def prepare_query_value(self, op, value): if value is None: diff --git a/tests/fields.py b/tests/fields.py index 79cd519c..773ba93c 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -182,6 +182,9 @@ class FieldTest(unittest.TestCase): log.time = datetime.datetime.now() log.validate() + log.time = datetime.date.today() + log.validate() + log.time = -1 self.assertRaises(ValidationError, log.validate) log.time = '1pm' @@ -199,6 +202,15 @@ class FieldTest(unittest.TestCase): LogEntry.drop_collection() + # Test can save dates + log = LogEntry() + log.date = datetime.date.today() + log.save() + log.reload() + self.assertEquals(log.date.date(), datetime.date.today()) + + LogEntry.drop_collection() + # Post UTC - microseconds are rounded (down) nearest millisecond and dropped d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) d2 = datetime.datetime(1970, 01, 01, 00, 00, 01) From cb1dfdfac6f7a27ca2bd9bc99fd39e88d47219dd Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 14 Jun 2011 16:56:04 +0100 Subject: [PATCH 124/214] Fixes to signals The sender is the class of the document not the instance - easier to hook into --- docs/guide/signals.rst | 12 +++--- mongoengine/base.py | 4 +- mongoengine/document.py | 8 ++-- tests/signals.py | 87 ++++++++++++++++++++++++++++++++--------- 4 files changed, 81 insertions(+), 30 deletions(-) diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index d80a421b..3c3159f8 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -30,20 +30,20 @@ Example usage:: return self.name @classmethod - def pre_save(cls, instance, **kwargs): - logging.debug("Pre Save: %s" % instance.name) + def pre_save(cls, sender, document, **kwargs): + logging.debug("Pre Save: %s" % document.name) @classmethod - def post_save(cls, instance, **kwargs): - logging.debug("Post Save: %s" % instance.name) + def post_save(cls, sender, document, **kwargs): + logging.debug("Post Save: %s" % document.name) if 'created' in kwargs: if kwargs['created']: logging.debug("Created") else: logging.debug("Updated") - signals.pre_save.connect(Author.pre_save) - signals.post_save.connect(Author.post_save) + signals.pre_save.connect(Author.pre_save, sender=Author) + signals.post_save.connect(Author.post_save, sender=Author) .. 
_blinker: http://pypi.python.org/pypi/blinker \ No newline at end of file diff --git a/mongoengine/base.py b/mongoengine/base.py index 8a0ded51..c5b704e1 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -600,7 +600,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): class BaseDocument(object): def __init__(self, **values): - signals.pre_init.send(self, values=values) + signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} # Assign default values to instance @@ -619,7 +619,7 @@ class BaseDocument(object): except AttributeError: pass - signals.post_init.send(self) + signals.post_init.send(self.__class__, document=self) def _get_FIELD_display(self, field): """Returns the display value for a choice field""" diff --git a/mongoengine/document.py b/mongoengine/document.py index 2f40eec7..69b19e2c 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -91,7 +91,7 @@ class Document(BaseDocument): For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. """ - signals.pre_save.send(self) + signals.pre_save.send(self.__class__, document=self) if validate: self.validate() @@ -122,7 +122,7 @@ class Document(BaseDocument): id_field = self._meta['id_field'] self[id_field] = self._fields[id_field].to_python(object_id) self._changed_fields = [] - signals.post_save.send(self, created=created) + signals.post_save.send(self.__class__, document=self, created=created) def delete(self, safe=False): """Delete the :class:`~mongoengine.Document` from the database. This @@ -130,7 +130,7 @@ class Document(BaseDocument): :param safe: check if the operation succeeded before returning """ - signals.pre_delete.send(self) + signals.pre_delete.send(self.__class__, document=self) id_field = self._meta['id_field'] object_id = self._fields[id_field].to_mongo(self[id_field]) @@ -140,7 +140,7 @@ class Document(BaseDocument): message = u'Could not delete document (%s)' % err.message raise OperationError(message) - signals.post_delete.send(self) + signals.post_delete.send(self.__class__, document=self) def reload(self): """Reloads all attributes from the database. 
diff --git a/tests/signals.py b/tests/signals.py index fff2d398..9c413379 100644 --- a/tests/signals.py +++ b/tests/signals.py @@ -28,21 +28,21 @@ class SignalTests(unittest.TestCase): return self.name @classmethod - def pre_init(cls, instance, **kwargs): + def pre_init(cls, sender, document, *args, **kwargs): signal_output.append('pre_init signal, %s' % cls.__name__) signal_output.append(str(kwargs['values'])) @classmethod - def post_init(cls, instance, **kwargs): - signal_output.append('post_init signal, %s' % instance) + def post_init(cls, sender, document, **kwargs): + signal_output.append('post_init signal, %s' % document) @classmethod - def pre_save(cls, instance, **kwargs): - signal_output.append('pre_save signal, %s' % instance) + def pre_save(cls, sender, document, **kwargs): + signal_output.append('pre_save signal, %s' % document) @classmethod - def post_save(cls, instance, **kwargs): - signal_output.append('post_save signal, %s' % instance) + def post_save(cls, sender, document, **kwargs): + signal_output.append('post_save signal, %s' % document) if 'created' in kwargs: if kwargs['created']: signal_output.append('Is created') @@ -50,15 +50,52 @@ class SignalTests(unittest.TestCase): signal_output.append('Is updated') @classmethod - def pre_delete(cls, instance, **kwargs): - signal_output.append('pre_delete signal, %s' % instance) + def pre_delete(cls, sender, document, **kwargs): + signal_output.append('pre_delete signal, %s' % document) @classmethod - def post_delete(cls, instance, **kwargs): - signal_output.append('post_delete signal, %s' % instance) - + def post_delete(cls, sender, document, **kwargs): + signal_output.append('post_delete signal, %s' % document) self.Author = Author + + class Another(Document): + name = StringField() + + def __unicode__(self): + return self.name + + @classmethod + def pre_init(cls, sender, document, **kwargs): + signal_output.append('pre_init Another signal, %s' % cls.__name__) + signal_output.append(str(kwargs['values'])) + + @classmethod + def post_init(cls, sender, document, **kwargs): + signal_output.append('post_init Another signal, %s' % document) + + @classmethod + def pre_save(cls, sender, document, **kwargs): + signal_output.append('pre_save Another signal, %s' % document) + + @classmethod + def post_save(cls, sender, document, **kwargs): + signal_output.append('post_save Another signal, %s' % document) + if 'created' in kwargs: + if kwargs['created']: + signal_output.append('Is created') + else: + signal_output.append('Is updated') + + @classmethod + def pre_delete(cls, sender, document, **kwargs): + signal_output.append('pre_delete Another signal, %s' % document) + + @classmethod + def post_delete(cls, sender, document, **kwargs): + signal_output.append('post_delete Another signal, %s' % document) + + self.Another = Another # Save up the number of connected signals so that we can check at the end # that all the signals we register get properly unregistered self.pre_signals = ( @@ -70,12 +107,19 @@ class SignalTests(unittest.TestCase): len(signals.post_delete.receivers) ) - signals.pre_init.connect(Author.pre_init) - signals.post_init.connect(Author.post_init) - signals.pre_save.connect(Author.pre_save) - signals.post_save.connect(Author.post_save) - signals.pre_delete.connect(Author.pre_delete) - signals.post_delete.connect(Author.post_delete) + signals.pre_init.connect(Author.pre_init, sender=Author) + signals.post_init.connect(Author.post_init, sender=Author) + signals.pre_save.connect(Author.pre_save, sender=Author) + 
signals.post_save.connect(Author.post_save, sender=Author) + signals.pre_delete.connect(Author.pre_delete, sender=Author) + signals.post_delete.connect(Author.post_delete, sender=Author) + + signals.pre_init.connect(Another.pre_init, sender=Another) + signals.post_init.connect(Another.post_init, sender=Another) + signals.pre_save.connect(Another.pre_save, sender=Another) + signals.post_save.connect(Another.post_save, sender=Another) + signals.pre_delete.connect(Another.pre_delete, sender=Another) + signals.post_delete.connect(Another.post_delete, sender=Another) def tearDown(self): signals.pre_init.disconnect(self.Author.pre_init) @@ -85,6 +129,13 @@ class SignalTests(unittest.TestCase): signals.post_save.disconnect(self.Author.post_save) signals.pre_save.disconnect(self.Author.pre_save) + signals.pre_init.disconnect(self.Another.pre_init) + signals.post_init.disconnect(self.Another.post_init) + signals.post_delete.disconnect(self.Another.post_delete) + signals.pre_delete.disconnect(self.Another.pre_delete) + signals.post_save.disconnect(self.Another.post_save) + signals.pre_save.disconnect(self.Another.pre_save) + # Check that all our signals got disconnected properly. post_signals = ( len(signals.pre_init.receivers), From 0338ac17b1be78050e82e1222a84c805b870bf9c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 15 Jun 2011 08:55:31 +0100 Subject: [PATCH 125/214] Fixes multiple assignment issue preventing saves Thanks to @wpjunior for the ticket and testcase Also fixed bug in BaseList fixes #195 --- mongoengine/base.py | 6 +++--- tests/document.py | 39 +++++++++++++++++++++++++++++++++++++-- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index c5b704e1..1ca1680a 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -784,9 +784,9 @@ class BaseDocument(object): for field_name in self._fields: key = '%s.' 
% field_name field = getattr(self, field_name, None) - if isinstance(field, EmbeddedDocument): # Grab all embedded fields that have been changed + if isinstance(field, EmbeddedDocument) and field_name not in _changed_fields: # Grab all embedded fields that have been changed _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key) if k] - elif isinstance(field, (list, tuple)): # Loop list fields as they contain documents + elif isinstance(field, (list, tuple)) and field_name not in _changed_fields: # Loop list fields as they contain documents for index, value in enumerate(field): if not hasattr(value, '_get_changed_fields'): continue @@ -858,7 +858,7 @@ class BaseList(list): def __setitem__(self, *args, **kwargs): if hasattr(self, 'instance') and hasattr(self, 'name'): self.instance._mark_as_changed(self.name) - super(BaseDict, self).__setitem__(*args, **kwargs) + super(BaseList, self).__setitem__(*args, **kwargs) def __delitem__(self, *args, **kwargs): self.instance._mark_as_changed(self.name) diff --git a/tests/document.py b/tests/document.py index 4c890800..4f90ba2d 100644 --- a/tests/document.py +++ b/tests/document.py @@ -933,31 +933,35 @@ class DocumentTest(unittest.TestCase): self.assertEquals(doc._delta(), ({}, {})) doc.string_field = 'hello' + self.assertEquals(doc._get_changed_fields(), ['string_field']) self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {})) doc._changed_fields = [] doc.int_field = 1 + self.assertEquals(doc._get_changed_fields(), ['int_field']) self.assertEquals(doc._delta(), ({'int_field': 1}, {})) doc._changed_fields = [] dict_value = {'hello': 'world', 'ping': 'pong'} doc.dict_field = dict_value + self.assertEquals(doc._get_changed_fields(), ['dict_field']) self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) doc._changed_fields = [] list_value = ['1', 2, {'hello': 'world'}] doc.list_field = list_value + self.assertEquals(doc._get_changed_fields(), ['list_field']) self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) # Test unsetting doc._changed_fields = [] - doc._unset_fields = [] doc.dict_field = {} + self.assertEquals(doc._get_changed_fields(), ['dict_field']) self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) doc._changed_fields = [] - doc._unset_fields = {} doc.list_field = [] + self.assertEquals(doc._get_changed_fields(), ['list_field']) self.assertEquals(doc._delta(), ({}, {'list_field': 1})) def test_delta_recursive(self): @@ -990,6 +994,8 @@ class DocumentTest(unittest.TestCase): embedded_1.list_field = ['1', 2, {'hello': 'world'}] doc.embedded_field = embedded_1 + self.assertEquals(doc._get_changed_fields(), ['embedded_field']) + embedded_delta = { '_types': ['Embedded'], '_cls': 'Embedded', @@ -1005,6 +1011,7 @@ class DocumentTest(unittest.TestCase): doc.reload() doc.embedded_field.dict_field = {} + self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field']) self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1})) self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1})) doc.save() @@ -1012,6 +1019,7 @@ class DocumentTest(unittest.TestCase): self.assertEquals(doc.embedded_field.dict_field, {}) doc.embedded_field.list_field = [] + self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1})) self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1})) doc.save() @@ -1025,6 +1033,7 @@ class DocumentTest(unittest.TestCase): embedded_2.list_field = 
['1', 2, {'hello': 'world'}] doc.embedded_field.list_field = ['1', 2, embedded_2] + self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) self.assertEquals(doc.embedded_field._delta(), ({ 'list_field': ['1', 2, { '_cls': 'Embedded', @@ -1055,12 +1064,38 @@ class DocumentTest(unittest.TestCase): self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) doc.embedded_field.list_field[2].string_field = 'world' + self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) doc.save() doc.reload() self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') + # Test multiple assignments + doc.embedded_field.list_field[2].string_field = 'hello world' + doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] + self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) + self.assertEquals(doc.embedded_field._delta(), ({ + 'list_field': ['1', 2, { + '_types': ['Embedded'], + '_cls': 'Embedded', + 'string_field': 'hello world', + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + 'dict_field': {'hello': 'world'}}]}, {})) + self.assertEquals(doc._delta(), ({ + 'embedded_field.list_field': ['1', 2, { + '_types': ['Embedded'], + '_cls': 'Embedded', + 'string_field': 'hello world', + 'int_field': 1, + 'list_field': ['1', 2, {'hello': 'world'}], + 'dict_field': {'hello': 'world'}} + ]}, {})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world') + # Test list native methods doc.embedded_field.list_field[2].list_field.pop(0) self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) From 94cad89e321b92239171fd0a2f11095fa2f01b09 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 15 Jun 2011 11:22:27 +0100 Subject: [PATCH 126/214] Fixes to item_frequencies - now handles path lookups fixes #194 --- .gitignore | 1 + mongoengine/queryset.py | 39 ++++++++++++++++++------- tests/queryset.py | 63 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index 8951a0ce..315674fe 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,4 @@ env/ .settings .project .pydevproject +tests/bugfix.py diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 666567e2..4ffa5324 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1303,7 +1303,16 @@ class QuerySet(object): # Substitute the correct name for the field into the javascript return u'["%s"]' % fields[-1].db_field - return re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) + def field_path_sub(match): + # Extract just the field name, and look up the field objects + field_name = match.group(1).split('.') + fields = QuerySet._lookup_field(self._document, field_name) + # Substitute the correct name for the field into the javascript + return ".".join([f.db_field for f in fields]) + + code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) + code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, code) + return code def exec_js(self, code, *fields, **options): """Execute a Javascript function on the server. 
A list of fields may be @@ -1405,12 +1414,15 @@ class QuerySet(object): def _item_frequencies_map_reduce(self, field, normalize=False): map_func = """ function() { - if (this[~%(field)s].constructor == Array) { - this[~%(field)s].forEach(function(item) { + path = '{{~%(field)s}}'.split('.'); + field = this; + for (p in path) { field = field[path[p]]; } + if (field.constructor == Array) { + field.forEach(function(item) { emit(item, 1); }); } else { - emit(this[~%(field)s], 1); + emit(field, 1); } } """ % dict(field=field) @@ -1443,12 +1455,16 @@ class QuerySet(object): def _item_frequencies_exec_js(self, field, normalize=False): """Uses exec_js to execute""" freq_func = """ - function(field) { + function(path) { + path = path.split('.'); + if (options.normalize) { var total = 0.0; db[collection].find(query).forEach(function(doc) { - if (doc[field].constructor == Array) { - total += doc[field].length; + field = doc; + for (p in path) { field = field[path[p]]; } + if (field.constructor == Array) { + total += field.length; } else { total++; } @@ -1461,18 +1477,21 @@ class QuerySet(object): inc /= total; } db[collection].find(query).forEach(function(doc) { - if (doc[field].constructor == Array) { - doc[field].forEach(function(item) { + field = doc; + for (p in path) { field = field[path[p]]; } + if (field.constructor == Array) { + field.forEach(function(item) { frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); }); } else { - var item = doc[field]; + var item = field; frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); } }); return frequencies; } """ + return self.exec_js(freq_func, field, normalize=normalize) def __repr__(self): diff --git a/tests/queryset.py b/tests/queryset.py index 37140f4a..cc219fba 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1116,6 +1116,11 @@ class QuerySetTest(unittest.TestCase): ] self.assertEqual(results, expected_results) + # Test template style + code = "{{~comments.content}}" + sub_code = BlogPost.objects._sub_js_fields(code) + self.assertEquals("cmnts.body", sub_code) + BlogPost.drop_collection() def test_delete(self): @@ -1637,6 +1642,64 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() + def test_item_frequencies_on_embedded(self): + """Ensure that item frequencies are properly generated from lists. 
+ """ + + class Phone(EmbeddedDocument): + number = StringField() + + class Person(Document): + name = StringField() + phone = EmbeddedDocumentField(Phone) + + Person.drop_collection() + + doc = Person(name="Guido") + doc.phone = Phone(number='62-3331-1656') + doc.save() + + doc = Person(name="Marr") + doc.phone = Phone(number='62-3331-1656') + doc.save() + + doc = Person(name="WP Junior") + doc.phone = Phone(number='62-3332-1656') + doc.save() + + + def test_assertions(f): + f = dict((key, int(val)) for key, val in f.items()) + self.assertEqual(set(['62-3331-1656', '62-3332-1656']), set(f.keys())) + self.assertEqual(f['62-3331-1656'], 2) + self.assertEqual(f['62-3332-1656'], 1) + + exec_js = Person.objects.item_frequencies('phone.number') + map_reduce = Person.objects.item_frequencies('phone.number', map_reduce=True) + test_assertions(exec_js) + test_assertions(map_reduce) + + # Ensure query is taken into account + def test_assertions(f): + f = dict((key, int(val)) for key, val in f.items()) + self.assertEqual(set(['62-3331-1656']), set(f.keys())) + self.assertEqual(f['62-3331-1656'], 2) + + exec_js = Person.objects(phone__number='62-3331-1656').item_frequencies('phone.number') + map_reduce = Person.objects(phone__number='62-3331-1656').item_frequencies('phone.number', map_reduce=True) + test_assertions(exec_js) + test_assertions(map_reduce) + + # Check that normalization works + def test_assertions(f): + self.assertEqual(f['62-3331-1656'], 2.0/3.0) + self.assertEqual(f['62-3332-1656'], 1.0/3.0) + + exec_js = Person.objects.item_frequencies('phone.number', normalize=True) + map_reduce = Person.objects.item_frequencies('phone.number', normalize=True, map_reduce=True) + test_assertions(exec_js) + test_assertions(map_reduce) + def test_average(self): """Ensure that field can be averaged correctly. """ From ffb3e8b7b9a9f1387566ad41c4a515686a06d975 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 15 Jun 2011 11:28:41 +0100 Subject: [PATCH 127/214] Added help_text and verbose_name to fields closes #192 --- mongoengine/base.py | 4 +++- tests/fields.py | 7 +++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 1ca1680a..d50cf955 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -49,7 +49,7 @@ class BaseField(object): def __init__(self, db_field=None, name=None, required=False, default=None, unique=False, unique_with=None, primary_key=False, - validation=None, choices=None): + validation=None, choices=None, verbose_name=None, help_text=None): self.db_field = (db_field or name) if not primary_key else '_id' if name: import warnings @@ -63,6 +63,8 @@ class BaseField(object): self.primary_key = primary_key self.validation = validation self.choices = choices + self.verbose_name = verbose_name + self.help_text = help_text # Adjust the appropriate creation counter, and save our local copy. 
if self.db_field == '_id': diff --git a/tests/fields.py b/tests/fields.py index 773ba93c..c13f9e34 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -21,12 +21,15 @@ class FieldTest(unittest.TestCase): """ class Person(Document): name = StringField() - age = IntField(default=30) - userid = StringField(default=lambda: 'test') + age = IntField(default=30, help_text="Your real age") + userid = StringField(default=lambda: 'test', verbose_name="User Identity") person = Person(name='Test Person') self.assertEqual(person._data['age'], 30) self.assertEqual(person._data['userid'], 'test') + self.assertEqual(person._fields['name'].help_text, None) + self.assertEqual(person._fields['age'].help_text, "Your real age") + self.assertEqual(person._fields['userid'].verbose_name, "User Identity") def test_required_values(self): """Ensure that required field constraints are enforced. From 5411cc55731bc6ecf43075b08acbf00eccafb83e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 15 Jun 2011 11:30:10 +0100 Subject: [PATCH 128/214] Updated changelog --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 54efb4ff..11218e2c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,8 @@ Changelog Changes in dev ============== +- Added help_text and verbose_name to fields to help with some form libs +- Updated item_frequencies to handle embedded document lookups - Added delta tracking now only sets / unsets explicitly changed fields - Fixed saving so sets updated values rather than overwrites - Added ComplexDateTimeField - Handles datetimes correctly with microseconds From 967e72723b692114aa8357387b6b6292d1aab868 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 15 Jun 2011 14:55:38 +0100 Subject: [PATCH 129/214] Added note to item_frequencies method. Current implementation is relatively simple, for complex schemas the user will have to write their own map reduce. --- mongoengine/queryset.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 4ffa5324..76d4d1cd 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1400,6 +1400,12 @@ class QuerySet(object): the whole queried set of documents, and their corresponding frequency. This is useful for generating tag clouds, or searching documents. + .. note:: + Can only do direct simple mappings and cannot map across + :class:`~mongoengine.ReferenceField` or + :class:`~mongoengine.GenericReferenceField` for more complex + counting a manual map reduce call would is required. + If the field is a :class:`~mongoengine.ListField`, the items within each list will be counted individually. 
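For reference, a minimal sketch of the simple case that the note above does cover, using the Person/Phone documents defined in the tests added earlier in this series; it assumes only that a connection has already been opened with connect()::

    # Sketch only -- Person embeds a Phone document with a 'number' StringField,
    # exactly as declared in tests/queryset.py in this patch series.
    frequencies = Person.objects.item_frequencies('phone.number')
    # maps each distinct number to its count, e.g. two hits for '62-3331-1656'

    # The same lookup forced through map/reduce, and a normalised variant:
    frequencies_mr = Person.objects.item_frequencies('phone.number', map_reduce=True)
    proportions = Person.objects.item_frequencies('phone.number', normalize=True)
    # normalize=True returns each value's share of the total rather than a raw count

Anything more involved than a direct path like 'phone.number' -- for example counting across a ReferenceField -- falls outside item_frequencies and needs a hand-written map reduce, as the note states.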
From 658b85d3277c6c7478ca426ed64f544f59f811e9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 15 Jun 2011 16:51:49 +0100 Subject: [PATCH 130/214] Inconsistent setting of '_cls' broke inherited document referencing Fixes #199 --- docs/changelog.rst | 15 +++++++------- mongoengine/base.py | 8 ++++---- mongoengine/fields.py | 2 +- tests/document.py | 48 +++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 61 insertions(+), 12 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 11218e2c..ea926239 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Fixed issue with inconsitent setting of _cls breaking inherited referencing - Added help_text and verbose_name to fields to help with some form libs - Updated item_frequencies to handle embedded document lookups - Added delta tracking now only sets / unsets explicitly changed fields @@ -22,7 +23,7 @@ Changes in dev - Updated connection exception so it provides more info on the cause. - Added searching multiple levels deep in ``DictField`` - Added ``DictField`` entries containing strings to use matching operators -- Added ``MapField``, similar to ``DictField`` +- Added ``MapField``, similar to ``DictField`` - Added Abstract Base Classes - Added Custom Objects Managers - Added sliced subfields updating @@ -35,14 +36,14 @@ Changes in dev - Updated queryset to handle latest version of pymongo map_reduce now requires an output. - Added ``Document`` __hash__, __ne__ for pickling -- Added ``FileField`` optional size arg for read method +- Added ``FileField`` optional size arg for read method - Fixed ``FileField`` seek and tell methods for reading files -- Added ``QuerySet.clone`` to support copying querysets +- Added ``QuerySet.clone`` to support copying querysets - Fixed item_frequencies when using name thats the same as a native js function - Added reverse delete rules - Fixed issue with unset operation - Fixed Q-object bug -- Added ``QuerySet.all_fields`` resets previous .only() and .exlude() +- Added ``QuerySet.all_fields`` resets previous .only() and .exlude() - Added ``QuerySet.exclude`` - Added django style choices - Fixed order and filter issue @@ -82,7 +83,7 @@ Changes in v0.3 =============== - Added MapReduce support - Added ``contains``, ``startswith`` and ``endswith`` query operators (and - case-insensitive versions that are prefixed with 'i') + case-insensitive versions that are prefixed with 'i') - Deprecated fields' ``name`` parameter, replaced with ``db_field`` - Added ``QuerySet.only`` for only retrieving specific fields - Added ``QuerySet.in_bulk()`` for bulk querying using ids @@ -129,7 +130,7 @@ Changes in v0.2 =============== - Added ``Q`` class for building advanced queries - Added ``QuerySet`` methods for atomic updates to documents -- Fields may now specify ``unique=True`` to enforce uniqueness across a +- Fields may now specify ``unique=True`` to enforce uniqueness across a collection - Added option for default document ordering - Fixed bug in index definitions @@ -137,7 +138,7 @@ Changes in v0.2 Changes in v0.1.3 ================= - Added Django authentication backend -- Added ``Document.meta`` support for indexes, which are ensured just before +- Added ``Document.meta`` support for indexes, which are ensured just before querying takes place - A few minor bugfixes diff --git a/mongoengine/base.py b/mongoengine/base.py index d50cf955..6d343682 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -173,7 +173,7 @@ class 
ComplexBaseField(BaseField): for k,v in value_list.items(): if isinstance(v, dict) and '_cls' in v and '_ref' not in v: - value_list[k] = get_document(v['_cls'].split('.')[-1])._from_son(v) + value_list[k] = get_document(v['_cls'])._from_son(v) # Handle all dereferencing db = _get_db() @@ -401,6 +401,7 @@ class DocumentMetaclass(type): else: simple_class = False + doc_class_name = '.'.join(reversed(class_name)) meta = attrs.get('_meta', attrs.get('meta', {})) if 'allow_inheritance' not in meta: @@ -412,8 +413,7 @@ class DocumentMetaclass(type): raise ValueError('Only direct subclasses of Document may set ' '"allow_inheritance" to False') attrs['_meta'] = meta - - attrs['_class_name'] = '.'.join(reversed(class_name)) + attrs['_class_name'] = doc_class_name attrs['_superclasses'] = superclasses # Add the document's fields to the _fields attribute @@ -448,7 +448,7 @@ class DocumentMetaclass(type): new_class.add_to_class('MultipleObjectsReturned', exc) global _document_registry - _document_registry[name] = new_class + _document_registry[doc_class_name] = new_class return new_class diff --git a/mongoengine/fields.py b/mongoengine/fields.py index ca18255c..26999204 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -652,7 +652,7 @@ class GenericReferenceField(BaseField): id_ = id_field.to_mongo(id_) collection = document._meta['collection'] ref = pymongo.dbref.DBRef(collection, id_) - return {'_cls': document.__class__.__name__, '_ref': ref} + return {'_cls': document._class_name, '_ref': ref} def prepare_query_value(self, op, value): return self.to_mongo(value) diff --git a/tests/document.py b/tests/document.py index 4f90ba2d..3a5419da 100644 --- a/tests/document.py +++ b/tests/document.py @@ -116,6 +116,8 @@ class DocumentTest(unittest.TestCase): class Human(Mammal): pass class Dog(Mammal): pass + Animal.drop_collection() + Animal().save() Fish().save() Mammal().save() @@ -133,6 +135,52 @@ class DocumentTest(unittest.TestCase): Animal.drop_collection() + def test_polymorphic_references(self): + """Ensure that the correct subclasses are returned from a query when + using references / generic references + """ + class Animal(Document): pass + class Fish(Animal): pass + class Mammal(Animal): pass + class Human(Mammal): pass + class Dog(Mammal): pass + + class Zoo(Document): + animals = ListField(ReferenceField(Animal)) + + Zoo.drop_collection() + Animal.drop_collection() + + Animal().save() + Fish().save() + Mammal().save() + Human().save() + Dog().save() + + # Save a reference to each animal + zoo = Zoo(animals=Animal.objects) + zoo.save() + zoo.reload() + + classes = [a.__class__ for a in Zoo.objects.first().animals] + self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) + + Zoo.drop_collection() + + class Zoo(Document): + animals = ListField(GenericReferenceField(Animal)) + + # Save a reference to each animal + zoo = Zoo(animals=Animal.objects) + zoo.save() + zoo.reload() + + classes = [a.__class__ for a in Zoo.objects.first().animals] + self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) + + Zoo.drop_collection() + Animal.drop_collection() + def test_inheritance(self): """Ensure that document may inherit fields from a superclass document. 
""" From 22a7ee58852ae5218d2197c6fd472c04176b150e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 16 Jun 2011 09:47:44 +0100 Subject: [PATCH 131/214] Handle old named (referenced) docs Refs #199 --- mongoengine/base.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 6d343682..49efba60 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -23,13 +23,20 @@ class ValidationError(Exception): _document_registry = {} def get_document(name): - if name not in _document_registry: + doc = _document_registry.get(name, None) + if not doc: + # Possible old style names + end = ".%s" % name + possible_match = [k for k in _document_registry.keys() if k.endswith(end)] + if len(possible_match) == 1: + doc = _document_registry.get(possible_match.pop(), None) + if not doc: raise NotRegistered(""" `%s` has not been registered in the document registry. Importing the document class automatically registers it, has it been imported? """.strip() % name) - return _document_registry[name] + return doc class BaseField(object): From cae3f3eefffa3809dc396e65941f36f66c4bdb52 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 16 Jun 2011 12:50:45 +0100 Subject: [PATCH 132/214] Fixes pickling issue with choice fields Removes the dynamic __get_field_display partials before pickling --- mongoengine/base.py | 72 ++++++++++++++++++++++++++++++--------------- tests/document.py | 6 ++-- 2 files changed, 51 insertions(+), 27 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 49efba60..938808a8 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -614,9 +614,6 @@ class BaseDocument(object): self._data = {} # Assign default values to instance for attr_name, field in self._fields.items(): - if field.choices: # dynamically adds a way to get the display value for a field with choices - setattr(self, 'get_%s_display' % attr_name, partial(self._get_FIELD_display, field=field)) - value = getattr(self, attr_name, None) setattr(self, attr_name, value) @@ -628,9 +625,29 @@ class BaseDocument(object): except AttributeError: pass + # Set any get_fieldname_display methods + self.__set_field_display() + signals.post_init.send(self.__class__, document=self) - def _get_FIELD_display(self, field): + def __getstate__(self): + self_dict = self.__dict__ + removals = ["get_%s_display" % k for k,v in self._fields.items() if v.choices] + for k in removals: + if hasattr(self, k): + delattr(self, k) + return self.__dict__ + + def __setstate__(self, __dict__): + self.__dict__ = __dict__ + self.__set_field_display() + + def __set_field_display(self): + for attr_name, field in self._fields.items(): + if field.choices: # dynamically adds a way to get the display value for a field with choices + setattr(self, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field)) + + def __get_field_display(self, field): """Returns the display value for a choice field""" value = getattr(self, field.name) return dict(field.choices).get(value, value) @@ -865,42 +882,46 @@ class BaseList(list): super(BaseList, self).__init__(list_items) def __setitem__(self, *args, **kwargs): - if hasattr(self, 'instance') and hasattr(self, 'name'): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseList, self).__setitem__(*args, **kwargs) def __delitem__(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseList, self).__delitem__(*args, **kwargs) def append(self, *args, 
**kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() return super(BaseList, self).append(*args, **kwargs) def extend(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() return super(BaseList, self).extend(*args, **kwargs) def insert(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() return super(BaseList, self).insert(*args, **kwargs) def pop(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() return super(BaseList, self).pop(*args, **kwargs) def remove(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() return super(BaseList, self).remove(*args, **kwargs) def reverse(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() return super(BaseList, self).reverse(*args, **kwargs) def sort(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() return super(BaseList, self).sort(*args, **kwargs) + def _mark_as_changed(self): + """Marks a list as changed if has an instance and a name""" + if hasattr(self, 'instance') and hasattr(self, 'name'): + self.instance._mark_as_changed(self.name) + class BaseDict(dict): """A special dict so we can watch any changes @@ -912,39 +933,42 @@ class BaseDict(dict): super(BaseDict, self).__init__(dict_items) def __setitem__(self, *args, **kwargs): - if hasattr(self, 'instance') and hasattr(self, 'name'): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).__setitem__(*args, **kwargs) def __setattr__(self, *args, **kwargs): - if hasattr(self, 'instance') and hasattr(self, 'name'): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).__setattr__(*args, **kwargs) def __delete__(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).__delete__(*args, **kwargs) def __delitem__(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).__delitem__(*args, **kwargs) def __delattr__(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).__delattr__(*args, **kwargs) def clear(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).clear(*args, **kwargs) def pop(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).clear(*args, **kwargs) def popitem(self, *args, **kwargs): - self.instance._mark_as_changed(self.name) + self._mark_as_changed() super(BaseDict, self).clear(*args, **kwargs) + def _mark_as_changed(self): + """Marks a dict as changed if has an instance and a name""" + if hasattr(self, 'instance') and hasattr(self, 'name'): + self.instance._mark_as_changed(self.name) + if sys.version_info < (2, 5): # Prior to Python 2.5, Exception was an old-style class import types diff --git a/tests/document.py b/tests/document.py index 3a5419da..b33f3fe7 100644 --- a/tests/document.py +++ b/tests/document.py @@ -15,7 +15,7 @@ class PickleEmbedded(EmbeddedDocument): class PickleTest(Document): number = IntField() - string = StringField() + string = StringField(choices=(('One', '1'), ('Two', '2'))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) @@ -1516,7 +1516,7 @@ class DocumentTest(unittest.TestCase): 
def test_picklable(self): - pickle_doc = PickleTest(number=1, string="OH HAI", lists=['1', '2']) + pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) pickle_doc.embedded = PickleEmbedded() pickle_doc.save() @@ -1525,7 +1525,7 @@ class DocumentTest(unittest.TestCase): self.assertEquals(resurrected, pickle_doc) - resurrected.string = "Working" + resurrected.string = "Two" resurrected.save() pickle_doc.reload() From 5e8604967c5eabb9a71f0b3a87f39e37d6f589bb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 16 Jun 2011 15:00:10 +0100 Subject: [PATCH 133/214] Fixes for django Q query rendering bug Ensures that the QNodes haven't already been processed Fixes #185 --- mongoengine/queryset.py | 3 ++- tests/django_tests.py | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 76d4d1cd..92229a1c 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -251,7 +251,8 @@ class QCombination(QNode): def accept(self, visitor): for i in range(len(self.children)): - self.children[i] = self.children[i].accept(visitor) + if isinstance(self.children[i], QNode): + self.children[i] = self.children[i].accept(visitor) return visitor.visit_combination(self) diff --git a/tests/django_tests.py b/tests/django_tests.py index ee8084ce..930cc113 100644 --- a/tests/django_tests.py +++ b/tests/django_tests.py @@ -53,4 +53,7 @@ class QuerySetTest(unittest.TestCase): t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))} - self.assertEqual(t.render(Context(d)), u'D-10:C-30:') \ No newline at end of file + self.assertEqual(t.render(Context(d)), 'D-10:C-30:') + + # Check double rendering doesn't throw an error + self.assertEqual(t.render(Context(d)), 'D-10:C-30:') \ No newline at end of file From 5cc9188c5b2fa56de911458af3d280fc36a1d3ab Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 16 Jun 2011 15:25:09 +0100 Subject: [PATCH 134/214] Improved validation of (Generic)Reference fields --- mongoengine/fields.py | 14 ++++++++++++++ tests/fields.py | 21 +++++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 26999204..e1b43664 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -604,6 +604,11 @@ class ReferenceField(BaseField): def validate(self, value): assert isinstance(value, (self.document_type, pymongo.dbref.DBRef)) + if isinstance(value, Document) and value.id is None: + raise ValidationError('You can only reference documents once ' + 'they have been saved to the database') + + def lookup_member(self, member_name): return self.document_type._fields.get(member_name) @@ -628,6 +633,15 @@ class GenericReferenceField(BaseField): return super(GenericReferenceField, self).__get__(instance, owner) + def validate(self, value): + if not isinstance(value, (Document, pymongo.dbref.DBRef)): + raise ValidationError('GenericReferences can only contain documents') + + # We need the id from the saved object to create the DBRef + if isinstance(value, Document) and value.id is None: + raise ValidationError('You can only reference documents once ' + 'they have been saved to the database') + def dereference(self, value): doc_cls = get_document(value['_cls']) reference = value['_ref'] diff --git a/tests/fields.py b/tests/fields.py index c13f9e34..22049309 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -377,6 +377,7 @@ class FieldTest(unittest.TestCase): comments = 
ListField(EmbeddedDocumentField(Comment)) tags = ListField(StringField()) authors = ListField(ReferenceField(User)) + generic = ListField(GenericReferenceField()) post = BlogPost(content='Went for a walk today...') post.validate() @@ -404,8 +405,28 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, post.validate) post.authors = [User()] + self.assertRaises(ValidationError, post.validate) + + user = User() + user.save() + post.authors = [user] post.validate() + post.generic = [1, 2] + self.assertRaises(ValidationError, post.validate) + + post.generic = [User(), Comment()] + self.assertRaises(ValidationError, post.validate) + + post.generic = [Comment()] + self.assertRaises(ValidationError, post.validate) + + post.generic = [user] + post.validate() + + User.drop_collection() + BlogPost.drop_collection() + def test_sorted_list_sorting(self): """Ensure that a sorted list field properly sorts values. """ From 62c8823e6423d7445dfc011a74d9a748fe0c8d65 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 17 Jun 2011 11:39:53 +0100 Subject: [PATCH 135/214] Fixing requirements Test requirements are not install requirements! --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 37ec4375..6877b625 100644 --- a/setup.py +++ b/setup.py @@ -45,6 +45,7 @@ setup(name='mongoengine', long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, - install_requires=['pymongo', 'blinker', 'django==1.3'], + install_requires=['pymongo'], test_suite='tests', + tests_require=['blinker', 'django==1.3'] ) From 5e7efcc8c2f4947d9d25bb3e40f0cfa1e759c419 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 17 Jun 2011 12:43:28 +0100 Subject: [PATCH 136/214] Added 'hint' support, telling Mongo the proper index to use for the query. Judicious use of hints can greatly improve query performance. When doing a query on multiple fields (at least one of which is indexed) pass the indexed field as a hint to the query. Hinting will not do anything if the corresponding index does not exist. The last hint applied to this cursor takes precedence over all others. 
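A short usage sketch, using the BlogPost document and its 'tags' index from the test added in this patch, and assuming an open connection::

    # Sketch only -- BlogPost lists 'tags' in meta['indexes'] (see tests/document.py below).
    BlogPost.objects.hint([('tags', 1)]).count()   # tell the server to use the tags index
    BlogPost.objects.hint().count()                # calling hint() with no argument leaves the query unhinted
    # Passing a bare field name such as hint('tags'), or a tuple instead of a list,
    # raises a TypeError, as the accompanying test asserts.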
Closes #203 --- docs/changelog.rst | 1 + mongoengine/queryset.py | 21 ++++++++++++++++++++- tests/document.py | 30 +++++++++++++++++++++++++++++- 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ea926239..a9cfe328 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added hint() support, so cantell Mongo the proper index to use for the query - Fixed issue with inconsitent setting of _cls breaking inherited referencing - Added help_text and verbose_name to fields to help with some form libs - Updated item_frequencies to handle embedded document lookups diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 92229a1c..bfa89b4f 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -347,6 +347,7 @@ class QuerySet(object): self._cursor_obj = None self._limit = None self._skip = None + self._hint = -1 # Using -1 as None is a valid value for hint def clone(self): """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet`""" @@ -354,7 +355,7 @@ class QuerySet(object): copy_props = ('_initial_query', '_query_obj', '_where_clause', '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_limit', '_skip', '_slave_okay') + '_timeout', '_limit', '_skip', '_slave_okay', '_hint') for prop in copy_props: val = getattr(self, prop) @@ -539,6 +540,9 @@ class QuerySet(object): if self._skip is not None: self._cursor_obj.skip(self._skip) + if self._hint != -1: + self._cursor_obj.hint(self._hint) + return self._cursor_obj @classmethod @@ -965,6 +969,21 @@ class QuerySet(object): self._skip = n return self + def hint(self, index=None): + """Added 'hint' support, telling Mongo the proper index to use for the + query. + + Judicious use of hints can greatly improve query performance. When doing + a query on multiple fields (at least one of which is indexed) pass the + indexed field as a hint to the query. + + Hinting will not do anything if the corresponding index does not exist. + The last hint applied to this cursor takes precedence over all others. + """ + self._cursor.hint(index) + self._hint = index + return self + def __getitem__(self, key): """Support skip and limit using getitem and slicing syntax. """ diff --git a/tests/document.py b/tests/document.py index b33f3fe7..5d44ca29 100644 --- a/tests/document.py +++ b/tests/document.py @@ -513,7 +513,6 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() - def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains dictionaries instead of lists. @@ -546,6 +545,35 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + def test_hint(self): + + class BlogPost(Document): + tags = ListField(StringField()) + meta = { + 'indexes': [ + 'tags', + ], + } + + BlogPost.drop_collection() + + for i in xrange(0, 10): + tags = [("tag %i" % n) for n in xrange(0, i % 2)] + BlogPost(tags=tags).save() + + self.assertEquals(BlogPost.objects.count(), 10) + self.assertEquals(BlogPost.objects.hint().count(), 10) + self.assertEquals(BlogPost.objects.hint([('tags', 1)]).count(), 10) + + self.assertEquals(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) + + def invalid_index(): + BlogPost.objects.hint('tags') + self.assertRaises(TypeError, invalid_index) + + def invalid_index_2(): + return BlogPost.objects.hint(('tags', 1)) + self.assertRaises(TypeError, invalid_index_2) def test_unique(self): """Ensure that uniqueness constraints are applied to fields. 
From f3d265bbe01159379062a110d4b1da420c57ff7c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 17 Jun 2011 10:34:29 +0100 Subject: [PATCH 137/214] Added to_dbref Thanks to Ankhbayar for the initial code Closes #202 --- mongoengine/document.py | 8 ++++++++ tests/document.py | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/mongoengine/document.py b/mongoengine/document.py index 69b19e2c..0b408cc2 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -167,6 +167,14 @@ class Document(BaseDocument): value._changed_fields = [] return value + def to_dbref(self): + """Returns an instance of :class:`~pymongo.dbref.DBRef` useful in + `__raw__` queries.""" + if not self.pk: + msg = "Only saved documents can have a valid dbref" + raise OperationError(msg) + return pymongo.dbref.DBRef(self.__class__._meta['collection'], self.pk) + @classmethod def register_delete_rule(cls, document_cls, field_name, rule): """This method registers the delete rules to apply when removing this diff --git a/tests/document.py b/tests/document.py index 5d44ca29..d4140412 100644 --- a/tests/document.py +++ b/tests/document.py @@ -777,6 +777,14 @@ class DocumentTest(unittest.TestCase): self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 30) + def test_to_dbref(self): + """Ensure that you can get a dbref of a document""" + person = self.Person(name="Test User", age=30) + self.assertRaises(OperationError, person.to_dbref) + person.save() + + person.to_dbref() + def test_reload(self): """Ensure that attributes may be reloaded. """ From 99f923e27f365f245e259c24e0e7953b1b145011 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 17 Jun 2011 15:04:07 +0100 Subject: [PATCH 138/214] Fixed queryset repr mid iteration Closes #144 --- docs/changelog.rst | 1 + mongoengine/queryset.py | 5 ++++- tests/queryset.py | 12 ++++++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index a9cfe328..48b58483 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Fixed queryet __repr__ mid iteration - Added hint() support, so cantell Mongo the proper index to use for the query - Fixed issue with inconsitent setting of _cls breaking inherited referencing - Added help_text and verbose_name to fields to help with some form libs diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index bfa89b4f..79d24bba 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1524,7 +1524,10 @@ class QuerySet(object): limit = REPR_OUTPUT_SIZE + 1 if self._limit is not None and self._limit < limit: limit = self._limit - data = list(self[self._skip:limit]) + try: + data = list(self[self._skip:limit]) + except pymongo.errors.InvalidOperation: + return ".. queryset mid-iteration .." if len(data) > REPR_OUTPUT_SIZE: data[-1] = "...(remaining elements truncated)..." return repr(data) diff --git a/tests/queryset.py b/tests/queryset.py index cc219fba..6f0098d5 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -463,6 +463,18 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(people1, people2) + def test_repr_iteration(self): + """Ensure that QuerySet __repr__ can handle loops + """ + self.Person(name='Person 1').save() + self.Person(name='Person 2').save() + + queryset = self.Person.objects + self.assertEquals('[, ]', repr(queryset)) + for person in queryset: + self.assertEquals('.. 
queryset mid-iteration ..', repr(queryset)) + + def test_regex_query_shortcuts(self): """Ensure that contains, startswith, endswith, etc work. """ From c24bc77c17fcff8f2ad1f144f4bbe44ecc942971 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 17 Jun 2011 15:07:27 +0100 Subject: [PATCH 139/214] Fixes depreciation warnings in Django Auth. Closes #156 --- mongoengine/django/auth.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 41d307cc..2711ee18 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -99,6 +99,10 @@ class MongoEngineBackend(object): """Authenticate using MongoEngine and mongoengine.django.auth.User. """ + supports_object_permissions = False + supports_anonymous_user = False + supports_inactive_user = False + def authenticate(self, username=None, password=None): user = User.objects(username=username).first() if user: From e04e5f42efc547c9c5704a07b0b6691ec26aefc4 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 Jun 2011 08:46:40 +0100 Subject: [PATCH 140/214] Added test inheriting document from another file works Closes #28 --- tests/document.py | 58 ++++++++++++++++++++++++++++++++++++++--------- tests/fixtures.py | 26 +++++++++++++++++++++ 2 files changed, 73 insertions(+), 11 deletions(-) create mode 100644 tests/fixtures.py diff --git a/tests/document.py b/tests/document.py index d4140412..8f9364fe 100644 --- a/tests/document.py +++ b/tests/document.py @@ -4,22 +4,13 @@ import pymongo import pickle import weakref +from fixtures import Base, Mixin, PickleEmbedded, PickleTest + from mongoengine import * from mongoengine.base import BaseField from mongoengine.connection import _get_db -class PickleEmbedded(EmbeddedDocument): - date = DateTimeField(default=datetime.now) - - -class PickleTest(Document): - number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) - embedded = EmbeddedDocumentField(PickleEmbedded) - lists = ListField(StringField()) - - class DocumentTest(unittest.TestCase): def setUp(self): @@ -108,6 +99,51 @@ class DocumentTest(unittest.TestCase): } self.assertEqual(Animal._get_subclasses(), animal_subclasses) + def test_external_super_and_sub_classes(self): + """Ensure that the correct list of sub and super classes is assembled. 
+ when importing part of the model + """ + class Animal(Base): pass + class Fish(Animal): pass + class Mammal(Animal): pass + class Human(Mammal): pass + class Dog(Mammal): pass + + mammal_superclasses = {'Base': Base, 'Base.Animal': Animal} + self.assertEqual(Mammal._superclasses, mammal_superclasses) + + dog_superclasses = { + 'Base': Base, + 'Base.Animal': Animal, + 'Base.Animal.Mammal': Mammal, + } + self.assertEqual(Dog._superclasses, dog_superclasses) + + animal_subclasses = { + 'Base.Animal.Fish': Fish, + 'Base.Animal.Mammal': Mammal, + 'Base.Animal.Mammal.Dog': Dog, + 'Base.Animal.Mammal.Human': Human + } + self.assertEqual(Animal._get_subclasses(), animal_subclasses) + + mammal_subclasses = { + 'Base.Animal.Mammal.Dog': Dog, + 'Base.Animal.Mammal.Human': Human + } + self.assertEqual(Mammal._get_subclasses(), mammal_subclasses) + + Base.drop_collection() + + h = Human() + h.save() + + self.assertEquals(Human.objects.count(), 1) + self.assertEquals(Mammal.objects.count(), 1) + self.assertEquals(Animal.objects.count(), 1) + self.assertEquals(Base.objects.count(), 1) + Base.drop_collection() + def test_polymorphic_queries(self): """Ensure that the correct subclasses are returned from a query""" class Animal(Document): pass diff --git a/tests/fixtures.py b/tests/fixtures.py new file mode 100644 index 00000000..483b7184 --- /dev/null +++ b/tests/fixtures.py @@ -0,0 +1,26 @@ +from datetime import datetime +import pymongo + +from mongoengine import * +from mongoengine.base import BaseField +from mongoengine.connection import _get_db + + +class PickleEmbedded(EmbeddedDocument): + date = DateTimeField(default=datetime.now) + + +class PickleTest(Document): + number = IntField() + string = StringField(choices=(('One', '1'), ('Two', '2'))) + embedded = EmbeddedDocumentField(PickleEmbedded) + lists = ListField(StringField()) + + +class Mixin(object): + number = IntField() + string = StringField(choices=(('One', '1'), ('Two', '2'))) + + +class Base(Document): + pass From 1b0323bc22a8281a2f3fdc7cde635772639eba89 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 Jun 2011 09:44:53 +0100 Subject: [PATCH 141/214] Added document mixin support For extendable / reusable documents Fixes #204 --- docs/changelog.rst | 1 + mongoengine/base.py | 4 ++++ tests/document.py | 30 ++++++++++++++++++++++++++++++ tests/fixtures.py | 3 +-- 4 files changed, 36 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 48b58483..e3cd7232 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added Document Mixin support - Fixed queryet __repr__ mid iteration - Added hint() support, so cantell Mongo the proper index to use for the query - Fixed issue with inconsitent setting of _cls breaking inherited referencing diff --git a/mongoengine/base.py b/mongoengine/base.py index 938808a8..f8d415b0 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -389,6 +389,7 @@ class DocumentMetaclass(type): class_name = [name] superclasses = {} simple_class = True + for base in bases: # Include all fields present in superclasses if hasattr(base, '_fields'): @@ -397,6 +398,9 @@ class DocumentMetaclass(type): # Get superclasses from superclass superclasses[base._class_name] = base superclasses.update(base._superclasses) + else: # Add any mixin fields + attrs.update(dict([(k,v) for k,v in base.__dict__.items() + if issubclass(v.__class__, BaseField)])) if hasattr(base, '_meta') and not base._meta.get('abstract'): # Ensure that the 
Document class may be subclassed - diff --git a/tests/document.py b/tests/document.py index 8f9364fe..c5aa6e89 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1380,6 +1380,36 @@ class DocumentTest(unittest.TestCase): promoted_employee.reload() self.assertEqual(promoted_employee.details, None) + def test_mixins_dont_add_to_types(self): + + class Bob(Document): name = StringField() + + Bob.drop_collection() + + p = Bob(name="Rozza") + p.save() + Bob.drop_collection() + + class Person(Document, Mixin): + pass + + Person.drop_collection() + + p = Person(name="Rozza") + p.save() + self.assertEquals(p._fields.keys(), ['name', 'id']) + + collection = self.db[Person._meta['collection']] + obj = collection.find_one() + self.assertEquals(obj['_cls'], 'Person') + self.assertEquals(obj['_types'], ['Person']) + + + + self.assertEquals(Person.objects.count(), 1) + rozza = Person.objects.get(name="Rozza") + + Person.drop_collection() def test_save_reference(self): """Ensure that a document reference field may be saved in the database. diff --git a/tests/fixtures.py b/tests/fixtures.py index 483b7184..5aaba556 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -18,8 +18,7 @@ class PickleTest(Document): class Mixin(object): - number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) + name = StringField() class Base(Document): From f41c5217c6f0155d4aa7909fb52a556a79c67aba Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 Jun 2011 11:48:12 +0100 Subject: [PATCH 142/214] Added a cleaner way to get collection names Also handles dynamic collection naming - refs #180. --- mongoengine/base.py | 26 +++++++++++--- mongoengine/document.py | 46 ++++++++++++++++++++++-- mongoengine/fields.py | 8 +++-- mongoengine/queryset.py | 40 ++------------------- tests/document.py | 79 +++++++++++++++++++++++++++++------------ tests/queryset.py | 4 +-- 6 files changed, 130 insertions(+), 73 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index f8d415b0..e59119eb 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -22,6 +22,7 @@ class ValidationError(Exception): _document_registry = {} + def get_document(name): doc = _document_registry.get(name, None) if not doc: @@ -195,7 +196,7 @@ class ComplexBaseField(BaseField): elif isinstance(v, (dict, pymongo.son.SON)): if '_ref' in v: # generic reference - collection = get_document(v['_cls'])._meta['collection'] + collection = get_document(v['_cls'])._get_collection_name() collections.setdefault(collection, []).append((k,v)) else: # Use BaseDict so can watch any changes @@ -257,7 +258,7 @@ class ComplexBaseField(BaseField): if v.pk is None: raise ValidationError('You can only reference documents once ' 'they have been saved to the database') - collection = v._meta['collection'] + collection = v._get_collection_name() value_dict[k] = pymongo.dbref.DBRef(collection, v.pk) elif hasattr(v, 'to_python'): value_dict[k] = v.to_python() @@ -306,7 +307,7 @@ class ComplexBaseField(BaseField): from fields import GenericReferenceField value_dict[k] = GenericReferenceField().to_mongo(v) else: - collection = v._meta['collection'] + collection = v._get_collection_name() value_dict[k] = pymongo.dbref.DBRef(collection, v.pk) elif hasattr(v, 'to_mongo'): value_dict[k] = v.to_mongo() @@ -500,9 +501,14 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # Subclassed documents inherit collection from superclass for base in bases: if hasattr(base, '_meta'): - if 'collection' in base._meta: - collection = base._meta['collection'] 
+ if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False): + import warnings + msg = "Trying to set a collection on a subclass (%s)" % name + warnings.warn(msg, SyntaxWarning) + del(attrs['meta']['collection']) + if base._get_collection_name(): + collection = base._get_collection_name() # Propagate index options. for key in ('index_background', 'index_drop_dups', 'index_opts'): if key in base._meta: @@ -539,6 +545,10 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # DocumentMetaclass before instantiating CollectionManager object new_class = super_new(cls, name, bases, attrs) + collection = attrs['_meta'].get('collection', None) + if callable(collection): + new_class._meta['collection'] = collection(new_class) + # Provide a default queryset unless one has been manually provided manager = attrs.get('objects', QuerySetManager()) if hasattr(manager, 'queryset_class'): @@ -675,6 +685,12 @@ class BaseDocument(object): elif field.required: raise ValidationError('Field "%s" is required' % field.name) + @classmethod + def _get_collection_name(cls): + """Returns the collection name for this class. + """ + return cls._meta.get('collection', None) + @classmethod def _get_subclasses(cls): """Return a dictionary of all subclasses (found recursively). diff --git a/mongoengine/document.py b/mongoengine/document.py index 0b408cc2..36bf4017 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -6,7 +6,12 @@ from connection import _get_db import pymongo -__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError'] +__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', + 'OperationError', 'InvalidCollectionError'] + + +class InvalidCollectionError(Exception): + pass class EmbeddedDocument(BaseDocument): @@ -72,6 +77,41 @@ class Document(BaseDocument): """ __metaclass__ = TopLevelDocumentMetaclass + @classmethod + def _get_collection(self): + """Returns the collection for the document.""" + db = _get_db() + collection_name = self._get_collection_name() + + if not hasattr(self, '_collection') or self._collection is None: + # Create collection as a capped collection if specified + if self._meta['max_size'] or self._meta['max_documents']: + # Get max document limit and max byte size from meta + max_size = self._meta['max_size'] or 10000000 # 10MB default + max_documents = self._meta['max_documents'] + + if collection_name in db.collection_names(): + self._collection = db[collection_name] + # The collection already exists, check if its capped + # options match the specified capped options + options = self._collection.options() + if options.get('max') != max_documents or \ + options.get('size') != max_size: + msg = ('Cannot create collection "%s" as a capped ' + 'collection as it already exists') % self._collection + raise InvalidCollectionError(msg) + else: + # Create the collection as a capped collection + opts = {'capped': True, 'size': max_size} + if max_documents: + opts['max'] = max_documents + self._collection = db.create_collection( + collection_name, **opts + ) + else: + self._collection = db[collection_name] + return self._collection + def save(self, safe=True, force_insert=False, validate=True, write_options=None): """Save the :class:`~mongoengine.Document` to the database. 
If the document already exists, it will be updated, otherwise it will be @@ -173,7 +213,7 @@ class Document(BaseDocument): if not self.pk: msg = "Only saved documents can have a valid dbref" raise OperationError(msg) - return pymongo.dbref.DBRef(self.__class__._meta['collection'], self.pk) + return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk) @classmethod def register_delete_rule(cls, document_cls, field_name, rule): @@ -188,7 +228,7 @@ class Document(BaseDocument): :class:`~mongoengine.Document` type from the database. """ db = _get_db() - db.drop_collection(cls._meta['collection']) + db.drop_collection(cls._get_collection_name()) class MapReduceDocument(object): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index e1b43664..50a30a13 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -252,7 +252,7 @@ class DateTimeField(BaseField): return datetime.datetime(value.year, value.month, value.day) # Attempt to parse a datetime: - #value = smart_str(value) + # value = smart_str(value) # split usecs, because they are not recognized by strptime. if '.' in value: try: @@ -278,6 +278,7 @@ class DateTimeField(BaseField): return None + class ComplexDateTimeField(StringField): """ ComplexDateTimeField handles microseconds exactly instead of rounding @@ -526,6 +527,7 @@ class MapField(DictField): super(MapField, self).__init__(field=field, *args, **kwargs) + class ReferenceField(BaseField): """A reference to a document that will be automatically dereferenced on access (lazily). @@ -595,7 +597,7 @@ class ReferenceField(BaseField): id_ = document id_ = id_field.to_mongo(id_) - collection = self.document_type._meta['collection'] + collection = self.document_type._get_collection_name() return pymongo.dbref.DBRef(collection, id_) def prepare_query_value(self, op, value): @@ -664,7 +666,7 @@ class GenericReferenceField(BaseField): id_ = document id_ = id_field.to_mongo(id_) - collection = document._meta['collection'] + collection = document._get_collection_name() ref = pymongo.dbref.DBRef(collection, id_) return {'_cls': document._class_name, '_ref': ref} diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 79d24bba..2a5d3edb 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -11,7 +11,7 @@ import itertools import operator __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', - 'InvalidCollectionError', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] + 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] # The maximum number of items to display in a QuerySet.__repr__ @@ -40,10 +40,6 @@ class OperationError(Exception): pass -class InvalidCollectionError(Exception): - pass - - RE_TYPE = type(re.compile('')) @@ -1360,7 +1356,7 @@ class QuerySet(object): fields = [QuerySet._translate_field_name(self._document, f) for f in fields] - collection = self._document._meta['collection'] + collection = self._document._get_collection_name() scope = { 'collection': collection, @@ -1550,39 +1546,9 @@ class QuerySetManager(object): # Document class being used rather than a document object return self - db = _get_db() - collection = owner._meta['collection'] - if (db, collection) not in self._collections: - # Create collection as a capped collection if specified - if owner._meta['max_size'] or owner._meta['max_documents']: - # Get max document limit and max byte size from meta - max_size = owner._meta['max_size'] or 10000000 # 10MB default - max_documents = owner._meta['max_documents'] - - if collection in db.collection_names(): - self._collections[(db, 
collection)] = db[collection] - # The collection already exists, check if its capped - # options match the specified capped options - options = self._collections[(db, collection)].options() - if options.get('max') != max_documents or \ - options.get('size') != max_size: - msg = ('Cannot create collection "%s" as a capped ' - 'collection as it already exists') % collection - raise InvalidCollectionError(msg) - else: - # Create the collection as a capped collection - opts = {'capped': True, 'size': max_size} - if max_documents: - opts['max'] = max_documents - self._collections[(db, collection)] = db.create_collection( - collection, **opts - ) - else: - self._collections[(db, collection)] = db[collection] - # owner is the document that contains the QuerySetManager queryset_class = owner._meta['queryset_class'] or QuerySet - queryset = queryset_class(owner, self._collections[(db, collection)]) + queryset = queryset_class(owner, owner._get_collection()) if self.get_queryset: if self.get_queryset.func_code.co_argcount == 1: queryset = self.get_queryset(queryset) diff --git a/tests/document.py b/tests/document.py index c5aa6e89..c10c903f 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1,5 +1,10 @@ +import pickle +import pymongo import unittest +import warnings + from datetime import datetime + import pymongo import pickle import weakref @@ -30,7 +35,7 @@ class DocumentTest(unittest.TestCase): """ self.Person(name='Test').save() - collection = self.Person._meta['collection'] + collection = self.Person._get_collection_name() self.assertTrue(collection in self.db.collection_names()) self.Person.drop_collection() @@ -57,6 +62,23 @@ class DocumentTest(unittest.TestCase): # Ensure Document isn't treated like an actual document self.assertFalse(hasattr(Document, '_fields')) + def test_dynamic_collection_naming(self): + + def create_collection_name(cls): + return "PERSON" + + class DynamicPerson(Document): + name = StringField() + age = IntField() + + meta = {'collection': create_collection_name} + + collection = DynamicPerson._get_collection_name() + self.assertEquals(collection, 'PERSON') + + DynamicPerson(name='Test User', age=30).save() + self.assertTrue(collection in self.db.collection_names()) + def test_get_superclasses(self): """Ensure that the correct list of superclasses is assembled. 
""" @@ -225,8 +247,8 @@ class DocumentTest(unittest.TestCase): self.assertTrue('name' in Employee._fields) self.assertTrue('salary' in Employee._fields) - self.assertEqual(Employee._meta['collection'], - self.Person._meta['collection']) + self.assertEqual(Employee._get_collection_name(), + self.Person._get_collection_name()) # Ensure that MRO error is not raised class A(Document): pass @@ -251,7 +273,7 @@ class DocumentTest(unittest.TestCase): # Check that _cls etc aren't present on simple documents dog = Animal(name='dog') dog.save() - collection = self.db[Animal._meta['collection']] + collection = self.db[Animal._get_collection_name()] obj = collection.find_one() self.assertFalse('_cls' in obj) self.assertFalse('_types' in obj) @@ -297,7 +319,7 @@ class DocumentTest(unittest.TestCase): # Check that _cls etc aren't present on simple documents dog = Animal(name='dog') dog.save() - collection = self.db[Animal._meta['collection']] + collection = self.db[Animal._get_collection_name()] obj = collection.find_one() self.assertFalse('_cls' in obj) self.assertFalse('_types' in obj) @@ -318,7 +340,7 @@ class DocumentTest(unittest.TestCase): dog = Animal(name='dog') dog.save() - collection = self.db[Animal._meta['collection']] + collection = self.db[Animal._get_collection_name()] obj = collection.find_one() self.assertTrue('_cls' in obj) self.assertTrue('_types' in obj) @@ -381,9 +403,12 @@ class DocumentTest(unittest.TestCase): self.assertFalse('collection' in Animal._meta) self.assertFalse('collection' in Mammal._meta) - self.assertEqual(Fish._meta['collection'], 'fish') - self.assertEqual(Guppy._meta['collection'], 'fish') - self.assertEqual(Human._meta['collection'], 'human') + self.assertEqual(Animal._get_collection_name(), None) + self.assertEqual(Mammal._get_collection_name(), None) + + self.assertEqual(Fish._get_collection_name(), 'fish') + self.assertEqual(Guppy._get_collection_name(), 'fish') + self.assertEqual(Human._get_collection_name(), 'human') def create_bad_abstract(): class EvilHuman(Human): @@ -434,14 +459,21 @@ class DocumentTest(unittest.TestCase): def test_inherited_collections(self): """Ensure that subclassed documents don't override parents' collections. """ - class Drink(Document): - name = StringField() + with warnings.catch_warnings(record=True) as w: + # Cause all warnings to always be triggered. 
+ warnings.simplefilter("always") - class AlcoholicDrink(Drink): - meta = {'collection': 'booze'} + class Drink(Document): + name = StringField() - class Drinker(Document): - drink = GenericReferenceField() + class AlcoholicDrink(Drink): + meta = {'collection': 'booze'} + + class Drinker(Document): + drink = GenericReferenceField() + + # Confirm we triggered a SyntaxWarning + assert issubclass(w[0].category, SyntaxWarning) Drink.drop_collection() AlcoholicDrink.drop_collection() @@ -455,7 +487,6 @@ class DocumentTest(unittest.TestCase): beer = AlcoholicDrink(name='Beer') beer.save() - real_person = Drinker(drink=beer) real_person.save() @@ -936,7 +967,7 @@ class DocumentTest(unittest.TestCase): person = self.Person(name='Test User', age=30) person.save() # Ensure that the object is in the database - collection = self.db[self.Person._meta['collection']] + collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(person_obj['name'], 'Test User') self.assertEqual(person_obj['age'], 30) @@ -1279,7 +1310,7 @@ class DocumentTest(unittest.TestCase): id='497ce96f395f2f052a494fd4') person.save() # Ensure that the object is in the database with the correct _id - collection = self.db[self.Person._meta['collection']] + collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') @@ -1291,7 +1322,7 @@ class DocumentTest(unittest.TestCase): pk='497ce96f395f2f052a494fd4') person.save() # Ensure that the object is in the database with the correct _id - collection = self.db[self.Person._meta['collection']] + collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') @@ -1314,7 +1345,7 @@ class DocumentTest(unittest.TestCase): post.comments = comments post.save() - collection = self.db[BlogPost._meta['collection']] + collection = self.db[BlogPost._get_collection_name()] post_obj = collection.find_one() self.assertEqual(post_obj['tags'], tags) for comment_obj, comment in zip(post_obj['comments'], comments): @@ -1339,7 +1370,7 @@ class DocumentTest(unittest.TestCase): employee.save() # Ensure that the object is in the database - collection = self.db[self.Person._meta['collection']] + collection = self.db[self.Person._get_collection_name()] employee_obj = collection.find_one({'name': 'Test Employee'}) self.assertEqual(employee_obj['name'], 'Test Employee') self.assertEqual(employee_obj['age'], 50) @@ -1370,6 +1401,7 @@ class DocumentTest(unittest.TestCase): promoted_employee.reload() self.assertEqual(promoted_employee.name, 'Test Employee') self.assertEqual(promoted_employee.age, 50) + # Ensure that the 'details' embedded object saved correctly self.assertEqual(promoted_employee.details.position, 'Senior Developer') @@ -1399,7 +1431,7 @@ class DocumentTest(unittest.TestCase): p.save() self.assertEquals(p._fields.keys(), ['name', 'id']) - collection = self.db[Person._meta['collection']] + collection = self.db[Person._get_collection_name()] obj = collection.find_one() self.assertEquals(obj['_cls'], 'Person') self.assertEquals(obj['_types'], ['Person']) @@ -1492,6 +1524,9 @@ class DocumentTest(unittest.TestCase): text = StringField() post = ReferenceField(BlogPost, reverse_delete_rule=CASCADE) + self.Person.drop_collection() + BlogPost.drop_collection() + Comment.drop_collection() author = 
self.Person(name='Test User') author.save() diff --git a/tests/queryset.py b/tests/queryset.py index 6f0098d5..c5f177c2 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- - - import unittest import pymongo from datetime import datetime, timedelta @@ -27,7 +25,7 @@ class QuerySetTest(unittest.TestCase): """ self.assertTrue(isinstance(self.Person.objects, QuerySet)) self.assertEqual(self.Person.objects._collection.name, - self.Person._meta['collection']) + self.Person._get_collection_name()) self.assertTrue(isinstance(self.Person.objects._collection, pymongo.collection.Collection)) From e3cd398f70594693a4f3539b9796e0143b659992 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 Jun 2011 14:00:06 +0100 Subject: [PATCH 143/214] Changed default collection naming Also added upgrade text --- docs/changelog.rst | 1 + docs/index.rst | 5 ++-- docs/upgrade.rst | 73 +++++++++++++++++++++++++++++++++++++++++++++ mongoengine/base.py | 2 +- tests/document.py | 72 +++++++++++++++++++++++++++++++++++++------- 5 files changed, 139 insertions(+), 14 deletions(-) create mode 100644 docs/upgrade.rst diff --git a/docs/changelog.rst b/docs/changelog.rst index e3cd7232..cfae79e0 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Updated default collection naming convention - Added Document Mixin support - Fixed queryet __repr__ mid iteration - Added hint() support, so cantell Mongo the proper index to use for the query diff --git a/docs/index.rst b/docs/index.rst index ccb7fbe2..3b036564 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,7 +2,7 @@ MongoEngine User Documentation ============================== -MongoEngine is an Object-Document Mapper, written in Python for working with +MongoEngine is an Object-Document Mapper, written in Python for working with MongoDB. To install it, simply run .. code-block:: console @@ -15,7 +15,7 @@ To get help with using MongoEngine, use the `MongoEngine Users mailing list `_ or come chat on the `#mongoengine IRC channel `_. -If you are interested in contributing, join the developers' `mailing list +If you are interested in contributing, join the developers' `mailing list `_. .. toctree:: @@ -26,6 +26,7 @@ If you are interested in contributing, join the developers' `mailing list apireference django changelog + upgrading Indices and tables ================== diff --git a/docs/upgrade.rst b/docs/upgrade.rst new file mode 100644 index 00000000..f005e2e2 --- /dev/null +++ b/docs/upgrade.rst @@ -0,0 +1,73 @@ +========= +Upgrading +========= + +0.4 to 0.5 +=========== + +There have been the following backwards incompatibilities from 0.4 to 0.5: + +#. Default collection naming. 
+ +Previously it was just lowercase, its now much more pythonic and readable as its +lowercase and underscores, previously :: + + class MyAceDocument(Document): + pass + + MyAceDocument._meta['collection'] == myacedocument + +In 0.5 this will change to :: + + class MyAceDocument(Document): + pass + + MyAceDocument._get_collection_name() == my_ace_document + +To upgrade use a Mixin class to set meta like so :: + + class BaseMixin(object): + meta = { + 'collection': lambda c: c.__name__.lower() + } + + class MyAceDocument(Document, BaseMixin): + pass + + MyAceDocument._get_collection_name() == myacedocument + +Alternatively, you can rename your collections eg :: + + from mongoengine.connection import _get_db + from mongoengine.base import _document_registry + + def rename_collections(): + db = _get_db() + + failure = False + + collection_names = [d._get_collection_name() for d in _document_registry.values()] + + for new_style_name in collection_names: + if not new_style_name: # embedded documents don't have collections + continue + old_style_name = new_style_name.replace('_', '') + + if old_style_name == new_style_name: + continue # Nothing to do + + existing = db.collection_names() + if old_style_name in existing: + if new_style_name in existing: + failure = True + print "FAILED to rename: %s to %s (already exists)" % ( + old_style_name, new_style_name) + else: + db[old_style_name].rename(new_style_name) + print "Renamed: %s to %s" % (old_style_name, new_style_name) + + if failure: + print "Upgrading collection names failed" + else: + print "Upgraded collection names" + diff --git a/mongoengine/base.py b/mongoengine/base.py index e59119eb..94f00cbf 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -492,7 +492,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): raise ValueError("Abstract document cannot have non-abstract base") return super_new(cls, name, bases, attrs) - collection = name.lower() + collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower() id_field = None base_indexes = [] diff --git a/tests/document.py b/tests/document.py index c10c903f..28d61332 100644 --- a/tests/document.py +++ b/tests/document.py @@ -62,22 +62,72 @@ class DocumentTest(unittest.TestCase): # Ensure Document isn't treated like an actual document self.assertFalse(hasattr(Document, '_fields')) - def test_dynamic_collection_naming(self): + def test_collection_name(self): + """Ensure that a collection with a specified name may be used. 
+ """ - def create_collection_name(cls): - return "PERSON" + class DefaultNamingTest(Document): + pass + self.assertEquals('default_naming_test', DefaultNamingTest._get_collection_name()) - class DynamicPerson(Document): - name = StringField() - age = IntField() + class CustomNamingTest(Document): + meta = {'collection': 'pimp_my_collection'} - meta = {'collection': create_collection_name} + self.assertEquals('pimp_my_collection', CustomNamingTest._get_collection_name()) - collection = DynamicPerson._get_collection_name() - self.assertEquals(collection, 'PERSON') + class DynamicNamingTest(Document): + meta = {'collection': lambda c: "DYNAMO"} + self.assertEquals('DYNAMO', DynamicNamingTest._get_collection_name()) - DynamicPerson(name='Test User', age=30).save() - self.assertTrue(collection in self.db.collection_names()) + # Use Abstract class to handle backwards compatibility + class BaseDocument(Document): + meta = { + 'abstract': True, + 'collection': lambda c: c.__name__.lower() + } + + class OldNamingConvention(BaseDocument): + pass + self.assertEquals('oldnamingconvention', OldNamingConvention._get_collection_name()) + + class InheritedAbstractNamingTest(BaseDocument): + meta = {'collection': 'wibble'} + self.assertEquals('wibble', InheritedAbstractNamingTest._get_collection_name()) + + with warnings.catch_warnings(record=True) as w: + # Cause all warnings to always be triggered. + warnings.simplefilter("always") + + class NonAbstractBase(Document): + pass + + class InheritedDocumentFailTest(NonAbstractBase): + meta = {'collection': 'fail'} + + self.assertTrue(issubclass(w[0].category, SyntaxWarning)) + self.assertEquals('non_abstract_base', InheritedDocumentFailTest._get_collection_name()) + + # Mixin tests + class BaseMixin(object): + meta = { + 'collection': lambda c: c.__name__.lower() + } + + class OldMixinNamingConvention(Document, BaseMixin): + pass + self.assertEquals('oldmixinnamingconvention', OldMixinNamingConvention._get_collection_name()) + + class BaseMixin(object): + meta = { + 'collection': lambda c: c.__name__.lower() + } + + class BaseDocument(Document, BaseMixin): + pass + + class MyDocument(BaseDocument): + pass + self.assertEquals('mydocument', OldMixinNamingConvention._get_collection_name()) def test_get_superclasses(self): """Ensure that the correct list of superclasses is assembled. 
From 08ba51f714731a6fc27340cae8cdb47bf1d60302 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 20 Jun 2011 15:41:23 +0100 Subject: [PATCH 144/214] Updated geo_index checking to be recursive Fixes #127 - Embedded Documents can declare geo indexes and have them created automatically --- docs/changelog.rst | 1 + mongoengine/base.py | 194 +++++++++++++++++++++------------------- mongoengine/queryset.py | 11 ++- tests/fields.py | 21 +++++ 4 files changed, 129 insertions(+), 98 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index cfae79e0..0737171c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Updated geo index checking to be recursive and check in embedded documents - Updated default collection naming convention - Added Document Mixin support - Fixed queryet __repr__ mid iteration diff --git a/mongoengine/base.py b/mongoengine/base.py index 94f00cbf..12c760aa 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -644,28 +644,6 @@ class BaseDocument(object): signals.post_init.send(self.__class__, document=self) - def __getstate__(self): - self_dict = self.__dict__ - removals = ["get_%s_display" % k for k,v in self._fields.items() if v.choices] - for k in removals: - if hasattr(self, k): - delattr(self, k) - return self.__dict__ - - def __setstate__(self, __dict__): - self.__dict__ = __dict__ - self.__set_field_display() - - def __set_field_display(self): - for attr_name, field in self._fields.items(): - if field.choices: # dynamically adds a way to get the display value for a field with choices - setattr(self, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field)) - - def __get_field_display(self, field): - """Returns the display value for a choice field""" - value = getattr(self, field.name) - return dict(field.choices).get(value, value) - def validate(self): """Ensure that all fields' values are valid and that required fields are present. @@ -685,6 +663,33 @@ class BaseDocument(object): elif field.required: raise ValidationError('Field "%s" is required' % field.name) + @apply + def pk(): + """Primary key alias + """ + def fget(self): + return getattr(self, self._meta['id_field']) + def fset(self, value): + return setattr(self, self._meta['id_field'], value) + return property(fget, fset) + + def to_mongo(self): + """Return data dictionary ready for use with MongoDB. + """ + data = {} + for field_name, field in self._fields.items(): + value = getattr(self, field_name, None) + if value is not None: + data[field.db_field] = field.to_mongo(value) + # Only add _cls and _types if allow_inheritance is not False + if not (hasattr(self, '_meta') and + self._meta.get('allow_inheritance', True) == False): + data['_cls'] = self._class_name + data['_types'] = self._superclasses.keys() + [self._class_name] + if '_id' in data and data['_id'] is None: + del data['_id'] + return data + @classmethod def _get_collection_name(cls): """Returns the collection name for this class. @@ -706,76 +711,6 @@ class BaseDocument(object): all_subclasses.update(subclass._get_subclasses()) return all_subclasses - @apply - def pk(): - """Primary key alias - """ - def fget(self): - return getattr(self, self._meta['id_field']) - def fset(self, value): - return setattr(self, self._meta['id_field'], value) - return property(fget, fset) - - def __iter__(self): - return iter(self._fields) - - def __getitem__(self, name): - """Dictionary-style field access, return a field's value if present. 
- """ - try: - if name in self._fields: - return getattr(self, name) - except AttributeError: - pass - raise KeyError(name) - - def __setitem__(self, name, value): - """Dictionary-style field access, set a field's value. - """ - # Ensure that the field exists before settings its value - if name not in self._fields: - raise KeyError(name) - return setattr(self, name, value) - - def __contains__(self, name): - try: - val = getattr(self, name) - return val is not None - except AttributeError: - return False - - def __len__(self): - return len(self._data) - - def __repr__(self): - try: - u = unicode(self) - except (UnicodeEncodeError, UnicodeDecodeError): - u = '[Bad Unicode data]' - return u'<%s: %s>' % (self.__class__.__name__, u) - - def __str__(self): - if hasattr(self, '__unicode__'): - return unicode(self).encode('utf-8') - return '%s object' % self.__class__.__name__ - - def to_mongo(self): - """Return data dictionary ready for use with MongoDB. - """ - data = {} - for field_name, field in self._fields.items(): - value = getattr(self, field_name, None) - if value is not None: - data[field.db_field] = field.to_mongo(value) - # Only add _cls and _types if allow_inheritance is not False - if not (hasattr(self, '_meta') and - self._meta.get('allow_inheritance', True) == False): - data['_cls'] = self._class_name - data['_types'] = self._superclasses.keys() + [self._class_name] - if '_id' in data and data['_id'] is None: - del data['_id'] - return data - @classmethod def _from_son(cls, son): """Create an instance of a Document (subclass) from a PyMongo SON. @@ -874,6 +809,81 @@ class BaseDocument(object): unset_data[k] = 1 return set_data, unset_data + @classmethod + def _geo_indices(cls): + geo_indices = [] + for field in cls._fields.values(): + if hasattr(field, 'document_type'): + geo_indices += field.document_type._geo_indices() + elif field._geo_index: + geo_indices.append(field) + return geo_indices + + def __getstate__(self): + self_dict = self.__dict__ + removals = ["get_%s_display" % k for k,v in self._fields.items() if v.choices] + for k in removals: + if hasattr(self, k): + delattr(self, k) + return self.__dict__ + + def __setstate__(self, __dict__): + self.__dict__ = __dict__ + self.__set_field_display() + + def __set_field_display(self): + for attr_name, field in self._fields.items(): + if field.choices: # dynamically adds a way to get the display value for a field with choices + setattr(self, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field)) + + def __get_field_display(self, field): + """Returns the display value for a choice field""" + value = getattr(self, field.name) + return dict(field.choices).get(value, value) + + def __iter__(self): + return iter(self._fields) + + def __getitem__(self, name): + """Dictionary-style field access, return a field's value if present. + """ + try: + if name in self._fields: + return getattr(self, name) + except AttributeError: + pass + raise KeyError(name) + + def __setitem__(self, name, value): + """Dictionary-style field access, set a field's value. 
+ """ + # Ensure that the field exists before settings its value + if name not in self._fields: + raise KeyError(name) + return setattr(self, name, value) + + def __contains__(self, name): + try: + val = getattr(self, name) + return val is not None + except AttributeError: + return False + + def __len__(self): + return len(self._data) + + def __repr__(self): + try: + u = unicode(self) + except (UnicodeEncodeError, UnicodeDecodeError): + u = '[Bad Unicode data]' + return u'<%s: %s>' % (self.__class__.__name__, u) + + def __str__(self): + if hasattr(self, '__unicode__'): + return unicode(self).encode('utf-8') + return '%s object' % self.__class__.__name__ + def __eq__(self, other): if isinstance(other, self.__class__) and hasattr(other, 'id'): if self.id == other.id: diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 2a5d3edb..e2947a00 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -494,12 +494,11 @@ class QuerySet(object): self._collection.ensure_index('_types', background=background, **index_opts) - # Ensure all needed field indexes are created - for field in self._document._fields.values(): - if field.__class__._geo_index: - index_spec = [(field.db_field, pymongo.GEO2D)] - self._collection.ensure_index(index_spec, - background=background, **index_opts) + # Add geo indicies + for field in self._document._geo_indices(): + index_spec = [(field.db_field, pymongo.GEO2D)] + self._collection.ensure_index(index_spec, + background=background, **index_opts) return self._collection_obj diff --git a/tests/fields.py b/tests/fields.py index 22049309..fe53d9e7 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1312,6 +1312,27 @@ class FieldTest(unittest.TestCase): Event.drop_collection() + def test_geo_embedded_indexes(self): + """Ensure that indexes are created automatically for GeoPointFields on + embedded documents. 
+ """ + class Venue(EmbeddedDocument): + location = GeoPointField() + name = StringField() + + class Event(Document): + title = StringField() + venue = EmbeddedDocumentField(Venue) + + Event.drop_collection() + venue = Venue(name="Double Door", location=[41.909889, -87.677137]) + event = Event(title="Coltrane Motion", venue=venue) + event.save() + + info = Event.objects._collection.index_information() + self.assertTrue(u'location_2d' in info) + self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')]) + def test_ensure_unique_default_instances(self): """Ensure that every field has it's own unique default instance.""" class D(Document): From 09c32a63cedcb2faa8f58e4f4444cd0788eb4df3 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 21 Jun 2011 12:34:14 +0100 Subject: [PATCH 145/214] Fixes bug with appending post save - due to lists not being reset --- mongoengine/document.py | 13 ++++++++++++- tests/fields.py | 10 ++++++++-- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 36bf4017..e20500d6 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -161,7 +161,18 @@ class Document(BaseDocument): raise OperationError(message % unicode(err)) id_field = self._meta['id_field'] self[id_field] = self._fields[id_field].to_python(object_id) - self._changed_fields = [] + + def reset_changed_fields(doc): + """Loop through and reset changed fields lists""" + if hasattr(doc, '_changed_fields'): + doc._changed_fields = [] + + for field_name in doc._fields: + field = getattr(doc, field_name) + if hasattr(field, '_changed_fields') and field != doc: + reset_changed_fields(field) + + reset_changed_fields(self) signals.post_save.send(self.__class__, document=self, created=created) def delete(self, safe=False): diff --git a/tests/fields.py b/tests/fields.py index fe53d9e7..01280a15 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -879,7 +879,7 @@ class FieldTest(unittest.TestCase): name = StringField() children = ListField(EmbeddedDocumentField('self')) - Tree.drop_collection + Tree.drop_collection() tree = Tree(name="Tree") first_child = TreeNode(name="Child 1") @@ -887,9 +887,15 @@ class FieldTest(unittest.TestCase): second_child = TreeNode(name="Child 2") first_child.children.append(second_child) + tree.save() + + tree = Tree.objects.first() + self.assertEqual(len(tree.children), 1) + + self.assertEqual(len(tree.children[0].children), 1) third_child = TreeNode(name="Child 3") - first_child.children.append(third_child) + tree.children[0].children.append(third_child) tree.save() self.assertEqual(len(tree.children), 1) From 14be7ba2e202b8bc7c0f8b7bc729aeb59d3f3e0a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 21 Jun 2011 14:50:11 +0100 Subject: [PATCH 146/214] Added support for the $ positional operator closes #205 --- docs/changelog.rst | 1 + docs/guide/querying.rst | 34 ++++++++++++++------ mongoengine/queryset.py | 7 ++-- tests/queryset.py | 71 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 102 insertions(+), 11 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 0737171c..4fb5d627 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added support for the positional operator - Updated geo index checking to be recursive and check in embedded documents - Updated default collection naming convention - Added Document Mixin support diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 
1caed2d7..4f36e964 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -23,7 +23,7 @@ fetch documents from the database:: Filtering queries ================= The query may be filtered by calling the -:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword +:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword arguments. The keys in the keyword arguments correspond to fields on the :class:`~mongoengine.Document` you are querying:: @@ -84,7 +84,7 @@ Available operators are as follows: * ``nin`` -- value is not in list (a list of values should be provided) * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values * ``all`` -- every item in list of values provided is in array -* ``size`` -- the size of the array is +* ``size`` -- the size of the array is * ``exists`` -- value for field exists The following operators are available as shortcuts to querying with regular @@ -163,9 +163,9 @@ To retrieve a result that should be unique in the collection, use and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one document matched the query. -A variation of this method exists, +A variation of this method exists, :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new -document with the query arguments if no documents match the query. An +document with the query arguments if no documents match the query. An additional keyword argument, :attr:`defaults` may be provided, which will be used as default values for the new document, in the case that it should need to be created:: @@ -240,7 +240,7 @@ Javascript code that is executed on the database server. Counting results ---------------- Just as with limiting and skipping results, there is a method on -:class:`~mongoengine.queryset.QuerySet` objects -- +:class:`~mongoengine.queryset.QuerySet` objects -- :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic way of achieving this:: @@ -309,11 +309,11 @@ Advanced queries ================ Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword arguments can't fully express the query you want to use -- for example if you -need to combine a number of constraints using *and* and *or*. This is made +need to combine a number of constraints using *and* and *or*. This is made possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class. A :class:`~mongoengine.queryset.Q` object represents part of a query, and can be initialised using the same keyword-argument syntax you use to query -documents. To build a complex query, you may combine +documents. To build a complex query, you may combine :class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or) operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the first positional argument to :attr:`Document.objects` when you filter it by @@ -434,7 +434,7 @@ Atomic updates ============== Documents may be updated atomically by using the :meth:`~mongoengine.queryset.QuerySet.update_one` and -:meth:`~mongoengine.queryset.QuerySet.update` methods on a +:meth:`~mongoengine.queryset.QuerySet.update` methods on a :meth:`~mongoengine.queryset.QuerySet`. 
There are several different "modifiers" that you may use with these methods: @@ -450,7 +450,7 @@ that you may use with these methods: * ``pull_all`` -- remove several values from a list * ``add_to_set`` -- add value to a list only if its not in the list already -The syntax for atomic updates is similar to the querying syntax, but the +The syntax for atomic updates is similar to the querying syntax, but the modifier comes before the field, not after it:: >>> post = BlogPost(title='Test', page_views=0, tags=['database']) @@ -467,3 +467,19 @@ modifier comes before the field, not after it:: >>> post.reload() >>> post.tags ['database', 'nosql'] + +The positional operator allows you to update list items without knowing the +index position, therefore making the update a single atomic operation. As we +cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: + + >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo']) + >>> post.save() + >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb') + >>> post.reload() + >>> post.tags + ['database', 'mongodb'] + +.. note :: + Currently only top level lists are handled, future versions of mongodb / + pymongo plan to support nested positional operators. See `The $ positional + operator `_. \ No newline at end of file diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index e2947a00..82138fec 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1215,6 +1215,9 @@ class QuerySet(object): append_field = True for field in fields: if isinstance(field, str): + # Convert the S operator to $ + if field == 'S': + field = '$' parts.append(field) append_field = False else: @@ -1243,7 +1246,7 @@ class QuerySet(object): return mongo_update - def update(self, safe_update=True, upsert=False, write_options=None, **update): + def update(self, safe_update=True, upsert=False, multi=True, write_options=None, **update): """Perform an atomic update on the fields matched by the query. When ``safe_update`` is used, the number of affected documents is returned. 
@@ -1261,7 +1264,7 @@ class QuerySet(object): update = QuerySet._transform_update(self._document, **update) try: - ret = self._collection.update(self._query, update, multi=True, + ret = self._collection.update(self._query, update, multi=multi, upsert=upsert, safe=safe_update, **write_options) if ret is not None and 'n' in ret: diff --git a/tests/queryset.py b/tests/queryset.py index c5f177c2..c0860b5c 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -260,6 +260,77 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() + def test_update_using_positional_operator(self): + """Ensure that the list fields can be updated using the positional + operator.""" + + class Comment(EmbeddedDocument): + by = StringField() + votes = IntField() + + class BlogPost(Document): + title = StringField() + comments = ListField(EmbeddedDocumentField(Comment)) + + BlogPost.drop_collection() + + c1 = Comment(by="joe", votes=3) + c2 = Comment(by="jane", votes=7) + + BlogPost(title="ABC", comments=[c1, c2]).save() + + BlogPost.objects(comments__by="joe").update(inc__comments__S__votes=1) + + post = BlogPost.objects.first() + self.assertEquals(post.comments[0].by, 'joe') + self.assertEquals(post.comments[0].votes, 4) + + # Currently the $ operator only applies to the first matched item in + # the query + + class Simple(Document): + x = ListField() + + Simple.drop_collection() + Simple(x=[1, 2, 3, 2]).save() + Simple.objects(x=2).update(inc__x__S=1) + + simple = Simple.objects.first() + self.assertEquals(simple.x, [1, 3, 3, 2]) + Simple.drop_collection() + + # You can set multiples + Simple.drop_collection() + Simple(x=[1, 2, 3, 4]).save() + Simple(x=[2, 3, 4, 5]).save() + Simple(x=[3, 4, 5, 6]).save() + Simple(x=[4, 5, 6, 7]).save() + Simple.objects(x=3).update(set__x__S=0) + + s = Simple.objects() + self.assertEquals(s[0].x, [1, 2, 0, 4]) + self.assertEquals(s[1].x, [2, 0, 4, 5]) + self.assertEquals(s[2].x, [0, 4, 5, 6]) + self.assertEquals(s[3].x, [4, 5, 6, 7]) + + # Using "$unset" with an expression like this "array.$" will result in + # the array item becoming None, not being removed. + Simple.drop_collection() + Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save() + Simple.objects(x=3).update(unset__x__S=1) + simple = Simple.objects.first() + self.assertEquals(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) + + # Nested updates arent supported yet.. + def update_nested(): + Simple.drop_collection() + Simple(x=[{'test': [1, 2, 3, 4]}]).save() + Simple.objects(x__test=2).update(set__x__S__test__S=3) + self.assertEquals(simple.x, [1, 2, 3, 4]) + + self.assertRaises(OperationError, update_nested) + Simple.drop_collection() + def test_mapfield_update(self): """Ensure that the MapField can be updated.""" class Member(EmbeddedDocument): From 87f486c4f13508215c5dec9cf945b5ed775394c1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 22 Jun 2011 15:45:25 +0100 Subject: [PATCH 147/214] Added select_related() and refactored dereferencing Added a dereference class to handle both select_related / recursive dereferencing and fetching dereference. 
Refs #206 --- mongoengine/base.py | 74 ++-------- mongoengine/dereference.py | 171 +++++++++++++++++++++++ mongoengine/document.py | 5 + mongoengine/queryset.py | 11 +- tests/dereference.py | 272 ++++++++++++++++++++++++++++++++++++- tests/document.py | 6 +- 6 files changed, 459 insertions(+), 80 deletions(-) create mode 100644 mongoengine/dereference.py diff --git a/mongoengine/base.py b/mongoengine/base.py index 12c760aa..8101aa00 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -8,7 +8,7 @@ import weakref import sys import pymongo import pymongo.objectid -from operator import itemgetter +import operator from functools import partial @@ -163,70 +163,14 @@ class ComplexBaseField(BaseField): def __get__(self, instance, owner): """Descriptor to automatically dereference references. """ - from connection import _get_db - if instance is None: # Document class being used rather than a document object return self - # Get value from document instance if available - value_list = instance._data.get(self.name) - if not value_list or isinstance(value_list, basestring): - return super(ComplexBaseField, self).__get__(instance, owner) - - is_list = False - if not hasattr(value_list, 'items'): - is_list = True - value_list = dict([(k,v) for k,v in enumerate(value_list)]) - - for k,v in value_list.items(): - if isinstance(v, dict) and '_cls' in v and '_ref' not in v: - value_list[k] = get_document(v['_cls'])._from_son(v) - - # Handle all dereferencing - db = _get_db() - dbref = {} - collections = {} - for k,v in value_list.items(): - - # Save any DBRefs - if isinstance(v, (pymongo.dbref.DBRef)): - # direct reference (DBRef) - collections.setdefault(v.collection, []).append((k,v)) - elif isinstance(v, (dict, pymongo.son.SON)): - if '_ref' in v: - # generic reference - collection = get_document(v['_cls'])._get_collection_name() - collections.setdefault(collection, []).append((k,v)) - else: - # Use BaseDict so can watch any changes - dbref[k] = BaseDict(v, instance=instance, name=self.name) - else: - dbref[k] = v - - # For each collection get the references - for collection, dbrefs in collections.items(): - id_map = {} - for k,v in dbrefs: - if isinstance(v, (pymongo.dbref.DBRef)): - # direct reference (DBRef), has no _cls information - id_map[v.id] = (k, None) - elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: - # generic reference - includes _cls information - id_map[v['_ref'].id] = (k, get_document(v['_cls'])) - - references = db[collection].find({'_id': {'$in': id_map.keys()}}) - for ref in references: - key, doc_cls = id_map[ref['_id']] - if not doc_cls: # If no doc_cls get it from the referenced doc - doc_cls = get_document(ref['_cls']) - dbref[key] = doc_cls._from_son(ref) - - if is_list: - dbref = BaseList([v for k,v in sorted(dbref.items(), key=itemgetter(0))], instance=instance, name=self.name) - else: - dbref = BaseDict(dbref, instance=instance, name=self.name) - instance._data[self.name] = dbref + from dereference import dereference + instance._data[self.name] = dereference( + instance._data.get(self.name), max_depth=1, instance=instance, name=self.name, get=True + ) return super(ComplexBaseField, self).__get__(instance, owner) def to_python(self, value): @@ -266,7 +210,7 @@ class ComplexBaseField(BaseField): value_dict[k] = self.to_python(v) if is_list: # Convert back to a list - return [v for k,v in sorted(value_dict.items(), key=itemgetter(0))] + return [v for k,v in sorted(value_dict.items(), key=operator.itemgetter(0))] return value_dict def to_mongo(self, value): 
@@ -315,7 +259,7 @@ class ComplexBaseField(BaseField): value_dict[k] = self.to_mongo(v) if is_list: # Convert back to a list - return [v for k,v in sorted(value_dict.items(), key=itemgetter(0))] + return [v for k,v in sorted(value_dict.items(), key=operator.itemgetter(0))] return value_dict def validate(self, value): @@ -907,7 +851,7 @@ class BaseList(list): """ def __init__(self, list_items, instance, name): - self.instance = weakref.proxy(instance) + self.instance = instance self.name = name super(BaseList, self).__init__(list_items) @@ -958,7 +902,7 @@ class BaseDict(dict): """ def __init__(self, dict_items, instance, name): - self.instance = weakref.proxy(instance) + self.instance = instance self.name = name super(BaseDict, self).__init__(dict_items) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py new file mode 100644 index 00000000..9192901c --- /dev/null +++ b/mongoengine/dereference.py @@ -0,0 +1,171 @@ +import operator + +import pymongo + +from base import BaseDict, BaseList, get_document +from connection import _get_db +from queryset import QuerySet + + +class DeReference(object): + + def __call__(self, items, max_depth=1, instance=None, name=None, get=False): + """ + Cheaply dereferences the items to a set depth. + Also handles the convertion of complex data types. + + :param items: The iterable (dict, list, queryset) to be dereferenced. + :param max_depth: The maximum depth to recurse to + :param instance: The owning instance used for tracking changes by + :class:`~mongoengine.base.ComplexBaseField` + :param name: The name of the field, used for tracking changes by + :class:`~mongoengine.base.ComplexBaseField` + :param get: A boolean determining if being called by __get__ + """ + if items is None or isinstance(items, basestring): + return items + + # cheapest way to convert a queryset to a list + # list(queryset) uses a count() query to determine length + if isinstance(items, QuerySet): + items = [i for i in items] + + self.max_depth = max_depth + self.reference_map = self._find_references(items) + self.object_map = self._fetch_objects() + return self._attach_objects(items, 0, instance, name, get) + + def _find_references(self, items, depth=0): + """ + Recursively finds all db references to be dereferenced + + :param items: The iterable (dict, list, queryset) + :param depth: The current depth of recursion + """ + reference_map = {} + if not items: + return reference_map + + # Determine the iterator to use + if not hasattr(items, 'items'): + iterator = enumerate(items) + else: + iterator = items.iteritems() + + # Recursively find dbreferences + for k, item in iterator: + if hasattr(item, '_fields'): + for field_name, field in item._fields.iteritems(): + v = item._data.get(field_name, None) + if isinstance(v, (pymongo.dbref.DBRef)): + reference_map.setdefault(field.document_type, []).append(v.id) + elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: + reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) + elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: + field_cls = getattr(getattr(field, 'field', None), 'document_type', None) + references = self._find_references(v, depth) + for key, refs in references.iteritems(): + if field_cls: + key = field_cls + reference_map.setdefault(key, []).extend(refs) + elif isinstance(item, (pymongo.dbref.DBRef)): + reference_map.setdefault(item.collection, []).append(item.id) + elif isinstance(item, (dict, pymongo.son.SON)) and '_ref' in item: + 
reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) + elif isinstance(item, (dict, list, tuple)) and depth <= self.max_depth: + references = self._find_references(item, depth) + for key, refs in references.iteritems(): + reference_map.setdefault(key, []).extend(refs) + depth += 1 + return reference_map + + def _fetch_objects(self): + """Fetch all references and convert to their document objects + """ + object_map = {} + for col, dbrefs in self.reference_map.iteritems(): + keys = object_map.keys() + refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) + if hasattr(col, 'objects'): # We have a document class for the refs + references = col.objects.in_bulk(refs) + for key, doc in references.iteritems(): + object_map[key] = doc + else: # Generic reference: use the refs data to convert to document + references = _get_db()[col].find({'_id': {'$in': refs}}) + for ref in references: + doc = get_document(ref['_cls'])._from_son(ref) + object_map[doc.id] = doc + return object_map + + def _attach_objects(self, items, depth=0, instance=None, name=None, get=False): + """ + Recursively finds all db references to be dereferenced + + :param items: The iterable (dict, list, queryset) + :param depth: The current depth of recursion + :param instance: The owning instance used for tracking changes by + :class:`~mongoengine.base.ComplexBaseField` + :param name: The name of the field, used for tracking changes by + :class:`~mongoengine.base.ComplexBaseField` + :param get: A boolean determining if being called by __get__ + """ + if not items: + if isinstance(items, (BaseDict, BaseList)): + return items + + if instance: + if isinstance(items, dict): + return BaseDict(items, instance=instance, name=name) + else: + return BaseList(items, instance=instance, name=name) + + if isinstance(items, (dict, pymongo.son.SON)): + if '_ref' in items: + return self.object_map.get(items['_ref'].id, items) + elif '_types' in items and '_cls' in items: + doc = get_document(items['_cls'])._from_son(items) + if not get: + doc._data = self._attach_objects(doc._data, depth, doc, name, get) + return doc + + if not hasattr(items, 'items'): + is_list = True + iterator = enumerate(items) + data = [] + else: + is_list = False + iterator = items.iteritems() + data = {} + + for k, v in iterator: + if is_list: + data.append(v) + else: + data[k] = v + + if k in self.object_map: + data[k] = self.object_map[k] + elif hasattr(v, '_fields'): + for field_name, field in v._fields.iteritems(): + v = data[k]._data.get(field_name, None) + if isinstance(v, (pymongo.dbref.DBRef)): + data[k]._data[field_name] = self.object_map.get(v.id, v) + elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: + data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) + elif isinstance(v, dict) and depth < self.max_depth: + data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get) + elif isinstance(v, (list, tuple)): + data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get) + elif isinstance(v, (dict, list, tuple)) and depth < self.max_depth: + data[k] = self._attach_objects(v, depth, instance=instance, name=name, get=get) + elif hasattr(v, 'id'): + data[k] = self.object_map.get(v.id, v) + + if instance and name: + if is_list: + return BaseList(data, instance=instance, name=name) + return BaseDict(data, instance=instance, name=name) + depth += 1 + return data + +dereference = DeReference() diff --git a/mongoengine/document.py 
b/mongoengine/document.py index e20500d6..31a2530c 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -193,6 +193,11 @@ class Document(BaseDocument): signals.post_delete.send(self.__class__, document=self) + def select_related(self, max_depth=1): + from dereference import dereference + self._data = dereference(self._data, max_depth) + return self + def reload(self): """Reloads all attributes from the database. diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 82138fec..6b110ff0 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -801,13 +801,7 @@ class QuerySet(object): :param object_id: the value for the id of the document to look up """ - id_field = self._document._meta['id_field'] - object_id = self._document._fields[id_field].to_mongo(object_id) - - result = self._collection.find_one({'_id': object_id}, **self._cursor_args) - if result is not None: - result = self._document._from_son(result) - return result + return self._document.objects(pk=object_id).first() def in_bulk(self, object_ids): """Retrieve a set of documents by their ids. @@ -1530,6 +1524,9 @@ class QuerySet(object): data[-1] = "...(remaining elements truncated)..." return repr(data) + def select_related(self, max_depth=1): + from dereference import dereference + return dereference(self, max_depth=max_depth) class QuerySetManager(object): diff --git a/tests/dereference.py b/tests/dereference.py index 4040d5bd..a98267fd 100644 --- a/tests/dereference.py +++ b/tests/dereference.py @@ -30,6 +30,9 @@ class FieldTest(unittest.TestCase): group = Group(members=User.objects) group.save() + group = Group(members=User.objects) + group.save() + with query_counter() as q: self.assertEqual(q, 0) @@ -39,6 +42,24 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] self.assertEqual(q, 2) + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first().select_related() + self.assertEqual(q, 2) + [m for m in group_obj.members] + self.assertEqual(q, 2) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + group_objs = Group.objects.select_related() + self.assertEqual(q, 2) + for group_obj in group_objs: + [m for m in group_obj.members] + self.assertEqual(q, 2) + User.drop_collection() Group.drop_collection() @@ -50,6 +71,8 @@ class FieldTest(unittest.TestCase): boss = ReferenceField('self') friends = ListField(ReferenceField('self')) + Employee.drop_collection() + bill = Employee(name='Bill Lumbergh') bill.save() @@ -63,6 +86,10 @@ class FieldTest(unittest.TestCase): peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) peter.save() + Employee(name='Funky Gibbon', boss=bill, friends=friends).save() + Employee(name='Funky Gibbon', boss=bill, friends=friends).save() + Employee(name='Funky Gibbon', boss=bill, friends=friends).save() + with query_counter() as q: self.assertEqual(q, 0) @@ -75,6 +102,33 @@ class FieldTest(unittest.TestCase): peter.friends self.assertEqual(q, 3) + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + peter = Employee.objects.with_id(peter.id).select_related() + self.assertEqual(q, 2) + + self.assertEquals(peter.boss, bill) + self.assertEqual(q, 2) + + self.assertEquals(peter.friends, friends) + self.assertEqual(q, 2) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + + employees = Employee.objects(boss=bill).select_related() + self.assertEqual(q, 2) + + for employee in employees: 
+ self.assertEquals(employee.boss, bill) + self.assertEqual(q, 2) + + self.assertEquals(employee.friends, friends) + self.assertEqual(q, 2) + def test_generic_reference(self): class UserA(Document): @@ -110,6 +164,9 @@ class FieldTest(unittest.TestCase): group = Group(members=members) group.save() + group = Group(members=members) + group.save() + with query_counter() as q: self.assertEqual(q, 0) @@ -125,6 +182,39 @@ class FieldTest(unittest.TestCase): for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first().select_related() + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for m in group_obj.members: + self.assertTrue('User' in m.__class__.__name__) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_objs = Group.objects.select_related() + self.assertEqual(q, 4) + + for group_obj in group_objs: + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for m in group_obj.members: + self.assertTrue('User' in m.__class__.__name__) + UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() @@ -165,6 +255,9 @@ class FieldTest(unittest.TestCase): group = Group(members=members) group.save() + group = Group(members=members) + group.save() + with query_counter() as q: self.assertEqual(q, 0) @@ -180,6 +273,39 @@ class FieldTest(unittest.TestCase): for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first().select_related() + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for m in group_obj.members: + self.assertTrue('User' in m.__class__.__name__) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_objs = Group.objects.select_related() + self.assertEqual(q, 4) + + for group_obj in group_objs: + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for m in group_obj.members: + self.assertTrue('User' in m.__class__.__name__) + UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() @@ -205,6 +331,9 @@ class FieldTest(unittest.TestCase): group = Group(members=dict([(str(u.id), u) for u in members])) group.save() + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() + with query_counter() as q: self.assertEqual(q, 0) @@ -217,6 +346,33 @@ class FieldTest(unittest.TestCase): for k, m in group_obj.members.iteritems(): self.assertTrue(isinstance(m, User)) + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first().select_related() + self.assertEqual(q, 2) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + for k, m in group_obj.members.iteritems(): + self.assertTrue(isinstance(m, User)) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_objs = Group.objects.select_related() + self.assertEqual(q, 2) + + for group_obj in group_objs: + [m for m in group_obj.members] + self.assertEqual(q, 2) + + for k, m in group_obj.members.iteritems(): + 
self.assertTrue(isinstance(m, User)) + User.drop_collection() Group.drop_collection() @@ -254,6 +410,8 @@ class FieldTest(unittest.TestCase): group = Group(members=dict([(str(u.id), u) for u in members])) group.save() + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() with query_counter() as q: self.assertEqual(q, 0) @@ -270,8 +428,41 @@ class FieldTest(unittest.TestCase): for k, m in group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) - group.members = {} - group.save() + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first().select_related() + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for k, m in group_obj.members.iteritems(): + self.assertTrue('User' in m.__class__.__name__) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_objs = Group.objects.select_related() + self.assertEqual(q, 4) + + for group_obj in group_objs: + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for k, m in group_obj.members.iteritems(): + self.assertTrue('User' in m.__class__.__name__) + + Group.objects.delete() + Group().save() with query_counter() as q: self.assertEqual(q, 0) @@ -310,6 +501,9 @@ class FieldTest(unittest.TestCase): group = Group(members=dict([(str(u.id), u) for u in members])) group.save() + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() + with query_counter() as q: self.assertEqual(q, 0) @@ -325,6 +519,39 @@ class FieldTest(unittest.TestCase): for k, m in group_obj.members.iteritems(): self.assertTrue(isinstance(m, UserA)) + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first().select_related() + self.assertEqual(q, 2) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + for k, m in group_obj.members.iteritems(): + self.assertTrue(isinstance(m, UserA)) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_objs = Group.objects.select_related() + self.assertEqual(q, 2) + + for group_obj in group_objs: + [m for m in group_obj.members] + self.assertEqual(q, 2) + + [m for m in group_obj.members] + self.assertEqual(q, 2) + + for k, m in group_obj.members.iteritems(): + self.assertTrue(isinstance(m, UserA)) + UserA.drop_collection() Group.drop_collection() @@ -362,6 +589,8 @@ class FieldTest(unittest.TestCase): group = Group(members=dict([(str(u.id), u) for u in members])) group.save() + group = Group(members=dict([(str(u.id), u) for u in members])) + group.save() with query_counter() as q: self.assertEqual(q, 0) @@ -378,8 +607,41 @@ class FieldTest(unittest.TestCase): for k, m in group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) - group.members = {} - group.save() + # Document select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_obj = Group.objects.first().select_related() + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for k, m in group_obj.members.iteritems(): + self.assertTrue('User' in m.__class__.__name__) + + # Queryset select_related + with query_counter() as q: + self.assertEqual(q, 0) + + group_objs = 
Group.objects.select_related() + self.assertEqual(q, 4) + + for group_obj in group_objs: + [m for m in group_obj.members] + self.assertEqual(q, 4) + + [m for m in group_obj.members] + self.assertEqual(q, 4) + + for k, m in group_obj.members.iteritems(): + self.assertTrue('User' in m.__class__.__name__) + + Group.objects.delete() + Group().save() with query_counter() as q: self.assertEqual(q, 0) @@ -393,4 +655,4 @@ class FieldTest(unittest.TestCase): UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() - Group.drop_collection() \ No newline at end of file + Group.drop_collection() diff --git a/tests/document.py b/tests/document.py index 28d61332..82488cf1 100644 --- a/tests/document.py +++ b/tests/document.py @@ -932,7 +932,7 @@ class DocumentTest(unittest.TestCase): list_field = ListField() embedded_field = EmbeddedDocumentField(Embedded) - Doc.drop_collection + Doc.drop_collection() doc = Doc() doc.dict_field = {'hello': 'world'} doc.list_field = ['1', 2, {'hello': 'world'}] @@ -1125,7 +1125,7 @@ class DocumentTest(unittest.TestCase): dict_field = DictField() list_field = ListField() - Doc.drop_collection + Doc.drop_collection() doc = Doc() doc.save() @@ -1180,7 +1180,7 @@ class DocumentTest(unittest.TestCase): list_field = ListField() embedded_field = EmbeddedDocumentField(Embedded) - Doc.drop_collection + Doc.drop_collection() doc = Doc() doc.save() From b039a2293fea59ab3f1f7c25b85755cbb1237e1f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 27 Jun 2011 12:42:26 +0100 Subject: [PATCH 148/214] Updated documentation about dereferencing Refs #206 --- docs/changelog.rst | 1 + docs/guide/querying.rst | 16 ++++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4fb5d627..cad1b687 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added select_related() support - Added support for the positional operator - Updated geo index checking to be recursive and check in embedded documents - Updated default collection naming convention diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 4f36e964..b23ea4d1 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -175,6 +175,22 @@ to be created:: >>> a.name == b.name and a.age == b.age True +Dereferencing results +--------------------- +When iterating the results of :class:`~mongoengine.ListField` or +:class:`~mongoengine.DictField` we automatically dereference any +:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the +number the queries to mongo. + +There are times when that efficiency is not enough, documents that have +:class:`~mongoengine.ReferenceField` objects or +:class:`~mongoengine.GenericReferenceField` objects at the top level are +expensive as the number of queries to MongoDB can quickly rise. + +To limit the number of queries use +:func:`~mongoengine.queryset.QuerySet.select_related` which converts the +QuerySet to a list and dereferences as efficiently as possible. 
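A minimal sketch of the difference this makes, assuming two hypothetical
documents (``Author`` and ``Post``) and the ``select_related()`` method added
in this patch::

    class Author(Document):
        name = StringField()

    class Post(Document):
        title = StringField()
        author = ReferenceField(Author)

    # Lazy dereferencing: each access of post.author may issue its own query
    for post in Post.objects:
        print post.author.name

    # select_related() converts the QuerySet to a list and dereferences the
    # referenced authors up front, in as few queries as possible
    for post in Post.objects.select_related(max_depth=1):
        print post.author.name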
+ Default Document queries ======================== By default, the objects :attr:`~mongoengine.Document.objects` attribute on a From 4036e9fe3496b62cb2389022bd38e91a20567898 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 27 Jun 2011 13:17:41 +0100 Subject: [PATCH 149/214] Moved private method to make class more readable --- mongoengine/fields.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 50a30a13..bac312bd 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -775,11 +775,6 @@ class GridFSProxy(object): self.gridout = None self._mark_as_changed() - def _mark_as_changed(self): - """Inform the instance that `self.key` has been changed""" - if self.instance: - self.instance._mark_as_changed(self.key) - def replace(self, file_obj, **kwargs): self.delete() self.put(file_obj, **kwargs) @@ -788,6 +783,11 @@ class GridFSProxy(object): if self.newfile: self.newfile.close() + def _mark_as_changed(self): + """Inform the instance that `self.key` has been changed""" + if self.instance: + self.instance._mark_as_changed(self.key) + class FileField(BaseField): """A GridFS storage field. From 84e611b91e94ac32341460dc6cbb46396743cf58 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 27 Jun 2011 16:46:39 +0100 Subject: [PATCH 150/214] Tweak to dereferencing --- mongoengine/dereference.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 9192901c..6bfabd94 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -2,9 +2,10 @@ import operator import pymongo -from base import BaseDict, BaseList, get_document +from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass from connection import _get_db from queryset import QuerySet +from document import Document class DeReference(object): @@ -65,7 +66,7 @@ class DeReference(object): field_cls = getattr(getattr(field, 'field', None), 'document_type', None) references = self._find_references(v, depth) for key, refs in references.iteritems(): - if field_cls: + if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): key = field_cls reference_map.setdefault(key, []).extend(refs) elif isinstance(item, (pymongo.dbref.DBRef)): From 3d15a3b3e2d362f617a2f1059b091f96dc99a54b Mon Sep 17 00:00:00 2001 From: Zak Johnson Date: Wed, 29 Jun 2011 20:48:39 -0700 Subject: [PATCH 151/214] Add GridFSProxy.__nonzero__ For documents that do not have a value set for a given field, most field types return None (or [] in the case of ListField). This makes it easy to test whether a field has been set using "if doc.field". FileFields, on the other hand, always return a GridFSProxy. Adding GridFSProxy.__nonzero__ which simply checks for a grid_id allows the same boolean-test pattern for FileFields, as well. 
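A rough sketch of the boolean-test pattern this enables, using a hypothetical
``Attachment`` document (mirroring the behaviour exercised by the new
test_file_boolean test below)::

    class Attachment(Document):
        file = FileField()

    doc = Attachment()
    if not doc.file:                  # no grid_id yet, so the proxy is falsy
        doc.file = 'Hello, World!'
        doc.file.content_type = 'text/plain'
        doc.save()

    if doc.file:                      # truthy once a file has been stored
        data = doc.file.read()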
--- mongoengine/fields.py | 5 ++++- tests/fields.py | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 50a30a13..3e3b5b1e 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -723,6 +723,9 @@ class GridFSProxy(object): def __get__(self, instance, value): return self + def __nonzero__(self): + return bool(self.grid_id) + def get(self, id=None): if id: self.grid_id = id @@ -805,7 +808,7 @@ class FileField(BaseField): # Check if a file already exists for this model grid_file = instance._data.get(self.name) self.grid_file = grid_file - if self.grid_file: + if isinstance(self.grid_file, GridFSProxy): if not self.grid_file.key: self.grid_file.key = self.name self.grid_file.instance = instance diff --git a/tests/fields.py b/tests/fields.py index 01280a15..f2543fc1 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1300,6 +1300,21 @@ class FieldTest(unittest.TestCase): TestFile.drop_collection() + def test_file_boolean(self): + """Ensure that a boolean test of a FileField indicates its presence + """ + class TestFile(Document): + file = FileField() + + testfile = TestFile() + self.assertFalse(bool(testfile.file)) + testfile.file = 'Hello, World!' + testfile.file.content_type = 'text/plain' + testfile.save() + self.assertTrue(bool(testfile.file)) + + TestFile.drop_collection() + def test_geo_indexes(self): """Ensure that indexes are created automatically for GeoPointFields. """ From 8e1d701c277467f774ac8fb3857811bace29d5f7 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 30 Jun 2011 10:32:05 +0100 Subject: [PATCH 152/214] Fixed infinite recursion bug in _geo_indices() Fixes #213 Thanks to joshink for the bug report --- mongoengine/base.py | 9 +++++++-- tests/document.py | 12 ++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 8101aa00..b83164aa 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -754,11 +754,16 @@ class BaseDocument(object): return set_data, unset_data @classmethod - def _geo_indices(cls): + def _geo_indices(cls, inspected_classes=None): + inspected_classes = inspected_classes or [] geo_indices = [] + inspected_classes.append(cls) for field in cls._fields.values(): if hasattr(field, 'document_type'): - geo_indices += field.document_type._geo_indices() + field_cls = field.document_type + if field_cls in inspected_classes: + continue + geo_indices += field_cls._geo_indices(inspected_classes) elif field._geo_index: geo_indices.append(field) return geo_indices diff --git a/tests/document.py b/tests/document.py index 82488cf1..c1abd463 100644 --- a/tests/document.py +++ b/tests/document.py @@ -662,6 +662,18 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + def test_geo_indexes_recursion(self): + + class User(Document): + channel = ReferenceField('Channel') + location = GeoPointField() + + class Channel(Document): + user = ReferenceField('User') + location = GeoPointField() + + self.assertEquals(len(User._geo_indices()), 2) + def test_hint(self): class BlogPost(Document): From 556e620c7a5b4cff3f9b93ce0361b39033e587f9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 1 Jul 2011 08:44:46 +0100 Subject: [PATCH 153/214] Fixes recursion error when resetting changed fields Fixes #214 - thanks to wpjunior for the test case --- mongoengine/document.py | 9 ++++++--- tests/document.py | 26 ++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 3 deletions(-) diff --git 
a/mongoengine/document.py b/mongoengine/document.py index 31a2530c..c653c8fb 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -162,15 +162,18 @@ class Document(BaseDocument): id_field = self._meta['id_field'] self[id_field] = self._fields[id_field].to_python(object_id) - def reset_changed_fields(doc): + def reset_changed_fields(doc, inspected_docs=None): """Loop through and reset changed fields lists""" + + inspected_docs = inspected_docs or [] + inspected_docs.append(doc) if hasattr(doc, '_changed_fields'): doc._changed_fields = [] for field_name in doc._fields: field = getattr(doc, field_name) - if hasattr(field, '_changed_fields') and field != doc: - reset_changed_fields(field) + if field not in inspected_docs and hasattr(field, '_changed_fields'): + reset_changed_fields(field, inspected_docs) reset_changed_fields(self) signals.post_save.send(self.__class__, document=self, created=created) diff --git a/tests/document.py b/tests/document.py index c1abd463..9498cfb2 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1045,6 +1045,32 @@ class DocumentTest(unittest.TestCase): except ValidationError: self.fail() + def test_save_max_recursion_not_hit(self): + + class Person(Document): + name = StringField() + parent = ReferenceField('self') + friend = ReferenceField('self') + + Person.drop_collection() + + p1 = Person(name="Wilson Jr") + p1.parent = None + p1.save() + + p2 = Person(name="Wilson Jr2") + p2.parent = p1 + p2.save() + + p1.friend = p2 + p1.save() + + # Confirm can save and it resets the changed fields without hitting + # max recursion error + p0 = Person.objects.first() + p0.name = 'wpjunior' + p0.save() + def test_update(self): """Ensure that an existing document is updated instead of be overwritten. """ From 4e6f91ae77baf7cf757daf2dd8f8fe648f25795b Mon Sep 17 00:00:00 2001 From: Victor Farazdagi Date: Sat, 2 Jul 2011 19:48:21 +0400 Subject: [PATCH 154/214] Typo fixed in "Quering The Db" guide. --- docs/guide/querying.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index b23ea4d1..c454b6e8 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -5,8 +5,8 @@ Querying the database is used for accessing the objects in the database associated with the class. The :attr:`objects` attribute is actually a :class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new -a new :class:`~mongoengine.queryset.QuerySet` object on access. The -:class:`~mongoengine.queryset.QuerySet` object may may be iterated over to +:class:`~mongoengine.queryset.QuerySet` object on access. The +:class:`~mongoengine.queryset.QuerySet` object may be iterated over to fetch documents from the database:: # Prints out the names of all the users in the database @@ -498,4 +498,4 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: .. note :: Currently only top level lists are handled, future versions of mongodb / pymongo plan to support nested positional operators. See `The $ positional - operator `_. \ No newline at end of file + operator `_. 
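The positional-operator note above is easiest to follow with a small sketch (illustrative only; the ``Post``/``Comment`` field names here are hypothetical, and it assumes an active connection)::

    from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                             ListField, StringField, IntField)

    class Comment(EmbeddedDocument):
        by = StringField()
        votes = IntField(default=0)

    class Post(Document):
        title = StringField()
        comments = ListField(EmbeddedDocumentField(Comment))

    # `$` cannot appear in a Python keyword argument, so it is written as `S`:
    # this increments the votes of the first comment matched by the query.
    Post.objects(comments__by='joe').update(inc__comments__S__votes=1)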
From dc5a613bc7a28148791f8fa42a7cebfe83fe5a5f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 11 Jul 2011 09:19:27 +0100 Subject: [PATCH 155/214] Fixes conversion of null genericreferences in querysets closes #211 --- mongoengine/fields.py | 3 +++ tests/fields.py | 12 ++++++++++++ 2 files changed, 15 insertions(+) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index eb0825e6..88040115 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -653,6 +653,9 @@ class GenericReferenceField(BaseField): return doc def to_mongo(self, document): + if document is None: + return None + id_field_name = document.__class__._meta['id_field'] id_field = document.__class__._fields[id_field_name] diff --git a/tests/fields.py b/tests/fields.py index f2543fc1..7a752998 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1095,6 +1095,18 @@ class FieldTest(unittest.TestCase): Link.drop_collection() User.drop_collection() + def test_generic_reference_is_none(self): + + class Person(Document): + name = StringField() + city = GenericReferenceField() + + Person.drop_collection() + Person(name="Wilson Jr").save() + + self.assertEquals(repr(Person.objects(city=None)), + "[]") + def test_binary_fields(self): """Ensure that binary fields can be stored and retrieved. """ From 803164a993b272dd72860cf5cb5ea1b2464a1aee Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Mon, 11 Jul 2011 08:08:49 -0400 Subject: [PATCH 156/214] add unique index on User.username --- mongoengine/django/auth.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 2711ee18..92424909 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -32,6 +32,12 @@ class User(Document): last_login = DateTimeField(default=datetime.datetime.now) date_joined = DateTimeField(default=datetime.datetime.now) + meta = { + 'indexes': [ + {'fields': ['username'], 'unique': True} + ] + } + def __unicode__(self): return self.username From 859de712b493f47fc764e2a648f086e14da920c9 Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Mon, 11 Jul 2011 09:44:28 -0400 Subject: [PATCH 157/214] only create indexes on first collection access (fix #223) --- mongoengine/queryset.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 6b110ff0..a477e370 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -320,10 +320,11 @@ class QuerySet(object): providing :class:`~mongoengine.Document` objects as the results. """ + ALREADY_INDEXED = set() + def __init__(self, document, collection): self._document = document self._collection_obj = collection - self._accessed_collection = False self._mongo_query = None self._query_obj = Q() self._initial_query = {} @@ -467,8 +468,8 @@ class QuerySet(object): """Property that returns the collection object. This allows us to perform operations only if the collection is accessed. 
""" - if not self._accessed_collection: - self._accessed_collection = True + if self._document not in QuerySet.ALREADY_INDEXED: + QuerySet.ALREADY_INDEXED.add(self._document) background = self._document._meta.get('index_background', False) drop_dups = self._document._meta.get('index_drop_dups', False) From 0847687fd16ca435f5f16f88566a9b325e2f8cc6 Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Mon, 11 Jul 2011 10:15:55 -0400 Subject: [PATCH 158/214] don't create extra index on _types (fix #222) mongodb will use an index that begins with _types to service queries against _types, so the extra index is only needed if no other fields are indexed in the document. to be safe, we explicitly check all indexes to see if any begins with _types, and only then prevent creation of the additional index on _types. --- mongoengine/queryset.py | 21 +++++++++++++++++++-- tests/document.py | 11 +++++++---- 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index a477e370..69c78b28 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -476,22 +476,39 @@ class QuerySet(object): index_opts = self._document._meta.get('index_options', {}) index_types = self._document._meta.get('index_types', True) + # determine if an index which we are creating includes + # _type as its first field; if so, we can avoid creating + # an extra index on _type, as mongodb will use the existing + # index to service queries against _type + types_indexed = False + def includes_types(fields): + first_field = None + if len(fields): + if isinstance(fields[0], basestring): + first_field = fields[0] + elif isinstance(fields[0], (list, tuple)) and len(fields[0]): + first_field = fields[0][0] + return first_field == '_types' + # Ensure indexes created by uniqueness constraints for index in self._document._meta['unique_indexes']: + types_indexed = types_indexed or includes_types(index) self._collection.ensure_index(index, unique=True, background=background, drop_dups=drop_dups, **index_opts) # Ensure document-defined indexes are created if self._document._meta['indexes']: for spec in self._document._meta['indexes']: + types_indexed = types_indexed or includes_types(spec['fields']) opts = index_opts.copy() opts['unique'] = spec.get('unique', False) opts['sparse'] = spec.get('sparse', False) self._collection.ensure_index(spec['fields'], background=background, **opts) - # If _types is being used (for polymorphism), it needs an index - if index_types and '_types' in self._query: + # If _types is being used (for polymorphism), it needs an index, + # only if another index doesn't begin with _types + if index_types and '_types' in self._query and not types_indexed: self._collection.ensure_index('_types', background=background, **index_opts) diff --git a/tests/document.py b/tests/document.py index 9498cfb2..0c056a16 100644 --- a/tests/document.py +++ b/tests/document.py @@ -397,7 +397,7 @@ class DocumentTest(unittest.TestCase): info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertEquals([[(u'_id', 1)], [(u'_types', 1)], [(u'_types', 1), (u'name', 1)]], info) + self.assertEquals([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) # Turn off inheritance class Animal(Document): @@ -415,7 +415,7 @@ class DocumentTest(unittest.TestCase): info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] - self.assertEquals([[(u'_id', 1)], [(u'_types', 1)], [(u'_types', 1), (u'name', 1)]], info) + 
self.assertEquals([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) info = collection.index_information() indexes_to_drop = [key for key, value in info.iteritems() if '_types' in dict(value['key'])] @@ -601,8 +601,11 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() - # _id, types, '-date', 'tags', ('cat', 'date') - self.assertEqual(len(info), 5) + # _id, '-date', 'tags', ('cat', 'date') + # NB: there is no index on _types by itself, since + # the indices on -date and tags will both contain + # _types as first element in the key + self.assertEqual(len(info), 4) # Indexes are lazy so use list() to perform query list(BlogPost.objects) From 0fb629e24ccb6ce6f2b8c3bf92fd3c239ba5ef11 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 11 Jul 2011 16:01:48 +0100 Subject: [PATCH 159/214] Added cascading saves Also ensured that unsetting works when not the default value of a field --- docs/changelog.rst | 1 + mongoengine/base.py | 19 ++++++++--- mongoengine/document.py | 17 +++++++++- tests/document.py | 70 ++++++++++++++++++++++++++++++++++++----- 4 files changed, 94 insertions(+), 13 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index cad1b687..1b4842e7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added cascading saves - so changes to Referenced documents are saved on .save() - Added select_related() support - Added support for the positional operator - Updated geo index checking to be recursive and check in embedded documents diff --git a/mongoengine/base.py b/mongoengine/base.py index b83164aa..25b049a3 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -747,10 +747,21 @@ class BaseDocument(object): if '_id' in set_data: del(set_data['_id']) - for k,v in set_data.items(): - if not v: - del(set_data[k]) - unset_data[k] = 1 + # Determine if any changed items were actually unset. + for path, value in set_data.items(): + if value: + continue + + # If we've set a value that aint the default value save it. + if path in self._fields: + default = self._fields[path].default + if callable(default): + default = default() + if default != value: + continue + + del(set_data[path]) + unset_data[path] = 1 return set_data, unset_data @classmethod diff --git a/mongoengine/document.py b/mongoengine/document.py index c653c8fb..6ccda997 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -112,7 +112,7 @@ class Document(BaseDocument): self._collection = db[collection_name] return self._collection - def save(self, safe=True, force_insert=False, validate=True, write_options=None): + def save(self, safe=True, force_insert=False, validate=True, write_options=None, _refs=None): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be created. @@ -131,6 +131,8 @@ class Document(BaseDocument): For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to.
""" + from fields import ReferenceField, GenericReferenceField + signals.pre_save.send(self.__class__, document=self) if validate: @@ -140,6 +142,7 @@ class Document(BaseDocument): write_options = {} doc = self.to_mongo() + created = '_id' not in doc try: collection = self.__class__.objects._collection @@ -154,6 +157,18 @@ class Document(BaseDocument): collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options) if removals: collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options) + + # Save any references / generic references + _refs = _refs or [] + for name, cls in self._fields.items(): + if isinstance(cls, (ReferenceField, GenericReferenceField)): + ref = getattr(self, name) + if ref and str(ref) not in _refs: + _refs.append(str(ref)) + ref.save(safe=safe, force_insert=force_insert, + validate=validate, write_options=write_options, + _refs=_refs) + except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' if u'duplicate key' in unicode(err): diff --git a/tests/document.py b/tests/document.py index 9498cfb2..81670eb0 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1054,11 +1054,11 @@ class DocumentTest(unittest.TestCase): Person.drop_collection() - p1 = Person(name="Wilson Jr") + p1 = Person(name="Wilson Snr") p1.parent = None p1.save() - p2 = Person(name="Wilson Jr2") + p2 = Person(name="Wilson Jr") p2.parent = p1 p2.save() @@ -1071,6 +1071,51 @@ class DocumentTest(unittest.TestCase): p0.name = 'wpjunior' p0.save() + def test_save_cascades(self): + + class Person(Document): + name = StringField() + parent = ReferenceField('self') + + Person.drop_collection() + + p1 = Person(name="Wilson Snr") + p1.parent = None + p1.save() + + p2 = Person(name="Wilson Jr") + p2.parent = p1 + p2.save() + + p = Person.objects(name="Wilson Jr").get() + p.parent.name = "Daddy Wilson" + p.save() + + p1.reload() + self.assertEquals(p1.name, p.parent.name) + + def test_save_cascades_generically(self): + + class Person(Document): + name = StringField() + parent = GenericReferenceField() + + Person.drop_collection() + + p1 = Person(name="Wilson Snr") + p1.save() + + p2 = Person(name="Wilson Jr") + p2.parent = p1 + p2.save() + + p = Person.objects(name="Wilson Jr").get() + p.parent.name = "Daddy Wilson" + p.save() + + p1.reload() + self.assertEquals(p1.name, p.parent.name) + def test_update(self): """Ensure that an existing document is updated instead of be overwritten. """ @@ -1364,22 +1409,31 @@ class DocumentTest(unittest.TestCase): """Ensure save only sets / unsets changed fields """ + class User(self.Person): + active = BooleanField(default=True) + + + User.drop_collection() + # Create person object and save it to the database - person = self.Person(name='Test User', age=30) - person.save() - person.reload() + user = User(name='Test User', age=30, active=True) + user.save() + user.reload() + # Simulated Race condition same_person = self.Person.objects.get() + same_person.active = False + + user.age = 21 + user.save() - person.age = 21 same_person.name = 'User' - - person.save() same_person.save() person = self.Person.objects.get() self.assertEquals(person.name, 'User') self.assertEquals(person.age, 21) + self.assertEquals(person.active, False) def test_delete(self): """Ensure that document may be deleted using the delete method. 
From 1452d3fac5f2500cda0439a45294b9382c8c2d42 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 11 Jul 2011 16:50:31 +0100 Subject: [PATCH 160/214] Fixed item_frequency methods to handle null values [fixes #216] --- mongoengine/queryset.py | 13 ++++++++----- tests/queryset.py | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 6b110ff0..d533736b 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1435,7 +1435,7 @@ class QuerySet(object): path = '{{~%(field)s}}'.split('.'); field = this; for (p in path) { field = field[path[p]]; } - if (field.constructor == Array) { + if (field && field.constructor == Array) { field.forEach(function(item) { emit(item, 1); }); @@ -1481,7 +1481,7 @@ class QuerySet(object): db[collection].find(query).forEach(function(doc) { field = doc; for (p in path) { field = field[path[p]]; } - if (field.constructor == Array) { + if (field && field.constructor == Array) { total += field.length; } else { total++; @@ -1497,7 +1497,7 @@ class QuerySet(object): db[collection].find(query).forEach(function(doc) { field = doc; for (p in path) { field = field[path[p]]; } - if (field.constructor == Array) { + if (field && field.constructor == Array) { field.forEach(function(item) { frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); }); @@ -1509,8 +1509,11 @@ class QuerySet(object): return frequencies; } """ - - return self.exec_js(freq_func, field, normalize=normalize) + data = self.exec_js(freq_func, field, normalize=normalize) + if 'undefined' in data: + data[None] = data['undefined'] + del(data['undefined']) + return data def __repr__(self): limit = REPR_OUTPUT_SIZE + 1 diff --git a/tests/queryset.py b/tests/queryset.py index c0860b5c..e21db0fa 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1781,6 +1781,28 @@ class QuerySetTest(unittest.TestCase): test_assertions(exec_js) test_assertions(map_reduce) + def test_item_frequencies_null_values(self): + + class Person(Document): + name = StringField() + city = StringField() + + Person.drop_collection() + + Person(name="Wilson Snr", city="CRB").save() + Person(name="Wilson Jr").save() + + freq = Person.objects.item_frequencies('city') + self.assertEquals(freq, {'CRB': 1.0, None: 1.0}) + freq = Person.objects.item_frequencies('city', normalize=True) + self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) + + + freq = Person.objects.item_frequencies('city', map_reduce=True) + self.assertEquals(freq, {'CRB': 1.0, None: 1.0}) + freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True) + self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) + def test_average(self): """Ensure that field can be averaged correctly. """ From 2a8d0012136b83de78bff5f897670ad9cd3bc42a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 11 Jul 2011 17:02:23 +0100 Subject: [PATCH 161/214] Improvements to indexes and efficiencies Thanks to @dcrosta for the patches closes #225 --- mongoengine/queryset.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index d82708c3..de80a3d2 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -320,7 +320,7 @@ class QuerySet(object): providing :class:`~mongoengine.Document` objects as the results. 
""" - ALREADY_INDEXED = set() + __already_indexed = set() def __init__(self, document, collection): self._document = document @@ -468,8 +468,8 @@ class QuerySet(object): """Property that returns the collection object. This allows us to perform operations only if the collection is accessed. """ - if self._document not in QuerySet.ALREADY_INDEXED: - QuerySet.ALREADY_INDEXED.add(self._document) + if self._document not in QuerySet.__already_indexed: + QuerySet.__already_indexed.add(self._document) background = self._document._meta.get('index_background', False) drop_dups = self._document._meta.get('index_drop_dups', False) From cace665858f78782d6f0aaecf9cbd68d39c2f224 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 12 Jul 2011 10:20:36 +0100 Subject: [PATCH 162/214] _delta checking didn't handle db_field_names at all Fixed and added tests, thanks to @wpjunior and @iapain for initial test cases [fixes #226] --- mongoengine/base.py | 11 +- tests/document.py | 274 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 281 insertions(+), 4 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 25b049a3..c2f4d214 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -381,6 +381,7 @@ class DocumentMetaclass(type): attr_value.db_field = attr_name doc_fields[attr_name] = attr_value attrs['_fields'] = doc_fields + attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items()]) new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): @@ -696,6 +697,7 @@ class BaseDocument(object): """ if not key: return + key = self._db_field_map.get(key, key) if hasattr(self, '_changed_fields') and key not in self._changed_fields: self._changed_fields.append(key) @@ -705,13 +707,13 @@ class BaseDocument(object): from mongoengine import EmbeddedDocument _changed_fields = [] _changed_fields += getattr(self, '_changed_fields', []) - for field_name in self._fields: - key = '%s.' % field_name + db_field_name = self._db_field_map.get(field_name, field_name) + key = '%s.' 
% db_field_name field = getattr(self, field_name, None) - if isinstance(field, EmbeddedDocument) and field_name not in _changed_fields: # Grab all embedded fields that have been changed + if isinstance(field, EmbeddedDocument) and db_field_name not in _changed_fields: # Grab all embedded fields that have been changed _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key) if k] - elif isinstance(field, (list, tuple)) and field_name not in _changed_fields: # Loop list fields as they contain documents + elif isinstance(field, (list, tuple)) and db_field_name not in _changed_fields: # Loop list fields as they contain documents for index, value in enumerate(field): if not hasattr(value, '_get_changed_fields'): continue @@ -726,6 +728,7 @@ class BaseDocument(object): # Handles cases where not loaded from_son but has _id doc = self.to_mongo() set_fields = self._get_changed_fields() + set_data = {} unset_data = {} if hasattr(self, '_changed_fields'): diff --git a/tests/document.py b/tests/document.py index a8164697..df3b4fa1 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1203,6 +1203,59 @@ class DocumentTest(unittest.TestCase): self.assertEqual(person.name, None) self.assertEqual(person.age, None) + def test_embedded_update(self): + """ + Test update on `EmbeddedDocumentField` fields + """ + + class Page(EmbeddedDocument): + log_message = StringField(verbose_name="Log message", + required=True) + + class Site(Document): + page = EmbeddedDocumentField(Page) + + + Site.drop_collection() + site = Site(page=Page(log_message="Warning: Dummy message")) + site.save() + + # Update + site = Site.objects.first() + site.page.log_message = "Error: Dummy message" + site.save() + + site = Site.objects.first() + self.assertEqual(site.page.log_message, "Error: Dummy message") + + def test_embedded_update_db_field(self): + """ + Test update on `EmbeddedDocumentField` fields when db_field is other + than default. 
+ """ + + class Page(EmbeddedDocument): + log_message = StringField(verbose_name="Log message", + db_field="page_log_message", + required=True) + + class Site(Document): + page = EmbeddedDocumentField(Page) + + + Site.drop_collection() + + site = Site(page=Page(log_message="Warning: Dummy message")) + site.save() + + # Update + site = Site.objects.first() + site.page.log_message = "Error: Dummy message" + site.save() + + site = Site.objects.first() + self.assertEqual(site.page.log_message, "Error: Dummy message") + def test_delta(self): class Doc(Document): @@ -1408,6 +1461,227 @@ class DocumentTest(unittest.TestCase): del(doc.embedded_field.list_field[2].list_field) self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) + def test_delta_db_field(self): + + class Doc(Document): + string_field = StringField(db_field='db_string_field') + int_field = IntField(db_field='db_int_field') + dict_field = DictField(db_field='db_dict_field') + list_field = ListField(db_field='db_list_field') + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + self.assertEquals(doc._get_changed_fields(), []) + self.assertEquals(doc._delta(), ({}, {})) + + doc.string_field = 'hello' + self.assertEquals(doc._get_changed_fields(), ['db_string_field']) + self.assertEquals(doc._delta(), ({'db_string_field': 'hello'}, {})) + + doc._changed_fields = [] + doc.int_field = 1 + self.assertEquals(doc._get_changed_fields(), ['db_int_field']) + self.assertEquals(doc._delta(), ({'db_int_field': 1}, {})) + + doc._changed_fields = [] + dict_value = {'hello': 'world', 'ping': 'pong'} + doc.dict_field = dict_value + self.assertEquals(doc._get_changed_fields(), ['db_dict_field']) + self.assertEquals(doc._delta(), ({'db_dict_field': dict_value}, {})) + + doc._changed_fields = [] + list_value = ['1', 2, {'hello': 'world'}] + doc.list_field = list_value + self.assertEquals(doc._get_changed_fields(), ['db_list_field']) + self.assertEquals(doc._delta(), ({'db_list_field': list_value}, {})) + + # Test unsetting + doc._changed_fields = [] + doc.dict_field = {} + self.assertEquals(doc._get_changed_fields(), ['db_dict_field']) + self.assertEquals(doc._delta(), ({}, {'db_dict_field': 1})) + + doc._changed_fields = [] + doc.list_field = [] + self.assertEquals(doc._get_changed_fields(), ['db_list_field']) + self.assertEquals(doc._delta(), ({}, {'db_list_field': 1})) + + # Test it saves that data + doc = Doc() + doc.save() + + doc.string_field = 'hello' + doc.int_field = 1 + doc.dict_field = {'hello': 'world'} + doc.list_field = ['1', 2, {'hello': 'world'}] + doc.save() + doc.reload() + + self.assertEquals(doc.string_field, 'hello') + self.assertEquals(doc.int_field, 1) + self.assertEquals(doc.dict_field, {'hello': 'world'}) + self.assertEquals(doc.list_field, ['1', 2, {'hello': 'world'}]) + + def test_delta_recursive_db_field(self): + + class Embedded(EmbeddedDocument): + string_field = StringField(db_field='db_string_field') + int_field = IntField(db_field='db_int_field') + dict_field = DictField(db_field='db_dict_field') + list_field = ListField(db_field='db_list_field') + + class Doc(Document): + string_field = StringField(db_field='db_string_field') + int_field = IntField(db_field='db_int_field') + dict_field = DictField(db_field='db_dict_field') + list_field = ListField(db_field='db_list_field') + embedded_field = EmbeddedDocumentField(Embedded, db_field='db_embedded_field') + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + 
self.assertEquals(doc._get_changed_fields(), []) + self.assertEquals(doc._delta(), ({}, {})) + + embedded_1 = Embedded() + embedded_1.string_field = 'hello' + embedded_1.int_field = 1 + embedded_1.dict_field = {'hello': 'world'} + embedded_1.list_field = ['1', 2, {'hello': 'world'}] + doc.embedded_field = embedded_1 + + self.assertEquals(doc._get_changed_fields(), ['db_embedded_field']) + + embedded_delta = { + '_types': ['Embedded'], + '_cls': 'Embedded', + 'db_string_field': 'hello', + 'db_int_field': 1, + 'db_dict_field': {'hello': 'world'}, + 'db_list_field': ['1', 2, {'hello': 'world'}] + } + self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) + self.assertEquals(doc._delta(), ({'db_embedded_field': embedded_delta}, {})) + + doc.save() + doc.reload() + + doc.embedded_field.dict_field = {} + self.assertEquals(doc._get_changed_fields(), ['db_embedded_field.db_dict_field']) + self.assertEquals(doc.embedded_field._delta(), ({}, {'db_dict_field': 1})) + self.assertEquals(doc._delta(), ({}, {'db_embedded_field.db_dict_field': 1})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.dict_field, {}) + + doc.embedded_field.list_field = [] + self.assertEquals(doc._get_changed_fields(), ['db_embedded_field.db_list_field']) + self.assertEquals(doc.embedded_field._delta(), ({}, {'db_list_field': 1})) + self.assertEquals(doc._delta(), ({}, {'db_embedded_field.db_list_field': 1})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field, []) + + embedded_2 = Embedded() + embedded_2.string_field = 'hello' + embedded_2.int_field = 1 + embedded_2.dict_field = {'hello': 'world'} + embedded_2.list_field = ['1', 2, {'hello': 'world'}] + + doc.embedded_field.list_field = ['1', 2, embedded_2] + self.assertEquals(doc._get_changed_fields(), ['db_embedded_field.db_list_field']) + self.assertEquals(doc.embedded_field._delta(), ({ + 'db_list_field': ['1', 2, { + '_cls': 'Embedded', + '_types': ['Embedded'], + 'db_string_field': 'hello', + 'db_dict_field': {'hello': 'world'}, + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + + self.assertEquals(doc._delta(), ({ + 'db_embedded_field.db_list_field': ['1', 2, { + '_cls': 'Embedded', + '_types': ['Embedded'], + 'db_string_field': 'hello', + 'db_dict_field': {'hello': 'world'}, + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + }] + }, {})) + doc.save() + doc.reload() + + self.assertEquals(doc.embedded_field.list_field[0], '1') + self.assertEquals(doc.embedded_field.list_field[1], 2) + for k in doc.embedded_field.list_field[2]._fields: + self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) + + doc.embedded_field.list_field[2].string_field = 'world' + self.assertEquals(doc._get_changed_fields(), ['db_embedded_field.db_list_field.2.db_string_field']) + self.assertEquals(doc.embedded_field._delta(), ({'db_list_field.2.db_string_field': 'world'}, {})) + self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, {})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') + + # Test multiple assignments + doc.embedded_field.list_field[2].string_field = 'hello world' + doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] + self.assertEquals(doc._get_changed_fields(), ['db_embedded_field.db_list_field']) + self.assertEquals(doc.embedded_field._delta(), ({ + 'db_list_field': ['1', 2, { + '_types': ['Embedded'], + '_cls': 'Embedded', + 
'db_string_field': 'hello world', + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + 'db_dict_field': {'hello': 'world'}}]}, {})) + self.assertEquals(doc._delta(), ({ + 'db_embedded_field.db_list_field': ['1', 2, { + '_types': ['Embedded'], + '_cls': 'Embedded', + 'db_string_field': 'hello world', + 'db_int_field': 1, + 'db_list_field': ['1', 2, {'hello': 'world'}], + 'db_dict_field': {'hello': 'world'}} + ]}, {})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world') + + # Test list native methods + doc.embedded_field.list_field[2].list_field.pop(0) + self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [2, {'hello': 'world'}]}, {})) + doc.save() + doc.reload() + + doc.embedded_field.list_field[2].list_field.append(1) + self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [2, {'hello': 'world'}, 1]}, {})) + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) + + doc.embedded_field.list_field[2].list_field.sort() + doc.save() + doc.reload() + self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) + + del(doc.embedded_field.list_field[2].list_field[2]['hello']) + self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [1, 2, {}]}, {})) + doc.save() + doc.reload() + + del(doc.embedded_field.list_field[2].list_field) + self.assertEquals(doc._delta(), ({}, {'db_embedded_field.db_list_field.2.db_list_field': 1})) + def test_save_only_changed_fields(self): """Ensure save only sets / unsets changed fields """ From 7f0d3638bae2eb8685e55bafedfa6a11ac1b39b9 Mon Sep 17 00:00:00 2001 From: Leo Honkanen Date: Tue, 12 Jul 2011 16:10:47 +0300 Subject: [PATCH 163/214] guard against potentially destructive updates with no update parameters --- mongoengine/queryset.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index de80a3d2..d55c5f7a 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1268,6 +1268,9 @@ class QuerySet(object): .. versionadded:: 0.2 """ + if not update: + raise OperationError("No update parameters, would remove data") + if pymongo.version < '1.1.1': raise OperationError('update() method requires PyMongo 1.1.1+') @@ -1298,6 +1301,9 @@ class QuerySet(object): .. 
versionadded:: 0.2 """ + if not update: + raise OperationError("No update parameters, would remove data") + if not write_options: write_options = {} update = QuerySet._transform_update(self._document, **update) From e0799246322bedb4b197978f4d091e8734d383de Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 12 Jul 2011 14:43:21 +0100 Subject: [PATCH 164/214] Added extra test for update / update_one [closes #231] --- tests/document.py | 20 -------------------- tests/queryset.py | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 20 deletions(-) diff --git a/tests/document.py b/tests/document.py index df3b4fa1..92aa3c2e 100644 --- a/tests/document.py +++ b/tests/document.py @@ -2087,26 +2087,6 @@ class DocumentTest(unittest.TestCase): pickle_doc.reload() self.assertEquals(resurrected, pickle_doc) - def test_write_options(self): - """Test that passing write_options works""" - - self.Person.drop_collection() - - write_options = {"fsync": True} - - author, created = self.Person.objects.get_or_create( - name='Test User', write_options=write_options) - author.save(write_options=write_options) - - self.Person.objects.update(set__name='Ross', write_options=write_options) - - author = self.Person.objects.first() - self.assertEquals(author.name, 'Ross') - - self.Person.objects.update_one(set__name='Test User', write_options=write_options) - author = self.Person.objects.first() - self.assertEquals(author.name, 'Test User') - if __name__ == '__main__': unittest.main() diff --git a/tests/queryset.py b/tests/queryset.py index e21db0fa..a07ff927 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -211,6 +211,42 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() + def test_update_write_options(self): + """Test that passing write_options works""" + + self.Person.drop_collection() + + write_options = {"fsync": True} + + author, created = self.Person.objects.get_or_create( + name='Test User', write_options=write_options) + author.save(write_options=write_options) + + self.Person.objects.update(set__name='Ross', write_options=write_options) + + author = self.Person.objects.first() + self.assertEquals(author.name, 'Ross') + + self.Person.objects.update_one(set__name='Test User', write_options=write_options) + author = self.Person.objects.first() + self.assertEquals(author.name, 'Test User') + + def test_update_update_has_a_value(self): + """Test to ensure that update is passed a value to update to""" + self.Person.drop_collection() + + author = self.Person(name='Test User') + author.save() + + def update_raises(): + self.Person.objects(pk=author.pk).update({}) + + def update_one_raises(): + self.Person.objects(pk=author.pk).update({}) + + self.assertRaises(OperationError, update_raises) + self.assertRaises(OperationError, update_one_raises) + def test_update_array_position(self): """Ensure that updating by array position works. 
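The behaviour covered by these tests is that an update with no modifiers is now rejected rather than sent to the server as a destructive empty update. A short sketch (illustrative only; assumes an active connection)::

    from mongoengine import connect, Document, StringField
    from mongoengine.queryset import OperationError

    connect('example_db')  # any database name

    class Person(Document):
        name = StringField()

    author = Person(name='Test User')
    author.save()

    Person.objects(pk=author.pk).update(set__name='Ross')   # a normal update is fine

    try:
        Person.objects(pk=author.pk).update()                # no update parameters
    except OperationError:
        pass                                                  # refused: it would remove data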
From 7a3412dc13a6745247b723d330bfadb2fa10e025 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 13 Jul 2011 09:54:41 +0100 Subject: [PATCH 165/214] Added helper for reseting the index cache --- mongoengine/queryset.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index d55c5f7a..11c7a804 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -122,7 +122,6 @@ class QueryTreeTransformerVisitor(QNodeVisitor): q_object = reduce(lambda a, b: a & b, and_parts, Q()) q_object = reduce(lambda a, b: a & b, or_group, q_object) clauses.append(q_object) - # Finally, $or the generated clauses in to one query. Each of the # clauses is sufficient for the query to succeed. return reduce(lambda a, b: a | b, clauses, Q()) @@ -431,6 +430,11 @@ class QuerySet(object): return spec + @classmethod + def _reset_already_indexed(cls): + """Helper to reset already indexed, can be useful for testing purposes""" + cls.__already_indexed = set() + def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. From a4c197a83cb366ccb6382538cbea98b6a7082a22 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 13 Jul 2011 14:15:46 +0100 Subject: [PATCH 166/214] Added update() convenience method to a document Thanks to @dcrosta for the initial code [closes #229] --- mongoengine/document.py | 12 ++++++++++++ tests/document.py | 23 +++++++++++++++++++++++ tests/queryset.py | 2 +- 3 files changed, 36 insertions(+), 1 deletion(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index 6ccda997..c41303d8 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -193,6 +193,18 @@ class Document(BaseDocument): reset_changed_fields(self) signals.post_save.send(self.__class__, document=self, created=created) + def update(self, **kwargs): + """Performs an update on the :class:`~mongoengine.Document` + A convenience wrapper to :meth:`~mongoengine.QuerySet.update`. + + Raises :class:`OperationError` if called on an object that has not yet + been saved. + """ + if not self.pk: + raise OperationError('attempt to update a document not yet saved') + + return self.__class__.objects(pk=self.pk).update_one(**kwargs) + def delete(self, safe=False): """Delete the :class:`~mongoengine.Document` from the database. This will only take effect if the document has been previously saved. 
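The new helper proxies to ``QuerySet.update_one`` for the current document, so the usual ``set__``/``inc__`` modifiers apply. A sketch of the intended use, mirroring the ``test_document_update`` test below (illustrative only; assumes an active connection)::

    from mongoengine import connect, Document, StringField
    from mongoengine.queryset import OperationError

    connect('example_db')  # any database name

    class Person(Document):
        name = StringField()

    author = Person(name='dcrosta')
    author.save()

    author.update(set__name='Dan Crosta')   # atomic update of this one document
    author.reload()
    assert author.name == 'Dan Crosta'

    try:
        Person(name='unsaved').update(set__name='x')
    except OperationError:
        pass                                 # the document must be saved before update()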
diff --git a/tests/document.py b/tests/document.py index 92aa3c2e..e1c536e5 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1203,6 +1203,29 @@ class DocumentTest(unittest.TestCase): self.assertEqual(person.name, None) self.assertEqual(person.age, None) + def test_document_update(self): + + def update_not_saved_raises(): + person = self.Person(name='dcrosta') + person.update(set__name='Dan Crosta') + + self.assertRaises(OperationError, update_not_saved_raises) + + author = self.Person(name='dcrosta') + author.save() + + author.update(set__name='Dan Crosta') + author.reload() + + p1 = self.Person.objects.first() + self.assertEquals(p1.name, author.name) + + def update_no_value_raises(): + person = self.Person.objects.first() + person.update() + + self.assertRaises(OperationError, update_no_value_raises) + def test_embedded_update(self): """ Test update on `EmbeddedDocumentField` fields diff --git a/tests/queryset.py b/tests/queryset.py index a07ff927..51c95112 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -242,7 +242,7 @@ class QuerySetTest(unittest.TestCase): self.Person.objects(pk=author.pk).update({}) def update_one_raises(): - self.Person.objects(pk=author.pk).update({}) + self.Person.objects(pk=author.pk).update_one({}) self.assertRaises(OperationError, update_raises) self.assertRaises(OperationError, update_one_raises) From 7395ce5b22f6de552069cee9b028530e0f7d2c1b Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 13 Jul 2011 16:05:17 +0100 Subject: [PATCH 167/214] Updating changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1b4842e7..246f9177 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added update() convenience method to a document - Added cascading saves - so changes to Referenced documents are saved on .save() - Added select_related() support - Added support for the positional operator From 72995a4b3e32f85ad5298d99d0392566230cf144 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 13 Jul 2011 16:06:40 +0100 Subject: [PATCH 168/214] Fixed changing default values to False for embedded items --- mongoengine/base.py | 19 ++++++++++++++++++- tests/document.py | 41 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index c2f4d214..6b11d233 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -755,9 +755,26 @@ class BaseDocument(object): if value: continue - # If we've set a value that aint the default value save it. + # If we've set a value that ain't the default value unset it. 
+ default = None + if path in self._fields: default = self._fields[path].default + else: # Perform a full lookup for lists / embedded lookups + d = self + parts = path.split('.') + field_name = parts.pop() + for p in parts: + if p.isdigit(): + d = d[int(p)] + elif hasattr(d, '__getattribute__'): + d = getattr(d, p) + else: + d = d.get(p) + if hasattr(d, '_fields'): + default = d._fields[field_name].default + + if default is not None: if callable(default): default = default() if default != value: diff --git a/tests/document.py b/tests/document.py index e1c536e5..146681bf 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1735,6 +1735,47 @@ class DocumentTest(unittest.TestCase): self.assertEquals(person.age, 21) self.assertEquals(person.active, False) + def test_save_only_changed_fields_recursive(self): + """Ensure save only sets / unsets changed fields + """ + + class Comment(EmbeddedDocument): + published = BooleanField(default=True) + + class User(self.Person): + comments_dict = DictField() + comments = ListField(EmbeddedDocumentField(Comment)) + active = BooleanField(default=True) + + User.drop_collection() + + # Create person object and save it to the database + person = User(name='Test User', age=30, active=True) + person.comments.append(Comment()) + person.save() + person.reload() + + person = self.Person.objects.get() + self.assertTrue(person.comments[0].published) + + person.comments[0].published = False + person.save() + + person = self.Person.objects.get() + self.assertFalse(person.comments[0].published) + + # Simple dict w + person.comments_dict['first_post'] = Comment() + person.save() + + person = self.Person.objects.get() + self.assertTrue(person.comments_dict['first_post'].published) + + person.comments_dict['first_post'].published = False + person.save() + + person = self.Person.objects.get() + self.assertTrue(person.comments_dict['first_post'].published) def test_delete(self): """Ensure that document may be deleted using the delete method. """ From b3ef67a544e2e50859bfe254e9ebc5892bcdcc90 Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Thu, 14 Jul 2011 18:43:11 -0400 Subject: [PATCH 169/214] get_document_or_404 raises 404 if given an invalid ObjectId (and possibly on other errors, not sure what else raises ValidationError) --- mongoengine/django/shortcuts.py | 3 ++- tests/django_tests.py | 12 +++++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/mongoengine/django/shortcuts.py b/mongoengine/django/shortcuts.py index 29bc17a8..59a20741 100644 --- a/mongoengine/django/shortcuts.py +++ b/mongoengine/django/shortcuts.py @@ -1,6 +1,7 @@ from django.http import Http404 from mongoengine.queryset import QuerySet from mongoengine.base import BaseDocument +from mongoengine.base import ValidationError def _get_queryset(cls): """Inspired by django.shortcuts.*""" @@ -25,7 +26,7 @@ def get_document_or_404(cls, *args, **kwargs): queryset = _get_queryset(cls) try: return queryset.get(*args, **kwargs) - except queryset._document.DoesNotExist: + except (queryset._document.DoesNotExist, ValidationError): raise Http404('No %s matches the given query.' 
% queryset._document._class_name) def get_list_or_404(cls, *args, **kwargs): diff --git a/tests/django_tests.py b/tests/django_tests.py index 930cc113..9c7e3280 100644 --- a/tests/django_tests.py +++ b/tests/django_tests.py @@ -3,7 +3,9 @@ import unittest from mongoengine import * +from mongoengine.django.shortcuts import get_document_or_404 +from django.http import Http404 from django.template import Context, Template from django.conf import settings settings.configure() @@ -56,4 +58,12 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(t.render(Context(d)), 'D-10:C-30:') # Check double rendering doesn't throw an error - self.assertEqual(t.render(Context(d)), 'D-10:C-30:') \ No newline at end of file + self.assertEqual(t.render(Context(d)), 'D-10:C-30:') + + def test_get_document_or_404(self): + p = self.Person(name="G404") + p.save() + + self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234') + self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk)) + From bbd3a6961ef5c731487f9e1a8dd2043f04639ffe Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Mon, 18 Jul 2011 08:35:29 +0100 Subject: [PATCH 170/214] Fixed typo in tutorial [closes #235] Thanks @mulka --- docs/tutorial.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 63f8fe9b..6ce8d102 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -22,7 +22,7 @@ function. The only argument we need to provide is the name of the MongoDB database to use:: from mongoengine import * - + connect('tumblelog') For more information about connecting to MongoDB see :ref:`guide-connecting`. @@ -112,7 +112,7 @@ link table, we can just store a list of tags in each post. So, for both efficiency and simplicity's sake, we'll store the tags as strings directly within the post, rather than storing references to tags in a separate collection. Especially as tags are generally very short (often even shorter -than a document's id), this denormalisation won't impact very strongly on the +than a document's id), this denormalisation won't impact very strongly on the size of our database. So let's take a look that the code our modified :class:`Post` class:: @@ -265,5 +265,5 @@ the first matched by the query you provide. 
Aggregation functions may also be used on :class:`~mongoengine.queryset.QuerySet` objects:: num_posts = Post.objects(tags='mongodb').count() - print 'Found % posts with tag "mongodb"' % num_posts - + print 'Found %d posts with tag "mongodb"' % num_posts + From fa39789bac7e2e76280f17832e517a8cd378f48d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Mon, 18 Jul 2011 12:44:28 -0300 Subject: [PATCH 171/214] added SequenceField --- mongoengine/fields.py | 33 ++++++++++++++++++++++++++++++++- tests/fields.py | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 88040115..a89ec3e4 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -20,7 +20,8 @@ __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', 'DecimalField', 'ComplexDateTimeField', 'URLField', 'GenericReferenceField', 'FileField', 'BinaryField', - 'SortedListField', 'EmailField', 'GeoPointField'] + 'SortedListField', 'EmailField', 'GeoPointField', + 'SequenceField'] RECURSIVE_REFERENCE_CONSTANT = 'self' @@ -876,3 +877,33 @@ class GeoPointField(BaseField): if (not isinstance(value[0], (float, int)) and not isinstance(value[1], (float, int))): raise ValidationError('Both values in point must be float or int.') + + +class SequenceField(IntField): + def generate_new_value(self): + """ + Generate and Increment counter + """ + sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), + self.name) + collection = _get_db()['mongoengine.counters'] + counter = collection.find_and_modify(query={"_id": sequence_id}, + update={"$inc" : {"next": 1}}, + new=True, + upsert=True) + return counter['next'] + + def __get__(self, instance, owner): + if not instance._data: + return + + if instance is None: + return self + + value = instance._data.get(self.name) + + if not value: + value = self.generate_new_value() + instance._data[self.name] = value + + return value diff --git a/tests/fields.py b/tests/fields.py index 7a752998..2ceda7df 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1380,5 +1380,42 @@ class FieldTest(unittest.TestCase): self.assertEqual(d2.data2, {}) + def test_sequence_field(self): + class Person(Document): + id = SequenceField(primary_key=True) + + self.db['mongoengine.counters'].drop() + Person.drop_collection() + p = Person() + p.save() + + p = Person.objects.first() + self.assertEqual(p.id, 1) + + def test_multiple_sequence_field(self): + class Person(Document): + id = SequenceField(primary_key=True) + name = StringField() + + self.db['mongoengine.counters'].drop() + Person.drop_collection() + + for x in xrange(10): + p = Person(name="Person %s" % x) + p.save() + + ids = [i.id for i in Person.objects] + self.assertEqual(ids, range(1, 11)) + + for x in xrange(10): + p = Person(name="Person %s" % x) + p.save() + + ids = [i.id for i in Person.objects] + self.assertEqual(ids, range(1, 21)) + + counter = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(counter['next'], 20) + if __name__ == '__main__': unittest.main() From cb324595ef67c6a7e826aa738e23a9c37d4f41e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Tue, 19 Jul 2011 07:36:35 -0300 Subject: [PATCH 172/214] fixerrors --- mongoengine/base.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 6b11d233..04e13cfc 100644 
--- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -382,6 +382,7 @@ class DocumentMetaclass(type): doc_fields[attr_name] = attr_value attrs['_fields'] = doc_fields attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items()]) + attrs['_reverse_db_field_map'] = dict([(v.db_field, k) for k, v in doc_fields.items()]) new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): @@ -763,17 +764,22 @@ class BaseDocument(object): else: # Perform a full lookup for lists / embedded lookups d = self parts = path.split('.') - field_name = parts.pop() + db_field_name = parts.pop() for p in parts: if p.isdigit(): d = d[int(p)] elif hasattr(d, '__getattribute__'): - d = getattr(d, p) + real_path = d._reverse_db_field_map.get(p, p) + d = getattr(d, real_path) else: d = d.get(p) + if hasattr(d, '_fields'): + field_name = d._reverse_db_field_map.get(db_field_name, + db_field_name) + default = d._fields[field_name].default - + if default is not None: if callable(default): default = default() From 5834fa840c867cf2c02fd66b0fcfd884dd2f482a Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 19 Jul 2011 16:51:26 +0100 Subject: [PATCH 173/214] Tweaked SequenceField so that it doesn't increment on creation. [refs #238] --- AUTHORS | 1 + docs/changelog.rst | 1 + mongoengine/base.py | 9 ++--- mongoengine/fields.py | 36 ++++++++++++++------ tests/fields.py | 79 +++++++++++++++++++++++++++++++++---------- 5 files changed, 95 insertions(+), 31 deletions(-) diff --git a/AUTHORS b/AUTHORS index aecdcaa9..b13af2b0 100644 --- a/AUTHORS +++ b/AUTHORS @@ -4,3 +4,4 @@ Deepak Thukral Florian Schlachter Steve Challis Ross Lawley +Wilson Júnior diff --git a/docs/changelog.rst b/docs/changelog.rst index 246f9177..e2ecceeb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added SequenceField - for creating sequential counters - Added update() convenience method to a document - Added cascading saves - so changes to Referenced documents are saved on .save() - Added select_related() support diff --git a/mongoengine/base.py b/mongoengine/base.py index 04e13cfc..07f53c30 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -587,7 +587,8 @@ class BaseDocument(object): # Set any get_fieldname_display methods self.__set_field_display() - + # Flag initialised + self._initialised = True signals.post_init.send(self.__class__, document=self) def validate(self): @@ -773,13 +774,13 @@ class BaseDocument(object): d = getattr(d, real_path) else: d = d.get(p) - + if hasattr(d, '_fields'): field_name = d._reverse_db_field_map.get(db_field_name, db_field_name) - + default = d._fields[field_name].default - + if default is not None: if callable(default): default = default() diff --git a/mongoengine/fields.py b/mongoengine/fields.py index a89ec3e4..3234160d 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -880,30 +880,46 @@ class GeoPointField(BaseField): class SequenceField(IntField): + """Provides a sequental counter. + + ..note:: Although traditional databases often use increasing sequence + numbers for primary keys. In MongoDB, the preferred approach is to + use Object IDs instead. The concept is that in a very large + cluster of machines, it is easier to create an object ID than have + global, uniformly increasing sequence numbers. + + .. 
versionadded:: 0.5 + """ + def __init__(self, collection_name=None, *args, **kwargs): + self.collection_name = collection_name or 'mongoengine.counters' + return super(SequenceField, self).__init__(*args, **kwargs) + def generate_new_value(self): """ - Generate and Increment counter + Generate and Increment the counter """ sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), self.name) - collection = _get_db()['mongoengine.counters'] + collection = _get_db()[self.collection_name] counter = collection.find_and_modify(query={"_id": sequence_id}, - update={"$inc" : {"next": 1}}, + update={"$inc": {"next": 1}}, new=True, upsert=True) return counter['next'] def __get__(self, instance, owner): - if not instance._data: - return - + if instance is None: return self - + if not instance._data: + return value = instance._data.get(self.name) - - if not value: + if not value and instance._initialised: value = self.generate_new_value() instance._data[self.name] = value - + return value + + def to_python(self, value): + if value is None: + value = self.generate_new_value() return value diff --git a/tests/fields.py b/tests/fields.py index 2ceda7df..1f070ae1 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1379,20 +1379,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(d2.data, {}) self.assertEqual(d2.data2, {}) - def test_sequence_field(self): - class Person(Document): - id = SequenceField(primary_key=True) - - self.db['mongoengine.counters'].drop() - Person.drop_collection() - p = Person() - p.save() - - p = Person.objects.first() - self.assertEqual(p.id, 1) - - def test_multiple_sequence_field(self): class Person(Document): id = SequenceField(primary_key=True) name = StringField() @@ -1404,18 +1391,76 @@ class FieldTest(unittest.TestCase): p = Person(name="Person %s" % x) p.save() + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + + def test_multiple_sequence_fields(self): + class Person(Document): + id = SequenceField(primary_key=True) + counter = SequenceField() + name = StringField() + + self.db['mongoengine.counters'].drop() + Person.drop_collection() + for x in xrange(10): p = Person(name="Person %s" % x) p.save() - ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 21)) + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + + ids = [i.id for i in Person.objects] + self.assertEqual(ids, range(1, 11)) + + counters = [i.counter for i in Person.objects] + self.assertEqual(counters, range(1, 11)) + + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + + def test_multiple_sequence_fields_on_docs(self): + + class Animal(Document): + id = SequenceField(primary_key=True) + + class Person(Document): + id = SequenceField(primary_key=True) + + self.db['mongoengine.counters'].drop() + Animal.drop_collection() + Person.drop_collection() + + for x in xrange(10): + a = Animal(name="Animal %s" % x) + a.save() + p = Person(name="Person %s" % x) + p.save() + + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + + c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) + self.assertEqual(c['next'], 10) + + ids = [i.id for i in Person.objects] + self.assertEqual(ids, range(1, 
11)) + + id = [i.id for i in Animal.objects] + self.assertEqual(id, range(1, 11)) + + c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) + self.assertEqual(c['next'], 10) + + c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) + self.assertEqual(c['next'], 10) - counter = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(counter['next'], 20) if __name__ == '__main__': unittest.main() From 49764b51dc69b33857efce105d082bd80df2d97a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Tue, 19 Jul 2011 14:43:32 -0300 Subject: [PATCH 174/214] tweaks for _db_field_map --- mongoengine/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 07f53c30..b88a2b89 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -381,8 +381,8 @@ class DocumentMetaclass(type): attr_value.db_field = attr_name doc_fields[attr_name] = attr_value attrs['_fields'] = doc_fields - attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items()]) - attrs['_reverse_db_field_map'] = dict([(v.db_field, k) for k, v in doc_fields.items()]) + attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k!=v]) + attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()]) new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): From 273412fda183fc8c516ef681751a5b353de9db55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Tue, 19 Jul 2011 14:48:38 -0300 Subject: [PATCH 175/214] tweaks for _db_field_map --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index b88a2b89..525b8bc8 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -381,7 +381,7 @@ class DocumentMetaclass(type): attr_value.db_field = attr_name doc_fields[attr_name] = attr_value attrs['_fields'] = doc_fields - attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k!=v]) + attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k!=v.db_field]) attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()]) new_class = super_new(cls, name, bases, attrs) From 0d1804461dd302ceb4a04a5e4e0e0b545b86b3c1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 19 Jul 2011 22:12:09 +0100 Subject: [PATCH 176/214] Updated handling setting of object managers and inheritance --- mongoengine/base.py | 13 ++++++++---- tests/queryset.py | 50 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 4 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 07f53c30..18ee9134 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -447,7 +447,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # Subclassed documents inherit collection from superclass for base in bases: if hasattr(base, '_meta'): - if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False): import warnings msg = "Trying to set a collection on a subclass (%s)" % name @@ -465,14 +464,20 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # Propagate 'allow_inheritance' if 'allow_inheritance' in base._meta: base_meta['allow_inheritance'] = base._meta['allow_inheritance'] + if 'queryset_class' in base._meta: + base_meta['queryset_class'] = base._meta['queryset_class'] + try: + base_meta['objects'] = 
base.__getattribute__(base, 'objects') + except AttributeError: + pass meta = { 'abstract': False, 'collection': collection, 'max_documents': None, 'max_size': None, - 'ordering': [], # default ordering applied at runtime - 'indexes': [], # indexes to be ensured at runtime + 'ordering': [], # default ordering applied at runtime + 'indexes': [], # indexes to be ensured at runtime 'id_field': id_field, 'index_background': False, 'index_drop_dups': False, @@ -496,7 +501,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class._meta['collection'] = collection(new_class) # Provide a default queryset unless one has been manually provided - manager = attrs.get('objects', QuerySetManager()) + manager = attrs.get('objects', meta.get('objects', QuerySetManager())) if hasattr(manager, 'queryset_class'): meta['queryset_class'] = manager.queryset_class new_class.objects = manager diff --git a/tests/queryset.py b/tests/queryset.py index 51c95112..a21bae69 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -2318,6 +2318,56 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() + def test_custom_querysets_inherited(self): + """Ensure that custom QuerySet classes may be used. + """ + + class CustomQuerySet(QuerySet): + def not_empty(self): + return len(self) > 0 + + class Base(Document): + meta = {'abstract': True, 'queryset_class': CustomQuerySet} + + class Post(Base): + pass + + Post.drop_collection() + self.assertTrue(isinstance(Post.objects, CustomQuerySet)) + self.assertFalse(Post.objects.not_empty()) + + Post().save() + self.assertTrue(Post.objects.not_empty()) + + Post.drop_collection() + + def test_custom_querysets_inherited_direct(self): + """Ensure that custom QuerySet classes may be used. + """ + + class CustomQuerySet(QuerySet): + def not_empty(self): + return len(self) > 0 + + class CustomQuerySetManager(QuerySetManager): + queryset_class = CustomQuerySet + + class Base(Document): + meta = {'abstract': True} + objects = CustomQuerySetManager() + + class Post(Base): + pass + + Post.drop_collection() + self.assertTrue(isinstance(Post.objects, CustomQuerySet)) + self.assertFalse(Post.objects.not_empty()) + + Post().save() + self.assertTrue(Post.objects.not_empty()) + + Post.drop_collection() + def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ From 72aa191e70a8b5006e30a9b41f59e3108fe124fb Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 20 Jul 2011 11:58:13 +0100 Subject: [PATCH 177/214] Stop abstract classes being used in the document_registry --- mongoengine/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 18ee9134..02433874 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -339,7 +339,6 @@ class DocumentMetaclass(type): # Include all fields present in superclasses if hasattr(base, '_fields'): doc_fields.update(base._fields) - class_name.append(base._class_name) # Get superclasses from superclass superclasses[base._class_name] = base superclasses.update(base._superclasses) @@ -351,6 +350,7 @@ class DocumentMetaclass(type): # Ensure that the Document class may be subclassed - # inheritance may be disabled to remove dependency on # additional fields _cls and _types + class_name.append(base._class_name) if base._meta.get('allow_inheritance', True) == False: raise ValueError('Document %s may not be subclassed' % base.__name__) From 13afead9fb43ff6c1150bd21a5084b260116596a Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Wed, 20 Jul 2011 12:41:20 
-0400 Subject: [PATCH 178/214] add where() method to QuerySet --- mongoengine/queryset.py | 5 +++++ tests/queryset.py | 28 ++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 11c7a804..b1185ee0 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1397,6 +1397,11 @@ class QuerySet(object): db = _get_db() return db.eval(code, *fields) + def where(self, where_clause): + where_clause = self._sub_js_fields(where_clause) + self._where_clause = where_clause + return self + def sum(self, field): """Sum over the values of the specified field. diff --git a/tests/queryset.py b/tests/queryset.py index a21bae69..ce64a004 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -2502,6 +2502,34 @@ class QuerySetTest(unittest.TestCase): for key, value in info.iteritems()] self.assertTrue(([('_types', 1), ('message', 1)], False, False) in info) + def test_where(self): + """Ensure that where clauses work. + """ + + class IntPair(Document): + fielda = IntField() + fieldb = IntField() + + IntPair.objects._collection.remove() + + a = IntPair(fielda=1, fieldb=1) + b = IntPair(fielda=1, fieldb=2) + c = IntPair(fielda=2, fieldb=1) + a.save() + b.save() + c.save() + + query = IntPair.objects.where('this[~fielda] >= this[~fieldb]') + self.assertEqual('this["fielda"] >= this["fieldb"]', query._where_clause) + results = list(query) + self.assertEqual(2, len(results)) + self.assertTrue(a in results) + self.assertTrue(c in results) + + query = IntPair.objects.where('this[~fielda] == this[~fieldb]') + results = list(query) + self.assertEqual(1, len(results)) + self.assertTrue(a in results) class QTest(unittest.TestCase): From ac72722e57fb376dba9e67391e98d8def5b17d60 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 22 Jul 2011 13:51:11 +0100 Subject: [PATCH 179/214] Fixing bug setting a value that equates to false --- mongoengine/base.py | 6 +++--- tests/document.py | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 02433874..909ed6cd 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -762,7 +762,7 @@ class BaseDocument(object): if value: continue - # If we've set a value that ain't the default value unset it. + # If we've set a value that ain't the default value dont unset it. 
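+ # e.g. a field explicitly set to 0 or '' (falsy, but not the field's default) must stay in set_data as a $set rather than become an $unset.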
default = None if path in self._fields: @@ -789,8 +789,8 @@ class BaseDocument(object): if default is not None: if callable(default): default = default() - if default != value: - continue + if default != value: + continue del(set_data[path]) unset_data[path] = 1 diff --git a/tests/document.py b/tests/document.py index 146681bf..5789e204 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1048,6 +1048,26 @@ class DocumentTest(unittest.TestCase): except ValidationError: self.fail() + def test_save_to_a_value_that_equates_to_false(self): + + class Thing(EmbeddedDocument): + count = IntField() + + class User(Document): + thing = EmbeddedDocumentField(Thing) + + User.drop_collection() + + user = User(thing=Thing(count=1)) + user.save() + user.reload() + + user.thing.count = 0 + user.save() + + user.reload() + self.assertEquals(user.thing.count, 0) + def test_save_max_recursion_not_hit(self): class Person(Document): From 130fb9916d21c8fb14ae2a31be7898f529aa549c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Fri, 22 Jul 2011 10:19:41 -0300 Subject: [PATCH 180/214] fixes for SequenceField --- mongoengine/base.py | 1 + mongoengine/fields.py | 12 ++++++++++++ tests/fields.py | 26 ++++++++++++++++++++++++++ 3 files changed, 39 insertions(+) diff --git a/mongoengine/base.py b/mongoengine/base.py index 565bf6ba..79851da9 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -577,6 +577,7 @@ class BaseDocument(object): signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} + self._initialised = False # Assign default values to instance for attr_name, field in self._fields.items(): value = getattr(self, attr_name, None) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 3234160d..b2f1e2a2 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -911,14 +911,26 @@ class SequenceField(IntField): if instance is None: return self + if not instance._data: return + value = instance._data.get(self.name) + if not value and instance._initialised: value = self.generate_new_value() instance._data[self.name] = value + instance._mark_as_changed(self.name) + return value + def __set__(self, instance, value): + + if value is None and instance._initialised: + value = self.generate_new_value() + + return super(SequenceField, self).__set__(instance, value) + def to_python(self, value): if value is None: value = self.generate_new_value() diff --git a/tests/fields.py b/tests/fields.py index 1f070ae1..f8aeb86c 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1425,6 +1425,32 @@ class FieldTest(unittest.TestCase): c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) + def test_sequence_fields_reload(self): + class Animal(Document): + counter = SequenceField() + type = StringField() + + self.db['mongoengine.counters'].drop() + Animal.drop_collection() + + a = Animal(type="Boi") + a.save() + + self.assertEqual(a.counter, 1) + a.reload() + self.assertEqual(a.counter, 1) + + a.counter = None + self.assertEqual(a.counter, 2) + a.save() + + self.assertEqual(a.counter, 2) + + a = Animal.objects.first() + self.assertEqual(a.counter, 2) + a.reload() + self.assertEqual(a.counter, 2) + def test_multiple_sequence_fields_on_docs(self): class Animal(Document): From 6471c6e133cbca62983b3f785191457732daa3c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Wed, 27 Jul 2011 08:45:15 -0300 Subject: [PATCH 181/214] added GenericEmbeddedDocumentField --- mongoengine/fields.py | 28 
+++++++++++++++++++++++++++- tests/fields.py | 25 +++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index b2f1e2a2..7d57d78f 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -21,7 +21,7 @@ __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', 'DecimalField', 'ComplexDateTimeField', 'URLField', 'GenericReferenceField', 'FileField', 'BinaryField', 'SortedListField', 'EmailField', 'GeoPointField', - 'SequenceField'] + 'SequenceField', 'GenericEmbeddedDocumentField'] RECURSIVE_REFERENCE_CONSTANT = 'self' @@ -420,6 +420,32 @@ class EmbeddedDocumentField(BaseField): def prepare_query_value(self, op, value): return self.to_mongo(value) +class GenericEmbeddedDocumentField(BaseField): + def prepare_query_value(self, op, value): + return self.to_mongo(value) + + def to_python(self, value): + if isinstance(value, dict): + doc_cls = get_document(value['_cls']) + value = doc_cls._from_son(value) + + return value + + def validate(self, value): + if not isinstance(value, EmbeddedDocument): + raise ValidationError('Invalid embedded document instance ' + 'provided to an GenericEmbeddedDocumentField') + + value.validate() + + def to_mongo(self, document): + if document is None: + return None + + data = document.to_mongo() + if not '_cls' in data: + data['_cls'] = document._class_name + return data class ListField(ComplexBaseField): """A list field that wraps a standard field, allowing multiple instances diff --git a/tests/fields.py b/tests/fields.py index f8aeb86c..960e55c9 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1488,5 +1488,30 @@ class FieldTest(unittest.TestCase): self.assertEqual(c['next'], 10) + def test_generic_embedded_document(self): + class Car(EmbeddedDocument): + name = StringField() + + class Dish(EmbeddedDocument): + food = StringField(required=True) + number = IntField() + + class Person(Document): + name = StringField() + like = GenericEmbeddedDocumentField() + + person = Person(name='Test User') + person.like = Car(name='Fiat') + person.save() + + person = Person.objects.first() + self.assertTrue(isinstance(person.like, Car)) + + person.like = Dish(food="arroz", number=15) + person.save() + + person = Person.objects.first() + self.assertTrue(isinstance(person.like, Dish)) + if __name__ == '__main__': unittest.main() From 3f3f93b0fa07d17960d5670d6783b7896612771c Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 29 Jul 2011 15:48:29 +0100 Subject: [PATCH 182/214] Fixing delta bug for dict fields --- docs/changelog.rst | 2 +- mongoengine/base.py | 13 +++++++++---- tests/document.py | 15 ++++++++++++++- 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e2ecceeb..f3a4b944 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -53,7 +53,7 @@ Changes in dev - Added reverse delete rules - Fixed issue with unset operation - Fixed Q-object bug -- Added ``QuerySet.all_fields`` resets previous .only() and .exlude() +- Added ``QuerySet.all_fields`` resets previous .only() and .exclude() - Added ``QuerySet.exclude`` - Added django style choices - Fixed order and filter issue diff --git a/mongoengine/base.py b/mongoengine/base.py index ea0f98a4..e224367c 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -721,12 +721,18 @@ class BaseDocument(object): field = getattr(self, field_name, None) if isinstance(field, EmbeddedDocument) and db_field_name not in _changed_fields: # Grab all embedded fields that 
have been changed _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key) if k] - elif isinstance(field, (list, tuple)) and db_field_name not in _changed_fields: # Loop list fields as they contain documents - for index, value in enumerate(field): + elif isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields: # Loop list / dict fields as they contain documents + # Determine the iterator to use + if not hasattr(field, 'items'): + iterator = enumerate(field) + else: + iterator = field.iteritems() + for index, value in iterator: if not hasattr(value, '_get_changed_fields'): continue list_key = "%s%s." % (key, index) _changed_fields += ["%s%s" % (list_key, k) for k in value._get_changed_fields(list_key) if k] + return _changed_fields def _delta(self): @@ -736,7 +742,6 @@ # Handles cases where not loaded from_son but has _id doc = self.to_mongo() set_fields = self._get_changed_fields() - set_data = {} unset_data = {} if hasattr(self, '_changed_fields'): @@ -775,7 +780,7 @@ for p in parts: if p.isdigit(): d = d[int(p)] - elif hasattr(d, '__getattribute__'): + elif hasattr(d, '__getattribute__') and not isinstance(d, dict): real_path = d._reverse_db_field_map.get(p, p) d = getattr(d, real_path) else: diff --git a/tests/document.py index 5789e204..1c9b90ed 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1504,6 +1504,18 @@ class DocumentTest(unittest.TestCase): del(doc.embedded_field.list_field[2].list_field) self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) + doc.save() + doc.reload() + + doc.dict_field['Embedded'] = embedded_1 + doc.save() + doc.reload() + + doc.dict_field['Embedded'].string_field = 'Hello World' + self.assertEquals(doc._get_changed_fields(), ['dict_field.Embedded.string_field']) + self.assertEquals(doc._delta(), ({'dict_field.Embedded.string_field': 'Hello World'}, {})) + + def test_delta_db_field(self): class Doc(Document): @@ -1795,7 +1807,8 @@ class DocumentTest(unittest.TestCase): person.save() person = self.Person.objects.get() - self.assertTrue(person.comments_dict['first_post'].published) + self.assertFalse(person.comments_dict['first_post'].published) + def test_delete(self): """Ensure that document may be deleted using the delete method. """ From 7913ed1841abc7776b5efbb8362da3a76b1c35cf Mon Sep 17 00:00:00 2001 From: Slavi Pantaleev Date: Sat, 30 Jul 2011 00:52:37 +0300 Subject: [PATCH 183/214] Prevent double saving when doing a forced insert. When doing save(force_insert=True) on a document missing an _id field, the document was first getting inserted and then being saved a second time. Also refactored the code a bit to make the intent (insert/update/delta-update) cleaner, especially since the `created` variable name was so confusing.
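
For reference, a minimal sketch of the control flow this change aims for (names are taken from the patch below; error handling and signals are omitted)::

    created = '_id' in doc                        # the document already has an id
    creation_mode = force_insert or not created   # full insert/save rather than a delta update

    if creation_mode:
        if force_insert:
            object_id = collection.insert(doc, safe=safe, **write_options)
        else:
            object_id = collection.save(doc, safe=safe, **write_options)
    else:
        # existing document: only send the changed fields
        object_id = doc['_id']
        updates, removals = self._delta()

Previously the insert and the save were two independent `if` blocks, so a new
document saved with force_insert=True hit the collection twice.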
--- mongoengine/document.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/mongoengine/document.py b/mongoengine/document.py index c41303d8..bd2bbda4 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -143,13 +143,15 @@ class Document(BaseDocument): doc = self.to_mongo() - created = '_id' not in doc + created = '_id' in doc + creation_mode = force_insert or not created try: collection = self.__class__.objects._collection - if force_insert: - object_id = collection.insert(doc, safe=safe, **write_options) - if created: - object_id = collection.save(doc, safe=safe, **write_options) + if creation_mode: + if force_insert: + object_id = collection.insert(doc, safe=safe, **write_options) + else: + object_id = collection.save(doc, safe=safe, **write_options) else: object_id = doc['_id'] updates, removals = self._delta() @@ -191,7 +193,7 @@ class Document(BaseDocument): reset_changed_fields(field, inspected_docs) reset_changed_fields(self) - signals.post_save.send(self.__class__, document=self, created=created) + signals.post_save.send(self.__class__, document=self, created=creation_mode) def update(self, **kwargs): """Performs an update on the :class:`~mongoengine.Document` From 376ca717fa53bd010a834d1fbd30d535ec018529 Mon Sep 17 00:00:00 2001 From: John Arnfield Date: Sat, 30 Jul 2011 22:01:24 +0100 Subject: [PATCH 184/214] Added support for within_polygon for spatial queries --- mongoengine/queryset.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 0af8dead..5ceeea9d 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -481,7 +481,7 @@ class QuerySet(object): """ operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', 'all', 'size', 'exists', 'not'] - geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'near', 'near_sphere'] + geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'within_polygon', 'near', 'near_sphere'] match_operators = ['contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact'] @@ -527,6 +527,8 @@ class QuerySet(object): value = {'$within': {'$center': value}} elif op == "within_spherical_distance": value = {'$within': {'$centerSphere': value}} + elif op == "within_polygon": + value = {'$within': {'$polygon': value}} elif op == "near": value = {'$near': value} elif op == "near_sphere": From 63ee4fef1a5c81f453a69f355bcaddc61c5eff6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Fri, 5 Aug 2011 11:03:47 -0300 Subject: [PATCH 185/214] Translations for django/auth.py --- mongoengine/django/auth.py | 37 +++++++++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index 92424909..38370cc5 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -3,6 +3,7 @@ from mongoengine import * from django.utils.hashcompat import md5_constructor, sha_constructor from django.utils.encoding import smart_str from django.contrib.auth.models import AnonymousUser +from django.utils.translation import ugettext_lazy as _ import datetime @@ -21,16 +22,32 @@ class User(Document): """A User document that aims to mirror most of the API specified by Django at http://docs.djangoproject.com/en/dev/topics/auth/#users """ - username = StringField(max_length=30, required=True) - first_name = StringField(max_length=30) - last_name = 
StringField(max_length=30) - email = StringField() - password = StringField(max_length=128) - is_staff = BooleanField(default=False) - is_active = BooleanField(default=True) - is_superuser = BooleanField(default=False) - last_login = DateTimeField(default=datetime.datetime.now) - date_joined = DateTimeField(default=datetime.datetime.now) + username = StringField(max_length=30, required=True, + verbose_name=_('username'), + help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters")) + + first_name = StringField(max_length=30, + verbose_name=_('first name')) + + last_name = StringField(max_length=30, + verbose_name=_('last name')) + email = EmailField(verbose_name=_('e-mail address')) + password = StringField(max_length=128, + verbose_name=_('password'), + help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the change password form.")) + is_staff = BooleanField(default=False, + verbose_name=_('staff status'), + help_text=_("Designates whether the user can log into this admin site.")) + is_active = BooleanField(default=True, + verbose_name=_('active'), + help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts.")) + is_superuser = BooleanField(default=False, + verbose_name=_('superuser status'), + help_text=_("Designates that this user has all permissions without explicitly assigning them.")) + last_login = DateTimeField(default=datetime.datetime.now, + verbose_name=_('last login')) + date_joined = DateTimeField(default=datetime.datetime.now, + verbose_name=_('date joined')) meta = { 'indexes': [ From 331f8b8ae7ef31badb0db3ddf4b7e843406ea807 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Tue, 9 Aug 2011 14:31:26 -0300 Subject: [PATCH 186/214] fixes dereference for documents (allow_inheritance = False) --- mongoengine/dereference.py | 18 +++++++++++++++--- tests/document.py | 25 +++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 3 deletions(-) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 6bfabd94..7fe9ba2f 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -3,6 +3,7 @@ import operator import pymongo from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass +from fields import ReferenceField from connection import _get_db from queryset import QuerySet from document import Document @@ -32,8 +33,16 @@ class DeReference(object): items = [i for i in items] self.max_depth = max_depth + + doc_type = None + if instance and instance._fields: + doc_type = instance._fields[name].field + + if isinstance(doc_type, ReferenceField): + doc_type = doc_type.document_type + self.reference_map = self._find_references(items) - self.object_map = self._fetch_objects() + self.object_map = self._fetch_objects(doc_type=doc_type) return self._attach_objects(items, 0, instance, name, get) def _find_references(self, items, depth=0): @@ -80,7 +89,7 @@ class DeReference(object): depth += 1 return reference_map - def _fetch_objects(self): + def _fetch_objects(self, doc_type=None): """Fetch all references and convert to their document objects """ object_map = {} @@ -94,7 +103,10 @@ class DeReference(object): else: # Generic reference: use the refs data to convert to document references = _get_db()[col].find({'_id': {'$in': refs}}) for ref in references: - doc = get_document(ref['_cls'])._from_son(ref) + if '_cls' in ref: + doc = get_document(ref['_cls'])._from_son(ref) + else: + doc = doc_type._from_son(ref) object_map[doc.id] = doc 
return object_map diff --git a/tests/document.py b/tests/document.py index 1c9b90ed..90a0bc25 100644 --- a/tests/document.py +++ b/tests/document.py @@ -289,6 +289,31 @@ class DocumentTest(unittest.TestCase): Zoo.drop_collection() Animal.drop_collection() + def test_reference_inheritance(self): + class Stats(Document): + created = DateTimeField(default=datetime.now) + + meta = {'allow_inheritance': False} + + class CompareStats(Document): + generated = DateTimeField(default=datetime.now) + stats = ListField(ReferenceField(Stats)) + + Stats.drop_collection() + CompareStats.drop_collection() + + list_stats = [] + + for i in xrange(10): + s = Stats() + s.save() + list_stats.append(s) + + cmp_stats = CompareStats(stats=list_stats) + cmp_stats.save() + + self.assertEqual(list_stats, CompareStats.objects.first().stats) + def test_inheritance(self): """Ensure that document may inherit fields from a superclass document. """ From 4abfcb0188257abb7955efec24e54a4ea3d12e7d Mon Sep 17 00:00:00 2001 From: Gareth Lloyd Date: Mon, 15 Aug 2011 10:01:48 +0100 Subject: [PATCH 187/214] check for presence of _geo_indices on field class before referencing --- mongoengine/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index e224367c..6be5c3de 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -812,7 +812,8 @@ class BaseDocument(object): field_cls = field.document_type if field_cls in inspected_classes: continue - geo_indices += field_cls._geo_indices(inspected_classes) + if hasattr(field_cls, '_geo_indices'): + geo_indices += field_cls._geo_indices(inspected_classes) elif field._geo_index: geo_indices.append(field) return geo_indices From 81b69648efaf01e64aefe0260aac78cb8147d407 Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Mon, 15 Aug 2011 16:56:42 -0400 Subject: [PATCH 188/214] docstring for `where()` --- mongoengine/queryset.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index b1185ee0..c8945142 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1398,6 +1398,10 @@ class QuerySet(object): return db.eval(code, *fields) def where(self, where_clause): + """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript + expression). Performs automatic field name substitution like + :meth:`mongoengine.queryset.Queryset.exec_js`. 
+ """ where_clause = self._sub_js_fields(where_clause) self._where_clause = where_clause return self From 3f301f6b0f5c56b731a37ae82442a5e90315972e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Aug 2011 10:32:21 +0100 Subject: [PATCH 189/214] Finishing touches to where implementation - thanks to dcrosta Refs #242 --- AUTHORS | 1 + docs/changelog.rst | 1 + mongoengine/fields.py | 4 ++-- mongoengine/queryset.py | 4 ++++ tests/queryset.py | 13 +++++++++++++ 5 files changed, 21 insertions(+), 2 deletions(-) diff --git a/AUTHORS b/AUTHORS index b13af2b0..fbf78cf6 100644 --- a/AUTHORS +++ b/AUTHORS @@ -5,3 +5,4 @@ Florian Schlachter Steve Challis Ross Lawley Wilson Júnior +Dan Crosta https://github.com/dcrosta diff --git a/docs/changelog.rst b/docs/changelog.rst index f3a4b944..87247d57 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added where() - filter to allowing users to specify query expressions as Javascript - Added SequenceField - for creating sequential counters - Added update() convenience method to a document - Added cascading saves - so changes to Referenced documents are saved on .save() diff --git a/mongoengine/fields.py b/mongoengine/fields.py index b2f1e2a2..619b8c60 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -620,7 +620,7 @@ class GenericReferenceField(BaseField): """A reference to *any* :class:`~mongoengine.document.Document` subclass that will be automatically dereferenced on access (lazily). - note: Any documents used as a generic reference must be registered in the + ..note :: Any documents used as a generic reference must be registered in the document registry. Importing the model will automatically register it. .. versionadded:: 0.3 @@ -925,7 +925,7 @@ class SequenceField(IntField): return value def __set__(self, instance, value): - + if value is None and instance._initialised: value = self.generate_new_value() diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index c8945142..303fcc1b 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1401,6 +1401,10 @@ class QuerySet(object): """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript expression). Performs automatic field name substitution like :meth:`mongoengine.queryset.Queryset.exec_js`. + + .. note:: When using this mode of query, the database will call your + function, or evaluate your predicate clause, for each object + in the collection. 
""" where_clause = self._sub_js_fields(where_clause) self._where_clause = where_clause diff --git a/tests/queryset.py b/tests/queryset.py index ce64a004..6ae1c10f 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -2531,6 +2531,19 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(1, len(results)) self.assertTrue(a in results) + query = IntPair.objects.where('function() { return this[~fielda] >= this[~fieldb] }') + self.assertEqual('function() { return this["fielda"] >= this["fieldb"] }', query._where_clause) + results = list(query) + self.assertEqual(2, len(results)) + self.assertTrue(a in results) + self.assertTrue(c in results) + + def invalid_where(): + list(IntPair.objects.where(fielda__gte=3)) + + self.assertRaises(TypeError, invalid_where) + + class QTest(unittest.TestCase): def setUp(self): From 8bdb42827c3433d69757325113b7f14fe22509d9 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Aug 2011 11:33:41 +0100 Subject: [PATCH 190/214] Updated AUTHORS Thanks to all those that have contributed to MongoEngine --- AUTHORS | 60 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/AUTHORS b/AUTHORS index fbf78cf6..ed022c2e 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1,3 +1,5 @@ +The PRIMARY AUTHORS are (and/or have been): + Harry Marr Matt Dennewitz Deepak Thukral @@ -6,3 +8,61 @@ Steve Challis Ross Lawley Wilson Júnior Dan Crosta https://github.com/dcrosta + +CONTRIBUTORS + +Dervived from the git logs, inevitably incomplete but all of whom and others +have submitted patches, reported bugs and generally helped make MongoEngine +that much better: + + * Harry Marr + * Ross Lawley + * blackbrrr + * Florian Schlachter + * Vincent Driessen + * Steve Challis + * flosch + * Deepak Thukral + * Colin Howe + * Wilson Júnior + * Alistair Roche + * Dan Crosta + * Viktor Kerkez + * Stephan Jaekel + * Rached Ben Mustapha + * Greg Turner + * Daniel Hasselrot + * Mircea Pasoi + * Matt Chisholm + * James Punteney + * TimothéePeignier + * Stuart Rackham + * Serge Matveenko + * Matt Dennewitz + * Don Spaulding + * Ales Zoulek + * sshwsfc + * sib + * Samuel Clay + * Nick Vlku + * martin + * Flavio Amieiro + * Анхбаяр Лхагвадорж + * Zak Johnson + * Victor Farazdagi + * vandersonmota + * Theo Julienne + * sp + * Slavi Pantaleev + * Richard Henry + * Nicolas Perriault + * Nick Vlku Jr + * Michael Henson + * Leo Honkanen + * kuno + * Josh Ourisman + * Jaime + * Igor Ivanov + * Gregg Lind + * Gareth Lloyd + * Albert Choi From 5f058434035121335c375d0e4d8b4d7447327144 Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Tue, 16 Aug 2011 08:20:06 -0400 Subject: [PATCH 191/214] prefer to use map-reduce to db.eval where possible --- mongoengine/queryset.py | 72 ++++++++++++++++++++++++++++------------- 1 file changed, 49 insertions(+), 23 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 303fcc1b..a2716089 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -1416,16 +1416,26 @@ class QuerySet(object): :param field: the field to sum over; use dot-notation to refer to embedded document fields """ - sum_func = """ - function(sumField) { - var total = 0.0; - db[collection].find(query).forEach(function(doc) { - total += (doc[sumField] || 0.0); - }); - return total; + map_func = pymongo.code.Code(""" + function() { + emit(1, this[field] || 0); } - """ - return self.exec_js(sum_func, field) + """, scope={'field': field}) + + reduce_func = pymongo.code.Code(""" + function(key, values) { + var sum = 0; + for (var i 
in values) { + sum += values[i]; + } + return sum; + } + """) + + for result in self.map_reduce(map_func, reduce_func, output='inline'): + return result.value + else: + return 0 def average(self, field): """Average over the values of the specified field. @@ -1433,22 +1443,38 @@ class QuerySet(object): :param field: the field to average over; use dot-notation to refer to embedded document fields """ - average_func = """ - function(averageField) { - var total = 0.0; - var num = 0; - db[collection].find(query).forEach(function(doc) { - if (doc[averageField] !== undefined) { - total += doc[averageField]; - num += 1; - } - }); - return total / num; + map_func = pymongo.code.Code(""" + function() { + if (this.hasOwnProperty(field)) + emit(1, {t: this[field] || 0, c: 1}); } - """ - return self.exec_js(average_func, field) + """, scope={'field': field}) - def item_frequencies(self, field, normalize=False, map_reduce=False): + reduce_func = pymongo.code.Code(""" + function(key, values) { + var out = {t: 0, c: 0}; + for (var i in values) { + var value = values[i]; + out.t += value.t; + out.c += value.c; + } + return out; + } + """) + + finalize_func = pymongo.code.Code(""" + function(key, value) { + return value.t / value.c; + } + """) + + for result in self.map_reduce(map_func, reduce_func, finalize_f=finalize_func, output='inline'): + return result.value + else: + return 0 + + + def item_frequencies(self, field, normalize=False, map_reduce=True): """Returns a dictionary of all items present in a field across the whole queried set of documents, and their corresponding frequency. This is useful for generating tag clouds, or searching documents. From fd2e40d735126e788c688a296f8bf7bf45e34dc6 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Aug 2011 15:24:37 +0100 Subject: [PATCH 192/214] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 87247d57..551d3b2d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments - Added where() - filter to allowing users to specify query expressions as Javascript - Added SequenceField - for creating sequential counters - Added update() convenience method to a document From 2a8543b3b730974376324a27e5e46c33905a86fa Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Aug 2011 15:26:11 +0100 Subject: [PATCH 193/214] Updated changelog --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 551d3b2d..787b9c91 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,7 +30,7 @@ Changes in dev - Added insert method for bulk inserts - Added blinker signal support - Added query_counter context manager for tests -- Added optional map_reduce method item_frequencies +- Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments) - Added inline_map_reduce option to map_reduce - Updated connection exception so it provides more info on the cause. 
- Added searching multiple levels deep in ``DictField`` From 3c8cbcfee757334b4e58606dd2e64d2082fa1e7e Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Tue, 16 Aug 2011 16:50:50 +0100 Subject: [PATCH 194/214] Added tests for showing how to set embedded document indexes refs #257 --- tests/document.py | 51 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/tests/document.py b/tests/document.py index 90a0bc25..6984ef3e 100644 --- a/tests/document.py +++ b/tests/document.py @@ -690,6 +690,57 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + def test_embedded_document_index(self): + """Tests settings an index on an embedded document + """ + class Date(EmbeddedDocument): + year = IntField(db_field='yr') + + class BlogPost(Document): + title = StringField() + date = EmbeddedDocumentField(Date) + + meta = { + 'indexes': [ + '-date.year' + ], + } + + BlogPost.drop_collection() + + info = BlogPost.objects._collection.index_information() + self.assertEqual(info.keys(), ['_types_1_date.yr_-1', '_id_']) + BlogPost.drop_collection() + + def test_list_embedded_document_index(self): + """Ensure list embedded documents can be indexed + """ + class Tag(EmbeddedDocument): + name = StringField(db_field='tag') + + class BlogPost(Document): + title = StringField() + tags = ListField(EmbeddedDocumentField(Tag)) + + meta = { + 'indexes': [ + 'tags.name' + ], + } + + BlogPost.drop_collection() + + info = BlogPost.objects._collection.index_information() + # we don't use _types in with list fields by default + self.assertEqual(info.keys(), ['_id_', '_types_1', 'tags.tag_1']) + + post1 = BlogPost(title="Embedded Indexes tests in place", + tags=[Tag(name="about"), Tag(name="time")] + ) + post1.save() + BlogPost.drop_collection() + + def test_geo_indexes_recursion(self): class User(Document): From b76590dc011d69ef591db45b18e92dfe1ea74939 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Wed, 17 Aug 2011 09:32:04 -0300 Subject: [PATCH 195/214] more tests for embedded lists --- tests/document.py | 52 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/tests/document.py b/tests/document.py index 6984ef3e..cfeffb75 100644 --- a/tests/document.py +++ b/tests/document.py @@ -1945,6 +1945,58 @@ class DocumentTest(unittest.TestCase): BlogPost.drop_collection() + def test_list_search_by_embedded(self): + class User(Document): + username = StringField(required=True) + + meta = {'allow_inheritance': False} + + class Comment(EmbeddedDocument): + comment = StringField() + user = ReferenceField(User, + required=True) + + meta = {'allow_inheritance': False} + + class Page(Document): + comments = ListField(EmbeddedDocumentField(Comment)) + meta = {'allow_inheritance': False, + 'indexes': [ + {'fields': ['comments.user']} + ]} + + User.drop_collection() + Page.drop_collection() + + u1 = User(username="wilson") + u1.save() + + u2 = User(username="rozza") + u2.save() + + u3 = User(username="hmarr") + u3.save() + + p1 = Page(comments = [Comment(user=u1, comment="Its very good"), + Comment(user=u2, comment="Hello world"), + Comment(user=u3, comment="Ping Pong"), + Comment(user=u1, comment="I like a beer")]) + p1.save() + + p2 = Page(comments = [Comment(user=u1, comment="Its very good"), + Comment(user=u2, comment="Hello world")]) + p2.save() + + p3 = Page(comments = [Comment(user=u3, comment="Its very good")]) + p3.save() + + p4 = Page(comments = [Comment(user=u2, comment="Heavy Metal song")]) + p4.save() + + 
self.assertEqual([p1, p2], list(Page.objects.filter(comments__user=u1))) + self.assertEqual([p1, p2, p4], list(Page.objects.filter(comments__user=u2))) + self.assertEqual([p1, p3], list(Page.objects.filter(comments__user=u3))) + def test_save_embedded_document(self): """Ensure that a document with an embedded document field may be saved in the database. From 8071b23bff94a9818a4a8187896275dcd631a820 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Aug 2011 14:17:06 +0100 Subject: [PATCH 196/214] Updated upgrade.rst --- docs/upgrade.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index f005e2e2..ef44b96a 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -7,6 +7,15 @@ Upgrading There have been the following backwards incompatibilities from 0.4 to 0.5: +# PyMongo / MongoDB + +map reduce now requires pymongo 1.11+ More methods now use map_reduce as db.eval +is not supported for sharding - the following have been changed: + + * sum + * average + * item_frequencies + #. Default collection naming. Previously it was just lowercase, its now much more pythonic and readable as its From ca3b004921fd9e69ab0a3d3b6b62a435826c26a5 Mon Sep 17 00:00:00 2001 From: John Arnfield Date: Wed, 17 Aug 2011 20:04:38 +0100 Subject: [PATCH 197/214] Added tests for polygon queries --- tests/queryset.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/queryset.py b/tests/queryset.py index 72623b89..778d9317 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -1355,6 +1355,26 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(events.count(), 1) self.assertEqual(events[0].id, event2.id) + # check that polygon works + polygon = [ + (41.912114,-87.694445), + (41.919395,-87.69084), + (41.927186,-87.681742), + (41.911731,-87.654276), + (41.898061,-87.656164), + ] + events = Event.objects(location__within_polygon=polygon) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event1.id) + + polygon2 = [ + (54.033586,-1.742249), + (52.792797,-1.225891), + (53.389881,-4.40094) + ] + events = Event.objects(location__within_polygon=polygon2) + self.assertEqual(events.count(), 0) + Event.drop_collection() def test_spherical_geospatial_operators(self): From 88cb8f39638c99860b81cb299df9409d17a91624 Mon Sep 17 00:00:00 2001 From: John Arnfield Date: Wed, 17 Aug 2011 20:14:24 +0100 Subject: [PATCH 198/214] left some conflict markers in - oops --- mongoengine/queryset.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 397a1538..a715b57d 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -617,15 +617,9 @@ class QuerySet(object): """ operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', 'all', 'size', 'exists', 'not'] -<<<<<<< HEAD - geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'near', 'near_sphere'] + geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'within_polygon' 'near', 'near_sphere'] match_operators = ['contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', -======= - geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'within_polygon', 'near', 'near_sphere'] - match_operators = ['contains', 'icontains', 'startswith', - 'istartswith', 'endswith', 'iendswith', ->>>>>>> master 'exact', 'iexact'] mongo_query = {} From 10bc93dfa64e146e64077e12aa29412139bd436f Mon Sep 17 00:00:00 2001 From: John 
Arnfield Date: Wed, 17 Aug 2011 20:15:47 +0100 Subject: [PATCH 199/214] Commas help too :) --- mongoengine/queryset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index a715b57d..93b4deca 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -617,7 +617,7 @@ class QuerySet(object): """ operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', 'all', 'size', 'exists', 'not'] - geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'within_polygon' 'near', 'near_sphere'] + geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'within_polygon', 'near', 'near_sphere'] match_operators = ['contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact'] From b037fb3e21e7b9f4c535fe57b231caf692d0c762 Mon Sep 17 00:00:00 2001 From: John Arnfield Date: Wed, 17 Aug 2011 21:23:40 +0100 Subject: [PATCH 200/214] Added version check to the polygon test to ensure server version >= 1.9 --- tests/queryset.py | 46 ++++++++++++++++++++++++++-------------------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/tests/queryset.py b/tests/queryset.py index 001d10be..5b0e658f 100644 --- a/tests/queryset.py +++ b/tests/queryset.py @@ -7,6 +7,7 @@ from mongoengine.queryset import (QuerySet, QuerySetManager, MultipleObjectsReturned, DoesNotExist, QueryFieldList) from mongoengine import * +from mongoengine.connection import _get_connection from mongoengine.tests import query_counter @@ -14,7 +15,7 @@ class QuerySetTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') - + class Person(Document): name = StringField() age = IntField() @@ -2197,25 +2198,30 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(events.count(), 1) self.assertEqual(events[0].id, event2.id) - # check that polygon works - polygon = [ - (41.912114,-87.694445), - (41.919395,-87.69084), - (41.927186,-87.681742), - (41.911731,-87.654276), - (41.898061,-87.656164), - ] - events = Event.objects(location__within_polygon=polygon) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event1.id) - - polygon2 = [ - (54.033586,-1.742249), - (52.792797,-1.225891), - (53.389881,-4.40094) - ] - events = Event.objects(location__within_polygon=polygon2) - self.assertEqual(events.count(), 0) + # check that polygon works for users who have a server >= 1.9 + server_version = tuple( + _get_connection().server_info()['version'].split('.') + ) + required_version = tuple("1.9.0".split(".")) + if server_version >= required_version: + polygon = [ + (41.912114,-87.694445), + (41.919395,-87.69084), + (41.927186,-87.681742), + (41.911731,-87.654276), + (41.898061,-87.656164), + ] + events = Event.objects(location__within_polygon=polygon) + self.assertEqual(events.count(), 1) + self.assertEqual(events[0].id, event1.id) + + polygon2 = [ + (54.033586,-1.742249), + (52.792797,-1.225891), + (53.389881,-4.40094) + ] + events = Event.objects(location__within_polygon=polygon2) + self.assertEqual(events.count(), 0) Event.drop_collection() From 97ac7e54767375e83d9df713d95da17510200060 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Aug 2011 21:34:35 +0100 Subject: [PATCH 201/214] Remove old pymongo version checks Closes #264 --- mongoengine/queryset.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index a2716089..6e5a4717 100644 --- a/mongoengine/queryset.py +++ 
b/mongoengine/queryset.py @@ -1275,9 +1275,6 @@ class QuerySet(object): if not update: raise OperationError("No update parameters, would remove data") - if pymongo.version < '1.1.1': - raise OperationError('update() method requires PyMongo 1.1.1+') - if not write_options: write_options = {} @@ -1314,14 +1311,10 @@ class QuerySet(object): try: # Explicitly provide 'multi=False' to newer versions of PyMongo # as the default may change to 'True' - if pymongo.version >= '1.1.1': - ret = self._collection.update(self._query, update, multi=False, - upsert=upsert, safe=safe_update, - **write_options) - else: - # Older versions of PyMongo don't support 'multi' - ret = self._collection.update(self._query, update, - safe=safe_update) + ret = self._collection.update(self._query, update, multi=False, + upsert=upsert, safe=safe_update, + **write_options) + if ret is not None and 'n' in ret: return ret['n'] except pymongo.errors.OperationFailure, e: From 11621c6f5a0335fb3ab1b230c6db70c534d7b7ee Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Aug 2011 21:38:46 +0100 Subject: [PATCH 202/214] Removed keeptemp from map_reduce as 0.5 requires pymongo 1.11 Closes #258 --- mongoengine/queryset.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index 6e5a4717..ca7cffb1 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -872,7 +872,7 @@ class QuerySet(object): return self.count() def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, - scope=None, keep_temp=False): + scope=None): """Perform a map/reduce query using the current query spec and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, it must be the last call made, as it does not return a maleable @@ -920,7 +920,7 @@ class QuerySet(object): reduce_f_code = self._sub_js_fields(reduce_f) reduce_f = pymongo.code.Code(reduce_f_code, reduce_f_scope) - mr_args = {'query': self._query, 'keeptemp': keep_temp} + mr_args = {'query': self._query} if finalize_f: finalize_f_scope = {} @@ -937,7 +937,7 @@ class QuerySet(object): if limit: mr_args['limit'] = limit - if output == 'inline' or (not keep_temp and not self._ordering): + if output == 'inline' and not self._ordering: map_reduce_function = 'inline_map_reduce' else: map_reduce_function = 'map_reduce' @@ -1514,7 +1514,7 @@ class QuerySet(object): return total; } """ - values = self.map_reduce(map_func, reduce_func, 'inline', keep_temp=False) + values = self.map_reduce(map_func, reduce_func, 'inline') frequencies = {} for f in values: key = f.key From 10c30f2224cd7aa3fbc9fa252b770665e51800ce Mon Sep 17 00:00:00 2001 From: Dan Crosta Date: Wed, 17 Aug 2011 16:42:36 -0400 Subject: [PATCH 203/214] remove keep_temp from map_reduce fixes #258 --- mongoengine/queryset.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index ca7cffb1..1dfc06a3 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -900,6 +900,10 @@ class QuerySet(object): :meth:`~pymongo.collection.Collection.map_reduce` helper requires PyMongo version **>= 1.11**. + .. versionchanged:: 0.5 + - removed ``keep_temp`` keyword argument, which was only relevant + for MongoDB server versions older than 1.7.4 + .. 
versionadded:: 0.3 """ from document import MapReduceDocument From 91a0e499d9295bad1f52ff9337f4df4dde9a7e04 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 17 Aug 2011 21:48:41 +0100 Subject: [PATCH 204/214] Updated changelog and authors Refs #263 --- AUTHORS | 1 + docs/changelog.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index ed022c2e..b342830a 100644 --- a/AUTHORS +++ b/AUTHORS @@ -66,3 +66,4 @@ that much better: * Gregg Lind * Gareth Lloyd * Albert Choi + * John Arnfield diff --git a/docs/changelog.rst b/docs/changelog.rst index 787b9c91..3a2a2c43 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added within_polygon support - for those with mongodb 1.9 - Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments - Added where() - filter to allowing users to specify query expressions as Javascript - Added SequenceField - for creating sequential counters From bda716ef9d721248a9f3d5501c1e3e5a2637c442 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Thu, 18 Aug 2011 08:30:52 +0100 Subject: [PATCH 205/214] Improved update in test case for removing inheritance --- tests/document.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/document.py b/tests/document.py index cfeffb75..d6d70289 100644 --- a/tests/document.py +++ b/tests/document.py @@ -431,7 +431,7 @@ class DocumentTest(unittest.TestCase): 'allow_inheritance': False, 'indexes': ['name'] } - collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, False, True) + collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True) # Confirm extra data is removed obj = collection.find_one() @@ -1948,14 +1948,14 @@ class DocumentTest(unittest.TestCase): def test_list_search_by_embedded(self): class User(Document): username = StringField(required=True) - + meta = {'allow_inheritance': False} - + class Comment(EmbeddedDocument): comment = StringField() user = ReferenceField(User, required=True) - + meta = {'allow_inheritance': False} class Page(Document): From dd49d1d4bbdbef4e68d9776cf07207f58f7884c1 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 24 Aug 2011 13:37:20 +0100 Subject: [PATCH 206/214] Added choices note to upgrade docs --- docs/upgrade.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/upgrade.rst b/docs/upgrade.rst index ef44b96a..7187adcf 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -7,6 +7,12 @@ Upgrading There have been the following backwards incompatibilities from 0.4 to 0.5: +# Choice options: + +Are now expected to be an iterable of tuples, with the first element in each +tuple being the actual value to be stored. The second element is the +human-readable name for the option. + # PyMongo / MongoDB map reduce now requires pymongo 1.11+ More methods now use map_reduce as db.eval From 1631788ab6bfe66779f226f750b9f235c1f6c7ea Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Wed, 24 Aug 2011 13:37:39 +0100 Subject: [PATCH 207/214] Now Raise an exception if subclasses are missing at querytime. 
Beats returning None thanks to #aid for mentioning it on IRC --- mongoengine/base.py | 7 +++++-- tests/document.py | 30 +++++++++++++++++++++++++++--- tests/fields.py | 3 ++- 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 6be5c3de..8d0c470b 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -671,7 +671,6 @@ class BaseDocument(object): # get the class name from the document, falling back to the given # class if unavailable class_name = son.get(u'_cls', cls._class_name) - data = dict((str(key), value) for key, value in son.items()) if '_types' in data: @@ -686,7 +685,11 @@ class BaseDocument(object): if class_name not in subclasses: # Type of document is probably more generic than the class # that has been queried to return this SON - return None + raise NotRegistered(""" + `%s` has not been registered in the document registry. + Importing the document class automatically registers it, + has it been imported? + """.strip() % class_name) cls = subclasses[class_name] present_fields = data.keys() diff --git a/tests/document.py b/tests/document.py index d6d70289..b76b6f92 100644 --- a/tests/document.py +++ b/tests/document.py @@ -12,7 +12,7 @@ import weakref from fixtures import Base, Mixin, PickleEmbedded, PickleTest from mongoengine import * -from mongoengine.base import BaseField +from mongoengine.base import _document_registry, NotRegistered from mongoengine.connection import _get_db @@ -740,7 +740,6 @@ class DocumentTest(unittest.TestCase): post1.save() BlogPost.drop_collection() - def test_geo_indexes_recursion(self): class User(Document): @@ -799,7 +798,6 @@ class DocumentTest(unittest.TestCase): post2 = BlogPost(title='test2', slug='test') self.assertRaises(OperationError, post2.save) - def test_unique_with(self): """Ensure that unique_with constraints are applied to fields. """ @@ -978,6 +976,32 @@ class DocumentTest(unittest.TestCase): User.drop_collection() + + def test_document_not_registered(self): + + class Place(Document): + name = StringField() + + class NicePlace(Place): + pass + + Place.drop_collection() + + Place(name="London").save() + NicePlace(name="Buckingham Palace").save() + + # Mimic Place and NicePlace definitions being in a different file + # and the NicePlace model not being imported in at query time. + @classmethod + def _get_subclasses(cls): + return {} + Place._get_subclasses = _get_subclasses + + def query_without_importing_nice_place(): + print Place.objects.all() + self.assertRaises(NotRegistered, query_without_importing_nice_place) + + def test_creation(self): """Ensure that document may be created using keyword arguments. """ diff --git a/tests/fields.py b/tests/fields.py index f8aeb86c..f9734900 100644 --- a/tests/fields.py +++ b/tests/fields.py @@ -1062,6 +1062,7 @@ class FieldTest(unittest.TestCase): Post.drop_collection() User.drop_collection() + def test_generic_reference_document_not_registered(self): """Ensure dereferencing out of the document registry throws a `NotRegistered` error. 
@@ -1445,7 +1446,7 @@ class FieldTest(unittest.TestCase): a.save() self.assertEqual(a.counter, 2) - + a = Animal.objects.first() self.assertEqual(a.counter, 2) a.reload() From bc9a09f52e5b64695e682b0bcd4997ca8c41f41f Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 9 Sep 2011 04:21:32 -0700 Subject: [PATCH 208/214] Document updates --- mongoengine/base.py | 5 ++++- mongoengine/document.py | 10 ++++++++++ mongoengine/fields.py | 8 +++++++- mongoengine/queryset.py | 38 ++++++++++++++++++++++++++++++++++---- 4 files changed, 55 insertions(+), 6 deletions(-) diff --git a/mongoengine/base.py b/mongoengine/base.py index 8d0c470b..6a94670e 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -43,6 +43,8 @@ def get_document(name): class BaseField(object): """A base class for fields in a MongoDB document. Instances of this class may be added to subclasses of `Document` to define a document's schema. + + .. versionchanged:: 0.5 - added verbose and help text """ # Fields may have _types inserted into indexes by default @@ -156,6 +158,8 @@ class ComplexBaseField(BaseField): Allows for nesting of embedded documents inside complex types. Handles the lazy dereferencing of a queryset by lazily dereferencing all items in a list / dict rather than one at a time. + + .. versionadded:: 0.5 """ field = None @@ -896,7 +900,6 @@ class BaseDocument(object): return not self.__eq__(other) def __hash__(self): - """ For list, dict key """ if self.pk is None: # For new object return super(BaseDocument,self).__hash__() diff --git a/mongoengine/document.py b/mongoengine/document.py index bd2bbda4..3ccc4ddc 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -130,6 +130,11 @@ class Document(BaseDocument): which will be used as options for the resultant ``getLastError`` command. For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. + + .. versionchanged:: 0.5 + In existing documents it only saves changed fields using set / unset + Saves are cascaded and any :class:`~pymongo.dbref.DBRef` objects + that have changes are saved as well. """ from fields import ReferenceField, GenericReferenceField @@ -226,6 +231,11 @@ class Document(BaseDocument): signals.post_delete.send(self.__class__, document=self) def select_related(self, max_depth=1): + """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to + a maximum depth in order to cut down the number queries to mongodb. + + .. versionadded:: 0.5 + """ from dereference import dereference self._data = dereference(self._data, max_depth) return self diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 619b8c60..0514f1f4 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -279,7 +279,6 @@ class DateTimeField(BaseField): return None - class ComplexDateTimeField(StringField): """ ComplexDateTimeField handles microseconds exactly instead of rounding @@ -295,6 +294,8 @@ class ComplexDateTimeField(StringField): Where NNNNNN is the number of microseconds of the represented `datetime`. The `,` as the separator can be easily modified by passing the `separator` keyword when initializing the field. + + .. versionadded:: 0.5 """ def __init__(self, separator=',', **kwargs): @@ -478,6 +479,7 @@ class DictField(ComplexBaseField): similar to an embedded document, but the structure is not defined. .. versionadded:: 0.3 + .. 
versionchanged:: 0.5 - Can now handle complex / varying types of data """ def __init__(self, basecls=None, field=None, *args, **kwargs): @@ -542,6 +544,8 @@ class ReferenceField(BaseField): * NULLIFY - Updates the reference to null. * CASCADE - Deletes the documents associated with the reference. * DENY - Prevent the deletion of the reference object. + + .. versionchanged:: 0.5 added `reverse_delete_rule` """ def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs): @@ -708,6 +712,7 @@ class GridFSProxy(object): """Proxy object to handle writing and reading of files to and from GridFS .. versionadded:: 0.4 + .. versionchanged:: 0.5 - added optional size param to read """ def __init__(self, grid_id=None, key=None, instance=None): @@ -800,6 +805,7 @@ class FileField(BaseField): """A GridFS storage field. .. versionadded:: 0.4 + .. versionchanged:: 0.5 added optional size param for read """ def __init__(self, **kwargs): diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index bade2b4c..a830150a 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -346,7 +346,10 @@ class QuerySet(object): self._hint = -1 # Using -1 as None is a valid value for hint def clone(self): - """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet`""" + """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet` + + .. versionadded:: 0.5 + """ c = self.__class__(self._document, self._collection_obj) copy_props = ('_initial_query', '_query_obj', '_where_clause', @@ -996,6 +999,8 @@ class QuerySet(object): Hinting will not do anything if the corresponding index does not exist. The last hint applied to this cursor takes precedence over all others. + + .. versionadded:: 0.5 """ self._cursor.hint(index) self._hint = index @@ -1038,11 +1043,12 @@ class QuerySet(object): def only(self, *fields): """Load only a subset of this document's fields. :: - post = BlogPost.objects(...).only("title") + post = BlogPost.objects(...).only("title", "author.name") :param fields: fields to include .. versionadded:: 0.3 + .. versionchanged:: 0.5 - Added subfield support """ fields = dict([(f, QueryFieldList.ONLY) for f in fields]) return self.fields(**fields) @@ -1053,6 +1059,8 @@ class QuerySet(object): post = BlogPost.objects(...).exclude("comments") :param fields: fields to exclude + + .. versionadded:: 0.5 """ fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) return self.fields(**fields) @@ -1098,6 +1106,8 @@ class QuerySet(object): """Include all fields. Reset all previously calls of .only() and .exclude(). :: post = BlogPost.objects(...).exclude("comments").only("title").all_fields() + + .. versionadded:: 0.5 """ self._loaded_fields = QueryFieldList(always_include=self._loaded_fields.always_include) return self @@ -1153,6 +1163,8 @@ class QuerySet(object): """Enable or disable snapshot mode when querying. :param enabled: whether or not snapshot mode is enabled + + ..versionchanged:: 0.5 - made chainable """ self._snapshot = enabled return self @@ -1161,6 +1173,8 @@ class QuerySet(object): """Enable or disable the default mongod timeout when querying. :param enabled: whether or not the timeout is used + + ..versionchanged:: 0.5 - made chainable """ self._timeout = enabled return self @@ -1404,6 +1418,8 @@ class QuerySet(object): .. note:: When using this mode of query, the database will call your function, or evaluate your predicate clause, for each object in the collection. + + .. 
versionadded:: 0.5 """ where_clause = self._sub_js_fields(where_clause) self._where_clause = where_clause @@ -1414,6 +1430,9 @@ class QuerySet(object): :param field: the field to sum over; use dot-notation to refer to embedded document fields + + .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work + with sharding. """ map_func = pymongo.code.Code(""" function() { @@ -1441,6 +1460,9 @@ class QuerySet(object): :param field: the field to average over; use dot-notation to refer to embedded document fields + + .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work + with sharding. """ map_func = pymongo.code.Code(""" function() { @@ -1472,7 +1494,6 @@ class QuerySet(object): else: return 0 - def item_frequencies(self, field, normalize=False, map_reduce=True): """Returns a dictionary of all items present in a field across the whole queried set of documents, and their corresponding frequency. @@ -1490,6 +1511,9 @@ class QuerySet(object): :param field: the field to use :param normalize: normalize the results so they add to 1.0 :param map_reduce: Use map_reduce over exec_js + + .. versionchanged:: 0.5 defaults to map_reduce and can handle embedded + document lookups """ if map_reduce: return self._item_frequencies_map_reduce(field, normalize=normalize) @@ -1532,7 +1556,7 @@ class QuerySet(object): if normalize: count = sum(frequencies.values()) - frequencies = dict([(k, v/count) for k,v in frequencies.items()]) + frequencies = dict([(k, v / count) for k, v in frequencies.items()]) return frequencies @@ -1594,9 +1618,15 @@ class QuerySet(object): return repr(data) def select_related(self, max_depth=1): + """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to + a maximum depth in order to cut down the number queries to mongodb. + + .. versionadded:: 0.5 + """ from dereference import dereference return dereference(self, max_depth=max_depth) + class QuerySetManager(object): get_queryset = None From a6449a7b2c97cf669b887cb04f6cd61911e874c2 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 9 Sep 2011 05:45:56 -0700 Subject: [PATCH 209/214] Updates to documentation in prep for 0.5 --- docs/guide/defining-documents.rst | 78 +++++++----- docs/guide/document-instances.rst | 10 +- docs/guide/gridfs.rst | 1 + docs/guide/installing.rst | 20 +-- docs/guide/querying.rst | 202 +++++++++++++++++------------- docs/guide/signals.rst | 4 +- docs/index.rst | 39 +++++- docs/upgrade.rst | 27 ++-- mongoengine/queryset.py | 14 ++- 9 files changed, 240 insertions(+), 155 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index a524520c..00a7d090 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -4,14 +4,14 @@ Defining documents In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When working with relational databases, rows are stored in **tables**, which have a strict **schema** that the rows follow. MongoDB stores documents in -**collections** rather than tables - the principle difference is that no schema -is enforced at a database level. +**collections** rather than tables - the principle difference is that no schema +is enforced at a database level. Defining a document's schema ============================ MongoEngine allows you to define schemata for documents as this helps to reduce coding errors, and allows for utility methods to be defined on fields which may -be present. +be present. 
To define a schema for a document, create a class that inherits from :class:`~mongoengine.Document`. Fields are specified by adding **field @@ -19,7 +19,7 @@ objects** as class attributes to the document class:: from mongoengine import * import datetime - + class Page(Document): title = StringField(max_length=200, required=True) date_modified = DateTimeField(default=datetime.datetime.now) @@ -31,31 +31,34 @@ By default, fields are not required. To make a field mandatory, set the validation constraints available (such as :attr:`max_length` in the example above). Fields may also take default values, which will be used if a value is not provided. Default values may optionally be a callable, which will be called -to retrieve the value (such as in the above example). The field types available +to retrieve the value (such as in the above example). The field types available are as follows: * :class:`~mongoengine.StringField` * :class:`~mongoengine.URLField` +* :class:`~mongoengine.EmailField` * :class:`~mongoengine.IntField` * :class:`~mongoengine.FloatField` * :class:`~mongoengine.DecimalField` * :class:`~mongoengine.DateTimeField` +* :class:`~mongoengine.ComplexDateTimeField` * :class:`~mongoengine.ListField` +* :class:`~mongoengine.SortedListField` * :class:`~mongoengine.DictField` +* :class:`~mongoengine.MapField` * :class:`~mongoengine.ObjectIdField` -* :class:`~mongoengine.EmbeddedDocumentField` * :class:`~mongoengine.ReferenceField` * :class:`~mongoengine.GenericReferenceField` +* :class:`~mongoengine.EmbeddedDocumentField` * :class:`~mongoengine.BooleanField` * :class:`~mongoengine.FileField` -* :class:`~mongoengine.EmailField` -* :class:`~mongoengine.SortedListField` * :class:`~mongoengine.BinaryField` * :class:`~mongoengine.GeoPointField` +* :class:`~mongoengine.SequenceField` Field arguments --------------- -Each field type can be customized by keyword arguments. The following keyword +Each field type can be customized by keyword arguments. The following keyword arguments can be set on all fields: :attr:`db_field` (Default: None) @@ -74,7 +77,7 @@ arguments can be set on all fields: The definion of default parameters follow `the general rules on Python `__, - which means that some care should be taken when dealing with default mutable objects + which means that some care should be taken when dealing with default mutable objects (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: class ExampleFirst(Document): @@ -89,7 +92,7 @@ arguments can be set on all fields: # This can make an .append call to add values to the default (and all the following objects), # instead to just an object values = ListField(IntField(), default=[1,2,3]) - + :attr:`unique` (Default: False) When True, no documents in the collection will have the same value for this @@ -104,7 +107,13 @@ arguments can be set on all fields: :attr:`choices` (Default: None) An iterable of choices to which the value of this field should be limited. - + +:attr:`help_text` (Default: None) + Optional help text to output with the field - used by form libraries + +:attr:`verbose` (Default: None) + Optional human-readable name for the field - used by form libraries + List fields ----------- @@ -121,7 +130,7 @@ Embedded documents MongoDB has the ability to embed documents within other documents. Schemata may be defined for these embedded documents, just as they may be for regular documents. 
To create an embedded document, just define a document as usual, but -inherit from :class:`~mongoengine.EmbeddedDocument` rather than +inherit from :class:`~mongoengine.EmbeddedDocument` rather than :class:`~mongoengine.Document`:: class Comment(EmbeddedDocument): @@ -144,7 +153,7 @@ Often, an embedded document may be used instead of a dictionary -- generally this is recommended as dictionaries don't support validation or custom field types. However, sometimes you will not know the structure of what you want to store; in this situation a :class:`~mongoengine.DictField` is appropriate:: - + class SurveyResponse(Document): date = DateTimeField() user = ReferenceField(User) @@ -152,16 +161,19 @@ store; in this situation a :class:`~mongoengine.DictField` is appropriate:: survey_response = SurveyResponse(date=datetime.now(), user=request.user) response_form = ResponseForm(request.POST) - survey_response.answers = response_form.cleaned_data() + survey_response.answers = response_form.cleaned_data() survey_response.save() +Dictionaries can store complex data, other dictionaries, lists, references to +other objects, so are the most flexible field type available. + Reference fields ---------------- References may be stored to other documents in the database using the :class:`~mongoengine.ReferenceField`. Pass in another document class as the first argument to the constructor, then simply assign document objects to the field:: - + class User(Document): name = StringField() @@ -235,13 +247,13 @@ Its value can take any of the following constants: in-memory, by the MongoEngine module, it is of the upmost importance that the module that declares the relationship is loaded **BEFORE** the delete is invoked. - + If, for example, the :class:`Employee` object lives in the :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people` app, it is extremely important that the :mod:`people` app is loaded before any employee is removed, because otherwise, MongoEngine could never know this relationship exists. - + In Django, be sure to put all apps that have such delete rule declarations in their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. @@ -250,15 +262,15 @@ Generic reference fields '''''''''''''''''''''''' A second kind of reference field also exists, :class:`~mongoengine.GenericReferenceField`. This allows you to reference any -kind of :class:`~mongoengine.Document`, and hence doesn't take a +kind of :class:`~mongoengine.Document`, and hence doesn't take a :class:`~mongoengine.Document` subclass as a constructor argument:: class Link(Document): url = StringField() - + class Post(Document): title = StringField() - + class Bookmark(Document): bookmark_object = GenericReferenceField() @@ -272,9 +284,10 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a Bookmark(bookmark_object=post).save() .. note:: + Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if - you will only be referencing one document type, prefer the standard + you will only be referencing one document type, prefer the standard :class:`~mongoengine.ReferenceField`. Uniqueness constraints @@ -282,7 +295,7 @@ Uniqueness constraints MongoEngine allows you to specify that a field should be unique across a collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's constructor. 
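In code terms this looks like the following - a short sketch only, assuming the usual ``from mongoengine import *`` (the test suite exercises the same behaviour)::

    class User(Document):
        username = StringField(unique=True)

    User(username='bob').save()
    User(username='bob').save()  # second save raises OperationError
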
If you try to save a document that has the same value for a unique -field as a document that is already in the database, a +field as a document that is already in the database, a :class:`~mongoengine.OperationError` will be raised. You may also specify multi-field uniqueness constraints by using :attr:`unique_with`, which may be either a single field name, or a list or tuple of field names:: @@ -294,14 +307,14 @@ either a single field name, or a list or tuple of field names:: Skipping Document validation on save ------------------------------------ -You can also skip the whole document validation process by setting -``validate=False`` when caling the :meth:`~mongoengine.document.Document.save` +You can also skip the whole document validation process by setting +``validate=False`` when caling the :meth:`~mongoengine.document.Document.save` method:: class Recipient(Document): name = StringField() email = EmailField() - + recipient = Recipient(name='admin', email='root@localhost') recipient.save() # will raise a ValidationError while recipient.save(validate=False) # won't @@ -329,7 +342,7 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying stored in the collection, and :attr:`max_size` is the maximum size of the collection in bytes. If :attr:`max_size` is not specified and :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). -The following example shows a :class:`Log` document that will be limited to +The following example shows a :class:`Log` document that will be limited to 1000 entries and 2MB of disk space:: class Log(Document): @@ -369,9 +382,10 @@ If a dictionary is passed then the following options are available: Whether the index should be sparse. .. note:: - Geospatial indexes will be automatically created for all + + Geospatial indexes will be automatically created for all :class:`~mongoengine.GeoPointField`\ s - + Ordering ======== A default ordering can be specified for your @@ -393,7 +407,7 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: blog_post_1 = BlogPost(title="Blog Post #1") blog_post_1.published_date = datetime(2010, 1, 5, 0, 0 ,0) - blog_post_2 = BlogPost(title="Blog Post #2") + blog_post_2 = BlogPost(title="Blog Post #2") blog_post_2.published_date = datetime(2010, 1, 6, 0, 0 ,0) blog_post_3 = BlogPost(title="Blog Post #3") @@ -405,7 +419,7 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: # get the "first" BlogPost using default ordering # from BlogPost.meta.ordering - latest_post = BlogPost.objects.first() + latest_post = BlogPost.objects.first() assert latest_post.title == "Blog Post #3" # override default ordering, order BlogPosts by "published_date" @@ -434,7 +448,7 @@ Working with existing data To enable correct retrieval of documents involved in this kind of heirarchy, two extra attributes are stored on each document in the database: :attr:`_cls` and :attr:`_types`. These are hidden from the user through the MongoEngine -interface, but may not be present if you are trying to use MongoEngine with +interface, but may not be present if you are trying to use MongoEngine with an existing database. For this reason, you may disable this inheritance mechansim, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling you to work with existing databases. 
To disable inheritance on a document diff --git a/docs/guide/document-instances.rst b/docs/guide/document-instances.rst index aeed7cdb..317bfef1 100644 --- a/docs/guide/document-instances.rst +++ b/docs/guide/document-instances.rst @@ -4,12 +4,12 @@ Documents instances To create a new document object, create an instance of the relevant document class, providing values for its fields as its constructor keyword arguments. You may provide values for any of the fields on the document:: - + >>> page = Page(title="Test Page") >>> page.title 'Test Page' -You may also assign values to the document's fields using standard object +You may also assign values to the document's fields using standard object attribute syntax:: >>> page.title = "Example Page" @@ -18,9 +18,9 @@ attribute syntax:: Saving and deleting documents ============================= -MongoEngine tracks changes to documents to provide efficient saving. To save +MongoEngine tracks changes to documents to provide efficient saving. To save the document to the database, call the :meth:`~mongoengine.Document.save` method. -If the document does not exist in the database, it will be created. If it does +If the document does not exist in the database, it will be created. If it does already exist, then any changes will be updated atomically. For example:: >>> page = Page(title="Test Page") @@ -29,6 +29,7 @@ already exist, then any changes will be updated atomically. For example:: >>> page.save() # Performs an atomic set on the title field. .. note:: + Changes to documents are tracked and on the whole perform `set` operations. * ``list_field.pop(0)`` - *sets* the resulting list @@ -78,6 +79,7 @@ is an alias to :attr:`id`:: >>> page.id == page.pk .. note:: + If you define your own primary key field, the field implicitly becomes required, so a :class:`ValidationError` will be thrown if you don't provide it. diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index 0cd06539..3abad775 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -66,6 +66,7 @@ Deleting stored files is achieved with the :func:`delete` method:: marmot.photo.delete() .. note:: + The FileField in a Document actually only stores the ID of a file in a separate GridFS collection. This means that deleting a document with a defined FileField does not actually delete the file. You must be diff --git a/docs/guide/installing.rst b/docs/guide/installing.rst index 132f1079..f15d3dbb 100644 --- a/docs/guide/installing.rst +++ b/docs/guide/installing.rst @@ -1,31 +1,31 @@ ====================== Installing MongoEngine ====================== + To use MongoEngine, you will need to download `MongoDB `_ and ensure it is running in an accessible location. You will also need `PyMongo `_ to use MongoEngine, but if you install MongoEngine using setuptools, then the dependencies will be handled for you. -MongoEngine is available on PyPI, so to use it you can use -:program:`easy_install`: - +MongoEngine is available on PyPI, so to use it you can use :program:`pip`: + .. code-block:: console - # easy_install mongoengine + $ pip install mongoengine -Alternatively, if you don't have setuptools installed, `download it from PyPi +Alternatively, if you don't have setuptools installed, `download it from PyPi `_ and run .. code-block:: console - # python setup.py install + $ python setup.py install To use the bleeding-edge version of MongoEngine, you can get the source from `GitHub `_ and install it as above: - + .. 
code-block:: console - # git clone git://github.com/hmarr/mongoengine - # cd mongoengine - # python setup.py install + $ git clone git://github.com/hmarr/mongoengine + $ cd mongoengine + $ python setup.py install diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index c454b6e8..13e11106 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -14,6 +14,7 @@ fetch documents from the database:: print user.name .. note:: + Once the iteration finishes (when :class:`StopIteration` is raised), :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The @@ -39,29 +40,6 @@ syntax:: # been written by a user whose 'country' field is set to 'uk' uk_pages = Page.objects(author__country='uk') -Querying lists --------------- -On most fields, this syntax will look up documents where the field specified -matches the given value exactly, but when the field refers to a -:class:`~mongoengine.ListField`, a single item may be provided, in which case -lists that contain that item will be matched:: - - class Page(Document): - tags = ListField(StringField()) - - # This will match all pages that have the word 'coding' as an item in the - # 'tags' list - Page.objects(tags='coding') - -Raw queries ------------ -It is possible to provide a raw PyMongo query as a query parameter, which will -be integrated directly into the query. This is done using the ``__raw__`` -keyword argument:: - - Page.objects(__raw__={'tags': 'coding'}) - -.. versionadded:: 0.4 Query operators =============== @@ -99,26 +77,67 @@ expressions: * ``endswith`` -- string field ends with value * ``iendswith`` -- string field ends with value (case insensitive) -.. versionadded:: 0.3 - There are a few special operators for performing geographical queries, that may used with :class:`~mongoengine.GeoPointField`\ s: * ``within_distance`` -- provide a list containing a point and a maximum distance (e.g. [(41.342, -87.653), 5]) +* ``within_spherical_distance`` -- Same as above but using the spherical geo model + (e.g. [(41.342, -87.653), 5/earth_radius]) +* ``near`` -- order the documents by how close they are to a given point +* ``near_sphere`` -- Same as above but using the spherical geo model * ``within_box`` -- filter documents to those within a given bounding box (e.g. [(35.0, -125.0), (40.0, -100.0)]) -* ``near`` -- order the documents by how close they are to a given point +* ``within_polygon`` -- filter documents to those within a given polygon (e.g. + [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). + .. note:: Requires Mongo Server 2.0 -.. versionadded:: 0.4 -Querying by position -==================== +Querying lists +-------------- +On most fields, this syntax will look up documents where the field specified +matches the given value exactly, but when the field refers to a +:class:`~mongoengine.ListField`, a single item may be provided, in which case +lists that contain that item will be matched:: + + class Page(Document): + tags = ListField(StringField()) + + # This will match all pages that have the word 'coding' as an item in the + # 'tags' list + Page.objects(tags='coding') + It is possible to query by position in a list by using a numerical value as a query operator. 
So if you wanted to find all pages whose first tag was ``db``, you could use the following query:: - BlogPost.objects(tags__0='db') + Page.objects(tags__0='db') + +If you only want to fetch part of a list eg: you want to paginate a list, then +the `slice` operator is required:: + + # comments - skip 5, limit 10 + Page.objects.fields(slice__comments=[5, 10]) + +For updating documents, if you don't know the position in a list, you can use +the $ positional operator :: + + Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1}) + +However, this doesn't map well to the syntax so you can alos use a capital S instead :: + + Post.objects(comments__by="joe").update(inc__comments__S__votes=1) + + .. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query. + + +Raw queries +----------- +It is possible to provide a raw PyMongo query as a query parameter, which will +be integrated directly into the query. This is done using the ``__raw__`` +keyword argument:: + + Page.objects(__raw__={'tags': 'coding'}) .. versionadded:: 0.4 @@ -270,6 +289,7 @@ You may sum over the values of a specific field on documents using yearly_expense = Employee.objects.sum('salary') .. note:: + If the field isn't present on a document, that document will be ignored from the sum. @@ -318,6 +338,11 @@ will be given:: >>> f.rating # default value 3 +.. note:: + + The :meth:`~mongoengine.queryset.QuerySet.exclude` is the opposite of + :meth:`~mongoengine.queryset.QuerySet.only` if you want to exclude a field. + If you later need the missing fields, just call :meth:`~mongoengine.Document.reload` on your document. @@ -341,6 +366,67 @@ calling it with keyword arguments:: # Get top posts Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) +.. _guide-atomic-updates: + +Atomic updates +============== +Documents may be updated atomically by using the +:meth:`~mongoengine.queryset.QuerySet.update_one` and +:meth:`~mongoengine.queryset.QuerySet.update` methods on a +:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers" +that you may use with these methods: + +* ``set`` -- set a particular value +* ``unset`` -- delete a particular value (since MongoDB v1.3+) +* ``inc`` -- increment a value by a given amount +* ``dec`` -- decrement a value by a given amount +* ``pop`` -- remove the last item from a list +* ``push`` -- append a value to a list +* ``push_all`` -- append several values to a list +* ``pop`` -- remove the first or last element of a list +* ``pull`` -- remove a value from a list +* ``pull_all`` -- remove several values from a list +* ``add_to_set`` -- add value to a list only if its not in the list already + +The syntax for atomic updates is similar to the querying syntax, but the +modifier comes before the field, not after it:: + + >>> post = BlogPost(title='Test', page_views=0, tags=['database']) + >>> post.save() + >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1) + >>> post.reload() # the document has been changed, so we need to reload it + >>> post.page_views + 1 + >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post') + >>> post.reload() + >>> post.title + 'Example Post' + >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql') + >>> post.reload() + >>> post.tags + ['database', 'nosql'] + +.. note :: + + In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates + on changed documents by tracking changes to that document. 
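To make the note above concrete, a small sketch of the 0.5 behaviour (field names are illustrative and assume the ``BlogPost`` document from the earlier examples)::

    post = BlogPost.objects.first()
    post.title = 'New title'   # only this field is marked as changed
    post.save()                # performs an atomic $set on 'title' instead of rewriting the document
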
+ +The positional operator allows you to update list items without knowing the +index position, therefore making the update a single atomic operation. As we +cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: + + >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo']) + >>> post.save() + >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb') + >>> post.reload() + >>> post.tags + ['database', 'mongodb'] + +.. note :: + Currently only top level lists are handled, future versions of mongodb / + pymongo plan to support nested positional operators. See `The $ positional + operator `_. + Server-side javascript execution ================================ Javascript functions may be written and sent to the server for execution. The @@ -443,59 +529,3 @@ following example shows how the substitutions are made:: return comments; } """) - -.. _guide-atomic-updates: - -Atomic updates -============== -Documents may be updated atomically by using the -:meth:`~mongoengine.queryset.QuerySet.update_one` and -:meth:`~mongoengine.queryset.QuerySet.update` methods on a -:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers" -that you may use with these methods: - -* ``set`` -- set a particular value -* ``unset`` -- delete a particular value (since MongoDB v1.3+) -* ``inc`` -- increment a value by a given amount -* ``dec`` -- decrement a value by a given amount -* ``pop`` -- remove the last item from a list -* ``push`` -- append a value to a list -* ``push_all`` -- append several values to a list -* ``pop`` -- remove the first or last element of a list -* ``pull`` -- remove a value from a list -* ``pull_all`` -- remove several values from a list -* ``add_to_set`` -- add value to a list only if its not in the list already - -The syntax for atomic updates is similar to the querying syntax, but the -modifier comes before the field, not after it:: - - >>> post = BlogPost(title='Test', page_views=0, tags=['database']) - >>> post.save() - >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1) - >>> post.reload() # the document has been changed, so we need to reload it - >>> post.page_views - 1 - >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post') - >>> post.reload() - >>> post.title - 'Example Post' - >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql') - >>> post.reload() - >>> post.tags - ['database', 'nosql'] - -The positional operator allows you to update list items without knowing the -index position, therefore making the update a single atomic operation. As we -cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: - - >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo']) - >>> post.save() - >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb') - >>> post.reload() - >>> post.tags - ['database', 'mongodb'] - -.. note :: - Currently only top level lists are handled, future versions of mongodb / - pymongo plan to support nested positional operators. See `The $ positional - operator `_. diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 3c3159f8..58b3d6ed 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -41,9 +41,9 @@ Example usage:: logging.debug("Created") else: logging.debug("Updated") - + signals.pre_save.connect(Author.pre_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) -.. _blinker: http://pypi.python.org/pypi/blinker \ No newline at end of file +.. 
_blinker: http://pypi.python.org/pypi/blinker diff --git a/docs/index.rst b/docs/index.rst index 3b036564..920ddf60 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,35 +2,62 @@ MongoEngine User Documentation ============================== -MongoEngine is an Object-Document Mapper, written in Python for working with +**MongoEngine** is an Object-Document Mapper, written in Python for working with MongoDB. To install it, simply run .. code-block:: console # pip install -U mongoengine -The source is available on `GitHub `_. +:doc:`tutorial` + Start here for a quick overview. + +:doc:`guide/index` + The Full guide to MongoEngine + +:doc:`apireference` + The complete API documentation. + +:doc:`django` + Using MongoEngine and Django + +Community +--------- To get help with using MongoEngine, use the `MongoEngine Users mailing list `_ or come chat on the `#mongoengine IRC channel `_. -If you are interested in contributing, join the developers' `mailing list +Contributing +------------ + +The source is available on `GitHub `_ and +contributions are always encouraged. Contributions can be as simple as +minor tweaks to this documentation. To contribute, fork the project on +`GitHub `_ and send a +pull request. + +Also, you can join the developers' `mailing list `_. +Changes +------- +See the :doc:`changelog` for a full list of changes to MongoEngine. + .. toctree:: - :maxdepth: 2 + :hidden: tutorial guide/index apireference django changelog - upgrading + upgrade Indices and tables -================== +------------------ * :ref:`genindex` +* :ref:`modindex` * :ref:`search` diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 7187adcf..c684c1ad 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -5,24 +5,33 @@ Upgrading 0.4 to 0.5 =========== -There have been the following backwards incompatibilities from 0.4 to 0.5: +There have been the following backwards incompatibilities from 0.4 to 0.5. The +main areas of changed are: choices in fields, map_reduce and collection names. -# Choice options: +Choice options: +-------------- Are now expected to be an iterable of tuples, with the first element in each tuple being the actual value to be stored. The second element is the human-readable name for the option. -# PyMongo / MongoDB -map reduce now requires pymongo 1.11+ More methods now use map_reduce as db.eval -is not supported for sharding - the following have been changed: +PyMongo / MongoDB +----------------- - * sum - * average - * item_frequencies +map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output +parameters, have been depreciated. -#. Default collection naming. +More methods now use map_reduce as db.eval is not supported for sharding as such +the following have been changed: + + * :meth:`~mongoengine.queryset.QuerySet.sum` + * :meth:`~mongoengine.queryset.QuerySet.average` + * :meth:`~mongoengine.queryset.QuerySet.item_frequencies` + + +Default collection naming +------------------------- Previously it was just lowercase, its now much more pythonic and readable as its lowercase and underscores, previously :: diff --git a/mongoengine/queryset.py b/mongoengine/queryset.py index a830150a..a6626855 100644 --- a/mongoengine/queryset.py +++ b/mongoengine/queryset.py @@ -744,7 +744,7 @@ class QuerySet(object): :param write_options: optional extra keyword arguments used if we have to create a new document. - Passes any write_options onto :meth:`~mongoengine.document.Document.save` + Passes any write_options onto :meth:`~mongoengine.Document.save` .. 
versionadded:: 0.3 """ @@ -901,9 +901,11 @@ class QuerySet(object): Returns an iterator yielding :class:`~mongoengine.document.MapReduceDocument`. - .. note:: Map/Reduce changed in server version **>= 1.7.4**. The PyMongo - :meth:`~pymongo.collection.Collection.map_reduce` helper requires - PyMongo version **>= 1.11**. + .. note:: + + Map/Reduce changed in server version **>= 1.7.4**. The PyMongo + :meth:`~pymongo.collection.Collection.map_reduce` helper requires + PyMongo version **>= 1.11**. .. versionchanged:: 0.5 - removed ``keep_temp`` keyword argument, which was only relevant @@ -1070,8 +1072,7 @@ class QuerySet(object): and `.exclude()` to manipulate which fields to retrieve. Fields also allows for a greater level of control for example: - Retrieving a Subrange of Array Elements - --------------------------------------- + Retrieving a Subrange of Array Elements: You can use the $slice operator to retrieve a subrange of elements in an array :: @@ -1500,6 +1501,7 @@ class QuerySet(object): This is useful for generating tag clouds, or searching documents. .. note:: + Can only do direct simple mappings and cannot map across :class:`~mongoengine.ReferenceField` or :class:`~mongoengine.GenericReferenceField` for more complex From ee7d370751dd93ac80cd2084f0269d377e1d79bd Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 9 Sep 2011 05:52:43 -0700 Subject: [PATCH 210/214] Bumped the version --- docs/conf.py | 2 +- mongoengine/__init__.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 2541f49a..03ba047f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -38,7 +38,7 @@ master_doc = 'index' # General information about the project. project = u'MongoEngine' -copyright = u'2009-2010, Harry Marr' +copyright = u'2009-2011, Harry Marr' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index de635f96..0d271783 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -14,7 +14,8 @@ __all__ = (document.__all__ + fields.__all__ + connection.__all__ + __author__ = 'Harry Marr' -VERSION = (0, 4, 0) +VERSION = (0, 4, 1) + def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) @@ -23,4 +24,3 @@ def get_version(): return version __version__ = get_version() - From b8a5791de6148e22785c4f75d55ecca96b764807 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 9 Sep 2011 14:33:27 +0100 Subject: [PATCH 211/214] Updates to documents [#245] --- docs/guide/defining-documents.rst | 1 + mongoengine/fields.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 00a7d090..fd005e40 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -50,6 +50,7 @@ are as follows: * :class:`~mongoengine.ReferenceField` * :class:`~mongoengine.GenericReferenceField` * :class:`~mongoengine.EmbeddedDocumentField` +* :class:`~mongoengine.GenericEmbeddedDocumentField` * :class:`~mongoengine.BooleanField` * :class:`~mongoengine.FileField` * :class:`~mongoengine.BinaryField` diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 9bb414bd..c5734430 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -374,8 +374,8 @@ class ComplexDateTimeField(StringField): class EmbeddedDocumentField(BaseField): - """An embedded document field. 
Only valid values are subclasses of - :class:`~mongoengine.EmbeddedDocument`. + """An embedded document field - with a declared document_type. + Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. """ def __init__(self, document_type, **kwargs): @@ -421,7 +421,14 @@ class EmbeddedDocumentField(BaseField): def prepare_query_value(self, op, value): return self.to_mongo(value) + class GenericEmbeddedDocumentField(BaseField): + """A generic embedded document field - allows any + :class:`~mongoengine.EmbeddedDocument` to be stored. + + Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. + """ + def prepare_query_value(self, op, value): return self.to_mongo(value) @@ -448,6 +455,7 @@ class GenericEmbeddedDocumentField(BaseField): data['_cls'] = document._class_name return data + class ListField(ComplexBaseField): """A list field that wraps a standard field, allowing multiple instances of the field to be used as a list in the database. From 60f0491f6230a3aa71fdee340c0f8f1cdab3bd0d Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 9 Sep 2011 17:35:44 +0100 Subject: [PATCH 212/214] Updated changelog --- docs/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3a2a2c43..1073ba41 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,6 +5,7 @@ Changelog Changes in dev ============== +- Added GenericEmbeddedDocument - So you can embed any type of embeddable document - Added within_polygon support - for those with mongodb 1.9 - Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments - Added where() - filter to allowing users to specify query expressions as Javascript From 050542c29b420128b0e89f6dfb1cf54d6b03b698 Mon Sep 17 00:00:00 2001 From: Ross Lawley Date: Fri, 9 Sep 2011 17:36:40 +0100 Subject: [PATCH 213/214] Added InvalidDocumentError Ensures defined documents are valid and users don't override core methods by accident. 
fixes #275 --- docs/changelog.rst | 3 ++- mongoengine/base.py | 8 ++++++++ tests/document.py | 11 ++++++++++- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1073ba41..04235db6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,7 +5,8 @@ Changelog Changes in dev ============== -- Added GenericEmbeddedDocument - So you can embed any type of embeddable document +- Added InvalidDocumentError - so Document core methods can't be overwritten +- Added GenericEmbeddedDocument - so you can embed any type of embeddable document - Added within_polygon support - for those with mongodb 1.9 - Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments - Added where() - filter to allowing users to specify query expressions as Javascript diff --git a/mongoengine/base.py b/mongoengine/base.py index 6a94670e..c4bcee1e 100644 --- a/mongoengine/base.py +++ b/mongoengine/base.py @@ -16,6 +16,9 @@ class NotRegistered(Exception): pass +class InvalidDocumentError(Exception): + pass + class ValidationError(Exception): pass @@ -388,6 +391,8 @@ class DocumentMetaclass(type): attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k!=v.db_field]) attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()]) + from mongoengine import Document + new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): field.owner_document = new_class @@ -396,6 +401,9 @@ class DocumentMetaclass(type): field.document_type.register_delete_rule(new_class, field.name, delete_rule) + if field.name and hasattr(Document, field.name): + raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name) + module = attrs.get('__module__') base_excs = tuple(base.DoesNotExist for base in bases diff --git a/tests/document.py b/tests/document.py index b76b6f92..95f37748 100644 --- a/tests/document.py +++ b/tests/document.py @@ -12,7 +12,7 @@ import weakref from fixtures import Base, Mixin, PickleEmbedded, PickleTest from mongoengine import * -from mongoengine.base import _document_registry, NotRegistered +from mongoengine.base import _document_registry, NotRegistered, InvalidDocumentError from mongoengine.connection import _get_db @@ -2336,6 +2336,15 @@ class DocumentTest(unittest.TestCase): pickle_doc.reload() self.assertEquals(resurrected, pickle_doc) + def throw_invalid_document_error(self): + + # test handles people trying to upsert + def throw_invalid_document_error(): + class Blog(Document): + validate = DictField() + + self.assertRaises(InvalidDocumentError, throw_invalid_document_error) + if __name__ == '__main__': unittest.main() From 88b1a2971944e9e762910bd583fef56da2cc82a6 Mon Sep 17 00:00:00 2001 From: Harry Marr Date: Sat, 10 Sep 2011 11:54:43 +0200 Subject: [PATCH 214/214] Typo fix --- docs/guide/querying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 13e11106..13a374cc 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -124,7 +124,7 @@ the $ positional operator :: Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1}) -However, this doesn't map well to the syntax so you can alos use a capital S instead :: +However, this doesn't map well to the syntax so you can also use a capital S instead :: Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
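The ``InvalidDocumentError`` check added in PATCH 213 means a schema that shadows a core ``Document`` method now fails at class-definition time. A minimal sketch of the behaviour, mirroring the ``Blog`` example from the new test and assuming ``from mongoengine import *`` (the ``db_field`` workaround shown is illustrative only)::

    from mongoengine.base import InvalidDocumentError

    try:
        class Blog(Document):
            validate = DictField()   # 'validate' clashes with Document.validate
    except InvalidDocumentError:
        # Pick a non-clashing attribute name; db_field keeps the key used in MongoDB.
        class Blog(Document):
            validation_rules = DictField(db_field='validate')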