diff --git a/.travis.yml b/.travis.yml index 78a9f787..381f7385 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,9 +19,7 @@ python: - pypy env: -- MONGODB=2.6 PYMONGO=2.7 -- MONGODB=2.6 PYMONGO=2.8 -- MONGODB=2.6 PYMONGO=3.0 +- MONGODB=2.6 PYMONGO=3.x matrix: # Finish the build as soon as one job fails @@ -29,21 +27,17 @@ matrix: include: - python: 2.7 - env: MONGODB=2.4 PYMONGO=2.7 + env: MONGODB=2.4 PYMONGO=3.5 - python: 2.7 - env: MONGODB=2.4 PYMONGO=3.0 - - python: 2.7 - env: MONGODB=3.0 PYMONGO=3.0 + env: MONGODB=3.0 PYMONGO=3.x - python: 3.5 - env: MONGODB=2.4 PYMONGO=2.7 + env: MONGODB=2.4 PYMONGO=3.5 - python: 3.5 - env: MONGODB=2.4 PYMONGO=3.0 - - python: 3.5 - env: MONGODB=3.0 PYMONGO=3.0 + env: MONGODB=3.0 PYMONGO=3.x - python: 3.6 - env: MONGODB=2.4 PYMONGO=3.0 + env: MONGODB=2.4 PYMONGO=3.5 - python: 3.6 - env: MONGODB=3.0 PYMONGO=3.0 + env: MONGODB=3.0 PYMONGO=3.x before_install: - bash .install_mongodb_on_travis.sh @@ -97,11 +91,11 @@ deploy: distributions: "sdist bdist_wheel" # only deploy on tagged commits (aka GitHub releases) and only for the - # parent repo's builds running Python 2.7 along with PyMongo v3.0 (we run + # parent repo's builds running Python 2.7 along with PyMongo v3.x (we run # Travis against many different Python and PyMongo versions and we don't # want the deploy to occur multiple times). 
on: tags: true repo: MongoEngine/mongoengine - condition: "$PYMONGO = 3.0" + condition: "$PYMONGO = 3.x" python: 2.7 diff --git a/AUTHORS b/AUTHORS index 4eac5eb2..b38825dc 100644 --- a/AUTHORS +++ b/AUTHORS @@ -244,4 +244,6 @@ that much better: * Stanislav Kaledin (https://github.com/sallyruthstruik) * Dmitry Yantsen (https://github.com/mrTable) * Renjianxin (https://github.com/Davidrjx) - * Erdenezul Batmunkh (https://github.com/erdenezul) \ No newline at end of file + * Erdenezul Batmunkh (https://github.com/erdenezul) + * Andy Yankovsky (https://github.com/werat) + * Bastien Gérard (https://github.com/bagerard) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 573d7060..f7b15c85 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,8 +22,11 @@ Supported Interpreters MongoEngine supports CPython 2.7 and newer. Language features not supported by all interpreters can not be used. -Please also ensure that your code is properly converted by -`2to3 `_ for Python 3 support. +The codebase is written in python 2 so you must be using python 2 +when developing new features. Compatibility of the library with Python 3 +relies on the 2to3 package that gets executed as part of the installation +build. You should ensure that your code is properly converted by +`2to3 `_. Style Guide ----------- diff --git a/docs/apireference.rst b/docs/apireference.rst index 625d4a8b..05ba3f73 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -87,7 +87,9 @@ Fields .. autoclass:: mongoengine.fields.DictField .. autoclass:: mongoengine.fields.MapField .. autoclass:: mongoengine.fields.ReferenceField +.. autoclass:: mongoengine.fields.LazyReferenceField .. autoclass:: mongoengine.fields.GenericReferenceField +.. autoclass:: mongoengine.fields.GenericLazyReferenceField .. autoclass:: mongoengine.fields.CachedReferenceField .. autoclass:: mongoengine.fields.BinaryField .. 
autoclass:: mongoengine.fields.FileField diff --git a/docs/changelog.rst b/docs/changelog.rst index f04ab314..9d9fa976 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,9 +2,27 @@ Changelog ========= -Development -=========== -- (Fill this out as you fix issues and develop your features). +Changes in 0.15.4 +================= +- Added `DateField` #513 + +Changes in 0.15.3 +================= +- Subfield resolve error in generic_emdedded_document query #1651 #1652 +- use each modifier only with $position #1673 #1675 +- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 +- Fix validation error instance in GenericEmbeddedDocumentField #1067 +- Update cached fields when fields argument is given #1712 +- Add a db parameter to register_connection for compatibility with connect +- Use insert_one, insert_many in Document.insert #1491 +- Use new update_one, update_many on document/queryset update #1491 +- Use insert_one, insert_many in Document.insert #1491 +- Fix reload(fields) affect changed fields #1371 +- Fix Read-only access to database fails when trying to create indexes #1338 + +Changes in 0.15.0 +================= +- Add LazyReferenceField and GenericLazyReferenceField to address #1230 Changes in 0.14.1 ================= diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index f40ed4c5..5dac6ae9 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -18,10 +18,10 @@ provide the :attr:`host` and :attr:`port` arguments to connect('project1', host='192.168.1.35', port=12345) -If the database requires authentication, :attr:`username` and :attr:`password` -arguments should be provided:: +If the database requires authentication, :attr:`username`, :attr:`password` +and :attr:`authentication_source` arguments should be provided:: - connect('project1', username='webapp', password='pwd123') + connect('project1', username='webapp', password='pwd123', authentication_source='admin') URI style 
connections are also supported -- just supply the URI as the :attr:`host` to diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index d41ae7e6..2a8d5418 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -22,7 +22,7 @@ objects** as class attributes to the document class:: class Page(Document): title = StringField(max_length=200, required=True) - date_modified = DateTimeField(default=datetime.datetime.now) + date_modified = DateTimeField(default=datetime.datetime.utcnow) As BSON (the binary format for storing data in mongodb) is order dependent, documents are serialized based on their field order. @@ -80,6 +80,7 @@ are as follows: * :class:`~mongoengine.fields.FloatField` * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` * :class:`~mongoengine.fields.GenericReferenceField` +* :class:`~mongoengine.fields.GenericLazyReferenceField` * :class:`~mongoengine.fields.GeoPointField` * :class:`~mongoengine.fields.ImageField` * :class:`~mongoengine.fields.IntField` @@ -87,6 +88,7 @@ are as follows: * :class:`~mongoengine.fields.MapField` * :class:`~mongoengine.fields.ObjectIdField` * :class:`~mongoengine.fields.ReferenceField` +* :class:`~mongoengine.fields.LazyReferenceField` * :class:`~mongoengine.fields.SequenceField` * :class:`~mongoengine.fields.SortedListField` * :class:`~mongoengine.fields.StringField` @@ -224,7 +226,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate user = ReferenceField(User) answers = DictField() - survey_response = SurveyResponse(date=datetime.now(), user=request.user) + survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user) response_form = ResponseForm(request.POST) survey_response.answers = response_form.cleaned_data() survey_response.save() @@ -526,8 +528,9 @@ There are a few top level defaults for all indexes that can be set:: meta = { 'index_options': {}, 'index_background': True, + 'index_cls': False, + 
'auto_create_index': True, 'index_drop_dups': True, - 'index_cls': False } @@ -540,6 +543,12 @@ There are a few top level defaults for all indexes that can be set:: :attr:`index_cls` (Optional) A way to turn off a specific index for _cls. +:attr:`auto_create_index` (Optional) + When this is True (default), MongoEngine will ensure that the correct + indexes exist in MongoDB each time a command is run. This can be disabled + in systems where indexes are managed separately. Disabling this will improve + performance. + :attr:`index_drop_dups` (Optional) Set the default value for if an index should drop duplicates @@ -618,7 +627,7 @@ collection after a given period. See the official documentation for more information. A common usecase might be session data:: class Session(Document): - created = DateTimeField(default=datetime.now) + created = DateTimeField(default=datetime.utcnow) meta = { 'indexes': [ {'fields': ['created'], 'expireAfterSeconds': 3600} diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index 68e7a6d2..f7380e89 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -53,7 +53,8 @@ Deletion Deleting stored files is achieved with the :func:`delete` method:: - marmot.photo.delete() + marmot.photo.delete() # Deletes the GridFS document + marmot.save() # Saves the GridFS reference (being None) contained in the marmot instance .. warning:: @@ -71,4 +72,5 @@ Files can be replaced with the :func:`replace` method. 
This works just like the :func:`put` method so even metadata can (and should) be replaced:: another_marmot = open('another_marmot.png', 'rb') - marmot.photo.replace(another_marmot, content_type='image/png') + marmot.photo.replace(another_marmot, content_type='image/png') # Replaces the GridFS document + marmot.save() # Replaces the GridFS reference contained in marmot instance diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 30277966..06bccb3b 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -43,10 +43,10 @@ Available signals include: has taken place but before saving. `post_save` - Called within :meth:`~mongoengine.Document.save` after all actions - (validation, insert/update, cascades, clearing dirty flags) have completed - successfully. Passed the additional boolean keyword argument `created` to - indicate if the save was an insert or an update. + Called within :meth:`~mongoengine.Document.save` after most actions + (validation, insert/update, and cascades, but not clearing dirty flags) have + completed successfully. Passed the additional boolean keyword argument + `created` to indicate if the save was an insert or an update. `pre_delete` Called within :meth:`~mongoengine.Document.delete` prior to @@ -113,6 +113,10 @@ handlers within your subclass:: signals.pre_save.connect(Author.pre_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) +.. warning:: + + Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently. 
+ Finally, you can also use this small decorator to quickly create a number of signals and attach them to your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: diff --git a/docs/guide/text-indexes.rst b/docs/guide/text-indexes.rst index 725ad369..92a4471a 100644 --- a/docs/guide/text-indexes.rst +++ b/docs/guide/text-indexes.rst @@ -48,4 +48,4 @@ Ordering by text score :: - objects = News.objects.search('mongo').order_by('$text_score') + objects = News.objects.search_text('mongo').order_by('$text_score') diff --git a/docs/tutorial.rst b/docs/tutorial.rst index cc5b647d..bcd0d17f 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -86,7 +86,7 @@ of them stand out as particularly intuitive solutions. Posts ^^^^^ -Happily mongoDB *isn't* a relational database, so we're not going to do it that +Happily MongoDB *isn't* a relational database, so we're not going to do it that way. As it turns out, we can use MongoDB's schemaless nature to provide us with a much nicer solution. We will store all of the posts in *one collection* and each post type will only store the fields it needs. If we later want to add @@ -153,7 +153,7 @@ post. This works, but there is no real reason to be storing the comments separately from their associated posts, other than to work around the relational model. Using MongoDB we can store the comments as a list of *embedded documents* directly on a post document. An embedded document should -be treated no differently that a regular document; it just doesn't have its own +be treated no differently than a regular document; it just doesn't have its own collection in the database. 
Using MongoEngine, we can define the structure of embedded documents, along with utility methods, in exactly the same way we do with regular documents:: diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index c7c6f707..e6dc6b9d 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) + list(signals.__all__) + list(errors.__all__)) -VERSION = (0, 14, 3) +VERSION = (0, 15, 3) def get_version(): diff --git a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py index da31b922..e069a147 100644 --- a/mongoengine/base/__init__.py +++ b/mongoengine/base/__init__.py @@ -15,7 +15,7 @@ __all__ = ( 'UPDATE_OPERATORS', '_document_registry', 'get_document', # datastructures - 'BaseDict', 'BaseList', 'EmbeddedDocumentList', + 'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference', # document 'BaseDocument', diff --git a/mongoengine/base/common.py b/mongoengine/base/common.py index b9971ff7..f80471ef 100644 --- a/mongoengine/base/common.py +++ b/mongoengine/base/common.py @@ -3,9 +3,10 @@ from mongoengine.errors import NotRegistered __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') -UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', - 'push_all', 'pull', 'pull_all', 'add_to_set', - 'set_on_insert', 'min', 'max', 'rename']) +UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'mul', + 'pop', 'push', 'push_all', 'pull', + 'pull_all', 'add_to_set', 'set_on_insert', + 'min', 'max', 'rename']) _document_registry = {} diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index 14fe95e9..db292f14 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,12 +1,13 @@ import itertools import weakref +from bson import DBRef import six from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned -__all__ = ('BaseDict', 
'BaseList', 'EmbeddedDocumentList') +__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') class BaseDict(dict): @@ -127,8 +128,8 @@ class BaseList(list): return value def __iter__(self): - for i in six.moves.range(self.__len__()): - yield self[i] + for v in super(BaseList, self).__iter__(): + yield v def __setitem__(self, key, value, *args, **kwargs): if isinstance(key, slice): @@ -137,7 +138,7 @@ class BaseList(list): self._mark_as_changed(key) return super(BaseList, self).__setitem__(key, value) - def __delitem__(self, key, *args, **kwargs): + def __delitem__(self, key): self._mark_as_changed() return super(BaseList, self).__delitem__(key) @@ -186,7 +187,7 @@ class BaseList(list): self._mark_as_changed() return super(BaseList, self).remove(*args, **kwargs) - def reverse(self, *args, **kwargs): + def reverse(self): self._mark_as_changed() return super(BaseList, self).reverse() @@ -233,6 +234,9 @@ class EmbeddedDocumentList(BaseList): Filters the list by only including embedded documents with the given keyword arguments. + This method only supports simple comparison (e.g: .filter(name='John Doe')) + and does not support operators like __gte, __lte, __icontains like queryset.filter does + :param kwargs: The keyword arguments corresponding to the fields to filter on. *Multiple arguments are treated as if they are ANDed together.* @@ -350,7 +354,8 @@ class EmbeddedDocumentList(BaseList): def update(self, **update): """ - Updates the embedded documents with the given update values. + Updates the embedded documents with the given replacement values. This + function does not support mongoDB update operators such as ``inc__``. .. 
note:: The embedded document changes are not automatically saved @@ -445,3 +450,42 @@ class StrictDict(object): cls._classes[allowed_keys] = SpecificStrictDict return cls._classes[allowed_keys] + + +class LazyReference(DBRef): + __slots__ = ('_cached_doc', 'passthrough', 'document_type') + + def fetch(self, force=False): + if not self._cached_doc or force: + self._cached_doc = self.document_type.objects.get(pk=self.pk) + if not self._cached_doc: + raise DoesNotExist('Trying to dereference unknown document %s' % (self)) + return self._cached_doc + + @property + def pk(self): + return self.id + + def __init__(self, document_type, pk, cached_doc=None, passthrough=False): + self.document_type = document_type + self._cached_doc = cached_doc + self.passthrough = passthrough + super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk) + + def __getitem__(self, name): + if not self.passthrough: + raise KeyError() + document = self.fetch() + return document[name] + + def __getattr__(self, name): + if not object.__getattribute__(self, 'passthrough'): + raise AttributeError() + document = self.fetch() + try: + return document[name] + except KeyError: + raise AttributeError() + + def __repr__(self): + return "" % (self.document_type, self.pk) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index f8ab73d0..85906a3e 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -13,6 +13,7 @@ from mongoengine import signals from mongoengine.base.common import get_document from mongoengine.base.datastructures import (BaseDict, BaseList, EmbeddedDocumentList, + LazyReference, StrictDict) from mongoengine.base.fields import ComplexBaseField from mongoengine.common import _import_class @@ -99,13 +100,11 @@ class BaseDocument(object): for key, value in values.iteritems(): if key in self._fields or key == '_id': setattr(self, key, value) - elif self._dynamic: + else: dynamic_data[key] = value else: FileField = 
_import_class('FileField') for key, value in values.iteritems(): - if key == '__auto_convert': - continue key = self._reverse_db_field_map.get(key, key) if key in self._fields or key in ('id', 'pk', '_cls'): if __auto_convert and value is not None: @@ -146,7 +145,7 @@ class BaseDocument(object): if not hasattr(self, name) and not name.startswith('_'): DynamicField = _import_class('DynamicField') - field = DynamicField(db_field=name) + field = DynamicField(db_field=name, null=True) field.name = name self._dynamic_fields[name] = field self._fields_ordered += (name,) @@ -336,7 +335,7 @@ class BaseDocument(object): value = field.generate() self._data[field_name] = value - if value is not None: + if (value is not None) or (field.null): if use_db_field: data[field.db_field] = value else: @@ -405,7 +404,15 @@ class BaseDocument(object): @classmethod def from_json(cls, json_data, created=False): - """Converts json data to an unsaved document instance""" + """Converts json data to a Document instance + + :param json_data: The json data to load into the Document + :param created: If True, the document will be considered as a brand new document + If False and an id is provided, it will consider that the data being + loaded corresponds to what's already in the database (This has an impact of subsequent call to .save()) + If False and no id is provided, it will consider the data as a new document + (default ``False``) + """ return cls._from_son(json_util.loads(json_data), created=created) def __expand_dynamic_values(self, name, value): @@ -488,7 +495,7 @@ class BaseDocument(object): else: data = getattr(data, part, None) - if hasattr(data, '_changed_fields'): + if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'): if getattr(data, '_is_document', False): continue @@ -1079,5 +1086,11 @@ class BaseDocument(object): """Return the display value for a choice field""" value = getattr(self, field.name) if field.choices and isinstance(field.choices[0], (list, 
tuple)): - return dict(field.choices).get(value, value) + if value is None: + return None + sep = getattr(field, 'display_sep', ' ') + values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value] + return sep.join([ + dict(field.choices).get(val, val) + for val in values or []]) return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index e2b5d321..69034d5d 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -213,8 +213,10 @@ class BaseField(object): ) ) # Choices which are types other than Documents - elif value not in choice_list: - self.error('Value must be one of %s' % six.text_type(choice_list)) + else: + values = value if isinstance(value, (list, tuple)) else [value] + if len(set(values) - set(choice_list)): + self.error('Value must be one of %s' % six.text_type(choice_list)) def _validate(self, value, **kwargs): # Check the Choices Constraint diff --git a/mongoengine/base/utils.py b/mongoengine/base/utils.py new file mode 100644 index 00000000..288c2f3e --- /dev/null +++ b/mongoengine/base/utils.py @@ -0,0 +1,22 @@ +import re + + +class LazyRegexCompiler(object): + """Descriptor to allow lazy compilation of regex""" + + def __init__(self, pattern, flags=0): + self._pattern = pattern + self._flags = flags + self._compiled_regex = None + + @property + def compiled_regex(self): + if self._compiled_regex is None: + self._compiled_regex = re.compile(self._pattern, self._flags) + return self._compiled_regex + + def __get__(self, obj, objtype): + return self.compiled_regex + + def __set__(self, instance, value): + raise AttributeError("Can not set attribute LazyRegexCompiler") diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 34ff4dc3..705dc25b 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -28,7 +28,7 @@ _connections = {} _dbs = {} -def register_connection(alias, name=None, host=None, port=None, +def register_connection(alias, 
db=None, name=None, host=None, port=None, read_preference=READ_PREFERENCE, username=None, password=None, authentication_source=None, @@ -39,6 +39,7 @@ def register_connection(alias, name=None, host=None, port=None, :param alias: the name that will be used to refer to this connection throughout MongoEngine :param name: the name of the specific database to use + :param db: the name of the database to use, for compatibility with connect :param host: the host name of the :program:`mongod` instance to connect to :param port: the port that the :program:`mongod` instance is running on :param read_preference: The read preference for the collection @@ -58,7 +59,7 @@ def register_connection(alias, name=None, host=None, port=None, .. versionchanged:: 0.10.6 - added mongomock support """ conn_settings = { - 'name': name or 'test', + 'name': name or db or 'test', 'host': host or 'localhost', 'port': port or 27017, 'read_preference': read_preference, diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index c477575e..ec2e9e8b 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,9 +1,11 @@ +from contextlib import contextmanager +from pymongo.write_concern import WriteConcern from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db __all__ = ('switch_db', 'switch_collection', 'no_dereference', - 'no_sub_classes', 'query_counter') + 'no_sub_classes', 'query_counter', 'set_write_concern') class switch_db(object): @@ -215,3 +217,10 @@ class query_counter(object): count = self.db.system.profile.find(ignore_query).count() - self.counter self.counter += 1 return count + + +@contextmanager +def set_write_concern(collection, write_concerns): + combined_concerns = dict(collection.write_concern.document.items()) + combined_concerns.update(write_concerns) + yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) diff --git a/mongoengine/dereference.py 
b/mongoengine/dereference.py index 59204d4d..40bc72b2 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -3,6 +3,7 @@ import six from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, TopLevelDocumentMetaclass, get_document) +from mongoengine.base.datastructures import LazyReference from mongoengine.connection import get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.fields import DictField, ListField, MapField, ReferenceField @@ -99,7 +100,10 @@ class DeReference(object): if isinstance(item, (Document, EmbeddedDocument)): for field_name, field in item._fields.iteritems(): v = item._data.get(field_name, None) - if isinstance(v, DBRef): + if isinstance(v, LazyReference): + # LazyReference inherits DBRef but should not be dereferenced here ! + continue + elif isinstance(v, DBRef): reference_map.setdefault(field.document_type, set()).add(v.id) elif isinstance(v, (dict, SON)) and '_ref' in v: reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) @@ -110,6 +114,9 @@ class DeReference(object): if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): key = field_cls reference_map.setdefault(key, set()).update(refs) + elif isinstance(item, LazyReference): + # LazyReference inherits DBRef but should not be dereferenced here ! 
+ continue elif isinstance(item, DBRef): reference_map.setdefault(item.collection, set()).add(item.id) elif isinstance(item, (dict, SON)) and '_ref' in item: @@ -126,7 +133,12 @@ class DeReference(object): """ object_map = {} for collection, dbrefs in self.reference_map.iteritems(): - if hasattr(collection, 'objects'): # We have a document class for the refs + + # we use getattr instead of hasattr because as hasattr swallows any exception under python2 + # so it could hide nasty things without raising exceptions (cfr bug #1688)) + ref_document_cls_exists = (getattr(collection, 'objects', None) is not None) + + if ref_document_cls_exists: col_name = collection._get_collection_name() refs = [dbref for dbref in dbrefs if (col_name, dbref) not in object_map] @@ -230,7 +242,7 @@ class DeReference(object): elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: item_name = '%s.%s' % (name, k) if name else name data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) - elif hasattr(v, 'id'): + elif isinstance(v, DBRef) and hasattr(v, 'id'): data[k] = self.object_map.get((v.collection, v.id), v) if instance and name: diff --git a/mongoengine/document.py b/mongoengine/document.py index f1622934..25af273d 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -195,7 +195,10 @@ class Document(BaseDocument): # Ensure indexes on the collection unless auto_create_index was # set to False. - if cls._meta.get('auto_create_index', True): + # Also there is no need to ensure indexes on slave. 
+ db = cls._get_db() + if cls._meta.get('auto_create_index', True) and\ + db.client.is_primary: cls.ensure_indexes() return cls._collection @@ -280,6 +283,9 @@ class Document(BaseDocument): elif query[id_field] != self.pk: raise InvalidQueryError('Invalid document modify query: it must modify only this document.') + # Need to add shard key to query, or you get an error + query.update(self._object_key) + updated = self._qs(**query).modify(new=True, **update) if updated is None: return False @@ -576,12 +582,11 @@ class Document(BaseDocument): """Delete the :class:`~mongoengine.Document` from the database. This will only take effect if the document has been previously saved. - :parm signal_kwargs: (optional) kwargs dictionary to be passed to + :param signal_kwargs: (optional) kwargs dictionary to be passed to the signal calls. :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + will be used as options for the resultant ``getLastError`` command. + For example, ``save(..., w: 2, fsync: True)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. @@ -702,7 +707,6 @@ class Document(BaseDocument): obj = obj[0] else: raise self.DoesNotExist('Document does not exist') - for field in obj._data: if not fields or field in fields: try: @@ -710,7 +714,7 @@ class Document(BaseDocument): except (KeyError, AttributeError): try: # If field is a special field, e.g. items is stored as _reserved_items, - # an KeyError is thrown. So try to retrieve the field from _data + # a KeyError is thrown. So try to retrieve the field from _data setattr(self, field, self._reload(field, obj._data.get(field))) except KeyError: # If field is removed from the database while the object @@ -718,7 +722,9 @@ class Document(BaseDocument): # i.e. 
obj.update(unset__field=1) followed by obj.reload() delattr(self, field) - self._changed_fields = obj._changed_fields + self._changed_fields = list( + set(self._changed_fields) - set(fields) + ) if fields else obj._changed_fields self._created = False return self @@ -964,8 +970,16 @@ class Document(BaseDocument): """ required = cls.list_indexes() - existing = [info['key'] - for info in cls._get_collection().index_information().values()] + + existing = [] + for info in cls._get_collection().index_information().values(): + if '_fts' in info['key'][0]: + index_type = info['key'][0][1] + text_index_fields = info.get('weights').keys() + existing.append( + [(key, index_type) for key in text_index_fields]) + else: + existing.append(info['key']) missing = [index for index in required if index not in existing] extra = [index for index in existing if index not in required] @@ -985,7 +999,7 @@ class Document(BaseDocument): class DynamicDocument(Document): """A Dynamic Document class allowing flexible, expandable and uncontrolled schemas. As a :class:`~mongoengine.Document` subclass, acts in the same - way as an ordinary document but has expando style properties. Any data + way as an ordinary document but has expanded style properties. 
Any data passed or set against the :class:`~mongoengine.DynamicDocument` that is not a field is automatically converted into a :class:`~mongoengine.fields.DynamicField` and data can be attributed to that @@ -1010,6 +1024,7 @@ class DynamicDocument(Document): field_name = args[0] if field_name in self._dynamic_fields: setattr(self, field_name, None) + self._dynamic_fields[field_name].null = False else: super(DynamicDocument, self).__delattr__(*args, **kwargs) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index fffba7ac..3c255798 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -5,7 +5,6 @@ import re import socket import time import uuid -import warnings from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON @@ -26,7 +25,10 @@ except ImportError: Int64 = long from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, - GeoJsonBaseField, ObjectIdField, get_document) + GeoJsonBaseField, LazyReference, ObjectIdField, + get_document) +from mongoengine.base.utils import LazyRegexCompiler +from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError @@ -41,11 +43,12 @@ except ImportError: __all__ = ( 'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', - 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', + 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'DateField', 'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', 'GenericEmbeddedDocumentField', 'DynamicField', 'ListField', 'SortedListField', 'EmbeddedDocumentListField', 'DictField', 'MapField', 'ReferenceField', 'CachedReferenceField', + 'LazyReferenceField', 'GenericLazyReferenceField', 'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 
class LazyReferenceField(BaseField):
    """A really lazy reference to a document.

    Unlike the :class:`~mongoengine.fields.ReferenceField` it will
    **not** be automatically (lazily) dereferenced on access.
    Instead, access will return a :class:`~mongoengine.base.LazyReference` class
    instance, allowing access to `pk` or manual dereference by using
    ``fetch()`` method.

    .. versionadded:: 0.15
    """

    def __init__(self, document_type, passthrough=False, dbref=False,
                 reverse_delete_rule=DO_NOTHING, **kwargs):
        """Initialises the Reference Field.

        :param document_type: The :class:`~mongoengine.Document` subclass
            being referenced, or its name as a string (resolved lazily).
        :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
          or as the :class:`~pymongo.objectid.ObjectId`.id .
        :param reverse_delete_rule: Determines what to do when the referring
          object is deleted
        :param passthrough: When trying to access unknown fields, the
          :class:`~mongoengine.base.datastructure.LazyReference` instance will
          automatically call `fetch()` and try to retrieve the field on the
          fetched document. Note this only works when getting a field
          (not when setting or deleting it).
        """
        # XXX ValidationError raised outside of the "validate" method.
        if (
            not isinstance(document_type, six.string_types) and
            not issubclass(document_type, Document)
        ):
            self.error('Argument to LazyReferenceField constructor must be a '
                       'document class or a string')

        self.dbref = dbref
        self.passthrough = passthrough
        self.document_type_obj = document_type
        self.reverse_delete_rule = reverse_delete_rule
        super(LazyReferenceField, self).__init__(**kwargs)

    @property
    def document_type(self):
        """Resolve (and cache) the referenced document class.

        String values are resolved lazily so that ``'self'`` and
        forward references to not-yet-imported documents work.
        """
        if isinstance(self.document_type_obj, six.string_types):
            if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
                self.document_type_obj = self.owner_document
            else:
                self.document_type_obj = get_document(self.document_type_obj)
        return self.document_type_obj

    def build_lazyref(self, value):
        """Normalize *value* into a :class:`LazyReference` (or ``None``).

        Accepts an existing LazyReference (re-wrapped if its passthrough
        flag differs from the field's), a document instance, a DBRef, or
        a bare primary key.
        """
        if isinstance(value, LazyReference):
            if value.passthrough != self.passthrough:
                value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough)
        elif value is not None:
            if isinstance(value, self.document_type):
                value = LazyReference(self.document_type, value.pk, passthrough=self.passthrough)
            elif isinstance(value, DBRef):
                value = LazyReference(self.document_type, value.id, passthrough=self.passthrough)
            else:
                # value is the primary key of the referenced document
                value = LazyReference(self.document_type, value, passthrough=self.passthrough)
        return value

    def __get__(self, instance, owner):
        """Descriptor to allow lazy dereferencing."""
        if instance is None:
            # Document class being used rather than a document object
            return self

        value = self.build_lazyref(instance._data.get(self.name))
        if value:
            # Cache the normalized LazyReference so repeated access does
            # not rebuild it.
            instance._data[self.name] = value

        return super(LazyReferenceField, self).__get__(instance, owner)

    def to_mongo(self, value):
        """Convert *value* to its MongoDB form (a DBRef or the raw pk)."""
        if isinstance(value, LazyReference):
            pk = value.pk
        elif isinstance(value, self.document_type):
            pk = value.pk
        elif isinstance(value, DBRef):
            pk = value.id
        else:
            # value is the primary key of the referenced document
            pk = value
        id_field_name = self.document_type._meta['id_field']
        id_field = self.document_type._fields[id_field_name]
        pk = id_field.to_mongo(pk)
        if self.dbref:
            return DBRef(self.document_type._get_collection_name(), pk)
        else:
            return pk

    def validate(self, value):
        """Ensure *value* references a saved document of the right type.

        :raises ValidationError: if the reference targets the wrong
            collection, has an invalid primary key, or the referenced
            document has not been saved yet (pk is None).
        """
        if isinstance(value, LazyReference):
            if value.collection != self.document_type._get_collection_name():
                self.error('Reference must be on a `%s` document.' % self.document_type)
            pk = value.pk
        elif isinstance(value, self.document_type):
            pk = value.pk
        elif isinstance(value, DBRef):
            # TODO: check collection ?
            collection = self.document_type._get_collection_name()
            if value.collection != collection:
                self.error("DBRef on bad collection (must be on `%s`)" % collection)
            pk = value.id
        else:
            # value is the primary key of the referenced document
            id_field_name = self.document_type._meta['id_field']
            id_field = getattr(self.document_type, id_field_name)
            pk = value
            try:
                id_field.validate(pk)
            except ValidationError:
                self.error(
                    "value should be `{0}` document, LazyReference or DBRef on `{0}` "
                    "or `{0}`'s primary key (i.e. `{1}`)".format(
                        self.document_type.__name__, type(id_field).__name__))

        if pk is None:
            self.error('You can only reference documents once they have been '
                       'saved to the database')

    def prepare_query_value(self, op, value):
        """Convert a query value to its MongoDB form; None passes through."""
        if value is None:
            return None
        super(LazyReferenceField, self).prepare_query_value(op, value)
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        """Return the referenced document's field for dotted lookups."""
        return self.document_type._fields.get(member_name)
class GenericLazyReferenceField(GenericReferenceField):
    """A reference to *any* :class:`~mongoengine.document.Document` subclass.
    Unlike the :class:`~mongoengine.fields.GenericReferenceField` it will
    **not** be automatically (lazily) dereferenced on access.
    Instead, access will return a :class:`~mongoengine.base.LazyReference` class
    instance, allowing access to `pk` or manual dereference by using
    ``fetch()`` method.

    .. note ::
        * Any documents used as a generic reference must be registered in the
          document registry. Importing the model will automatically register
          it.

        * You can use the choices param to limit the acceptable Document types

    .. versionadded:: 0.15
    """

    def __init__(self, *args, **kwargs):
        # Pop the field-specific flag before delegating the rest upstream.
        self.passthrough = kwargs.pop('passthrough', False)
        super(GenericLazyReferenceField, self).__init__(*args, **kwargs)

    def _validate_choices(self, value):
        # A LazyReference is validated against its target class name.
        if isinstance(value, LazyReference):
            value = value.document_type._class_name
        super(GenericLazyReferenceField, self)._validate_choices(value)

    def build_lazyref(self, value):
        """Normalize *value* into a :class:`LazyReference` (or ``None``)."""
        if value is None:
            return value
        if isinstance(value, LazyReference):
            if value.passthrough == self.passthrough:
                return value
            return LazyReference(value.document_type, value.pk,
                                 passthrough=self.passthrough)
        if isinstance(value, (dict, SON)):
            # Raw {'_cls': ..., '_ref': DBRef(...)} form from the database.
            return LazyReference(get_document(value['_cls']), value['_ref'].id,
                                 passthrough=self.passthrough)
        if isinstance(value, Document):
            return LazyReference(type(value), value.pk,
                                 passthrough=self.passthrough)
        return value

    def __get__(self, instance, owner):
        # Accessed on the class itself: return the field descriptor.
        if instance is None:
            return self

        lazy_value = self.build_lazyref(instance._data.get(self.name))
        if lazy_value:
            # Cache the normalized reference on the instance.
            instance._data[self.name] = lazy_value

        return super(GenericLazyReferenceField, self).__get__(instance, owner)

    def validate(self, value):
        # An unsaved document cannot be referenced (its pk is still None).
        if isinstance(value, LazyReference) and value.pk is None:
            self.error('You can only reference documents once they have been'
                       ' saved to the database')
        return super(GenericLazyReferenceField, self).validate(value)

    def to_mongo(self, document):
        """Serialize to the {'_cls': ..., '_ref': DBRef} SON form."""
        if document is None:
            return None

        if not isinstance(document, LazyReference):
            return super(GenericLazyReferenceField, self).to_mongo(document)

        collection_name = document.document_type._get_collection_name()
        return SON((
            ('_cls', document.document_type._class_name),
            ('_ref', DBRef(collection_name, document.pk))
        ))
raise + message = u'Document must not have _id value before bulk write (%s)' + raise NotUniqueError(message % six.text_type(err)) except pymongo.errors.OperationFailure as err: message = 'Could not save document (%s)' if re.match('^E1100[01] duplicate key', six.text_type(err)): @@ -368,7 +381,6 @@ class BaseQuerySet(object): signals.post_bulk_insert.send( self._document, documents=docs, loaded=False, **signal_kwargs) return return_one and ids[0] or ids - documents = self.in_bulk(ids) results = [] for obj_id in ids: @@ -486,8 +498,9 @@ class BaseQuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. - :param full_result: Return the full result rather than just the number - updated. + :param full_result: Return the full result dictionary rather than just the number + updated, e.g. return + ``{'n': 2, 'nModified': 2, 'ok': 1.0, 'updatedExisting': True}``. :param update: Django-style update keyword arguments .. 
versionadded:: 0.2 @@ -510,12 +523,15 @@ class BaseQuerySet(object): else: update['$set'] = {'_cls': queryset._document._class_name} try: - result = queryset._collection.update(query, update, multi=multi, - upsert=upsert, **write_concern) + with set_write_concern(queryset._collection, write_concern) as collection: + update_func = collection.update_one + if multi: + update_func = collection.update_many + result = update_func(query, update, upsert=upsert) if full_result: return result - elif result: - return result['n'] + elif result.raw_result: + return result.raw_result['n'] except pymongo.errors.DuplicateKeyError as err: raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) except pymongo.errors.OperationFailure as err: @@ -544,10 +560,10 @@ class BaseQuerySet(object): write_concern=write_concern, full_result=True, **update) - if atomic_update['updatedExisting']: + if atomic_update.raw_result['updatedExisting']: document = self.get() else: - document = self._document.objects.with_id(atomic_update['upserted']) + document = self._document.objects.with_id(atomic_update.upserted_id) return document def update_one(self, upsert=False, write_concern=None, **update): @@ -1182,6 +1198,10 @@ class BaseQuerySet(object): pipeline = initial_pipeline + list(pipeline) + if IS_PYMONGO_3 and self._read_preference is not None: + return self._collection.with_options(read_preference=self._read_preference) \ + .aggregate(pipeline, cursor={}, **kwargs) + return self._collection.aggregate(pipeline, cursor={}, **kwargs) # JS functionality @@ -1578,6 +1598,9 @@ class BaseQuerySet(object): if self._batch_size is not None: self._cursor_obj.batch_size(self._batch_size) + if self._comment is not None: + self._cursor_obj.comment(self._comment) + return self._cursor_obj def __deepcopy__(self, memo): diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index a9907ada..5f777f41 100644 --- a/mongoengine/queryset/transform.py +++ 
b/mongoengine/queryset/transform.py @@ -101,21 +101,8 @@ def query(_doc_cls=None, **kwargs): value = value['_id'] elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): - # Raise an error if the in/nin/all/near param is not iterable. We need a - # special check for BaseDocument, because - although it's iterable - using - # it as such in the context of this method is most definitely a mistake. - BaseDocument = _import_class('BaseDocument') - if isinstance(value, BaseDocument): - raise TypeError("When using the `in`, `nin`, `all`, or " - "`near`-operators you can\'t use a " - "`Document`, you must wrap your object " - "in a list (object -> [object]).") - elif not hasattr(value, '__iter__'): - raise TypeError("The `in`, `nin`, `all`, or " - "`near`-operators must be applied to an " - "iterable (e.g. a list).") - else: - value = [field.prepare_query_value(op, v) for v in value] + # Raise an error if the in/nin/all/near param is not iterable. + value = _prepare_query_for_iterable(field, op, value) # If we're querying a GenericReferenceField, we need to alter the # key depending on the value: @@ -284,9 +271,15 @@ def update(_doc_cls=None, **update): if isinstance(field, GeoJsonBaseField): value = field.to_mongo(value) - if op == 'push' and isinstance(value, (list, tuple, set)): + if op == 'pull': + if field.required or value is not None: + if match == 'in' and not isinstance(value, dict): + value = _prepare_query_for_iterable(field, op, value) + else: + value = field.prepare_query_value(op, value) + elif op == 'push' and isinstance(value, (list, tuple, set)): value = [field.prepare_query_value(op, v) for v in value] - elif op in (None, 'set', 'push', 'pull'): + elif op in (None, 'set', 'push'): if field.required or value is not None: value = field.prepare_query_value(op, value) elif op in ('pushAll', 'pullAll'): @@ -321,11 +314,17 @@ def update(_doc_cls=None, **update): field_classes = [c.__class__ for c in cleaned_fields] field_classes.reverse() 
def _prepare_query_for_iterable(field, op, value):
    """Validate that *value* is a legitimate iterable for in/nin/all/near
    queries and return each element prepared by the field.

    BaseDocument instances are rejected explicitly: although they are
    iterable, passing one here is almost certainly a caller mistake.
    """
    document_base = _import_class('BaseDocument')

    if isinstance(value, document_base):
        raise TypeError("When using the `in`, `nin`, `all`, or "
                        "`near`-operators you can\'t use a "
                        "`Document`, you must wrap your object "
                        "in a list (object -> [object]).")

    if not hasattr(value, '__iter__'):
        raise TypeError("The `in`, `nin`, `all`, or "
                        "`near`-operators must be applied to an "
                        "iterable (e.g. a list).")

    return [field.prepare_query_value(op, item) for item in value]
self.assertEqual(actual, expected) + def test_list_indexes_inheritance(self): """ ensure that all of the indexes are listed regardless of the super- or sub-class that we call it from diff --git a/tests/document/instance.py b/tests/document/instance.py index 609bc900..cffe4f30 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -8,9 +8,12 @@ import weakref from datetime import datetime from bson import DBRef, ObjectId +from pymongo.errors import DuplicateKeyError + from tests import fixtures from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest, PickleDynamicEmbedded, PickleDynamicTest) +from tests.utils import MongoDBTestCase from mongoengine import * from mongoengine.base import get_document, _document_registry @@ -30,12 +33,9 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), __all__ = ("InstanceTest",) -class InstanceTest(unittest.TestCase): +class InstanceTest(MongoDBTestCase): def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - class Job(EmbeddedDocument): name = StringField() years = IntField() @@ -476,6 +476,24 @@ class InstanceTest(unittest.TestCase): doc.save() doc.reload() + def test_reload_with_changed_fields(self): + """Ensures reloading will not affect changed fields""" + class User(Document): + name = StringField() + number = IntField() + User.drop_collection() + + user = User(name="Bob", number=1).save() + user.name = "John" + user.number = 2 + + self.assertEqual(user._get_changed_fields(), ['name', 'number']) + user.reload('number') + self.assertEqual(user._get_changed_fields(), ['name']) + user.save() + user.reload() + self.assertEqual(user.name, "John") + def test_reload_referencing(self): """Ensures reloading updates weakrefs correctly.""" class Embedded(EmbeddedDocument): @@ -521,7 +539,7 @@ class InstanceTest(unittest.TestCase): doc.save() doc.dict_field['extra'] = 1 doc = doc.reload(10, 'list_field') - self.assertEqual(doc._get_changed_fields(), []) + 
self.assertEqual(doc._get_changed_fields(), ['dict_field.extra']) self.assertEqual(len(doc.list_field), 5) self.assertEqual(len(doc.dict_field), 3) self.assertEqual(len(doc.embedded_field.list_field), 4) @@ -532,21 +550,14 @@ class InstanceTest(unittest.TestCase): pass f = Foo() - try: + with self.assertRaises(Foo.DoesNotExist): f.reload() - except Foo.DoesNotExist: - pass - except Exception: - self.assertFalse("Threw wrong exception") f.save() f.delete() - try: + + with self.assertRaises(Foo.DoesNotExist): f.reload() - except Foo.DoesNotExist: - pass - except Exception: - self.assertFalse("Threw wrong exception") def test_reload_of_non_strict_with_special_field_name(self): """Ensures reloading works for documents with meta strict == False.""" @@ -716,12 +727,12 @@ class InstanceTest(unittest.TestCase): t = TestDocument(status="draft", pub_date=datetime.now()) - try: + with self.assertRaises(ValidationError) as cm: t.save() - except ValidationError as e: - expect_msg = "Draft entries may not have a publication date." - self.assertTrue(expect_msg in e.message) - self.assertEqual(e.to_dict(), {'__all__': expect_msg}) + + expected_msg = "Draft entries may not have a publication date." 
+ self.assertIn(expected_msg, cm.exception.message) + self.assertEqual(cm.exception.to_dict(), {'__all__': expected_msg}) t = TestDocument(status="published") t.save(clean=False) @@ -755,12 +766,13 @@ class InstanceTest(unittest.TestCase): TestDocument.drop_collection() t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) - try: + + with self.assertRaises(ValidationError) as cm: t.save() - except ValidationError as e: - expect_msg = "Value of z != x + y" - self.assertTrue(expect_msg in e.message) - self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) + + expected_msg = "Value of z != x + y" + self.assertIn(expected_msg, cm.exception.message) + self.assertEqual(cm.exception.to_dict(), {'doc': {'__all__': expected_msg}}) t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() self.assertEqual(t.doc.z, 35) @@ -1341,6 +1353,23 @@ class InstanceTest(unittest.TestCase): site = Site.objects.first() self.assertEqual(site.page.log_message, "Error: Dummy message") + def test_update_list_field(self): + """Test update on `ListField` with $pull + $in. + """ + class Doc(Document): + foo = ListField(StringField()) + + Doc.drop_collection() + doc = Doc(foo=['a', 'b', 'c']) + doc.save() + + # Update + doc = Doc.objects.first() + doc.update(pull__foo__in=['a', 'c']) + + doc = Doc.objects.first() + self.assertEqual(doc.foo, ['b']) + def test_embedded_update_db_field(self): """Test update on `EmbeddedDocumentField` fields when db_field is other than default. @@ -1884,6 +1913,25 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(BlogPost.objects.count(), 0) + def test_reverse_delete_rule_pull(self): + """Ensure that a referenced document is also deleted with + pull. 
+ """ + class Record(Document): + name = StringField() + children = ListField(ReferenceField('self', reverse_delete_rule=PULL)) + + Record.drop_collection() + + parent_record = Record(name='parent').save() + child_record = Record(name='child').save() + parent_record.children.append(child_record) + parent_record.save() + + child_record.delete() + self.assertEqual(Record.objects(name='parent').get().children, []) + + def test_reverse_delete_rule_with_custom_id_field(self): """Ensure that a referenced document with custom primary key is also deleted upon deletion. @@ -3094,6 +3142,64 @@ class InstanceTest(unittest.TestCase): self.assertEquals(p.id, None) p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here + def test_from_son_created_False_without_id(self): + class MyPerson(Document): + name = StringField() + + MyPerson.objects.delete() + + p = MyPerson.from_json('{"name": "a_fancy_name"}', created=False) + self.assertFalse(p._created) + self.assertIsNone(p.id) + p.save() + self.assertIsNotNone(p.id) + saved_p = MyPerson.objects.get(id=p.id) + self.assertEqual(saved_p.name, 'a_fancy_name') + + def test_from_son_created_False_with_id(self): + # 1854 + class MyPerson(Document): + name = StringField() + + MyPerson.objects.delete() + + p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False) + self.assertFalse(p._created) + self.assertEqual(p._changed_fields, []) + self.assertEqual(p.name, 'a_fancy_name') + self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + p.save() + + with self.assertRaises(DoesNotExist): + # Since created=False and we gave an id in the json and _changed_fields is empty + # mongoengine assumes that the document exits with that structure already + # and calling .save() didn't save anything + MyPerson.objects.get(id=p.id) + + self.assertFalse(p._created) + p.name = 'a new fancy name' + self.assertEqual(p._changed_fields, ['name']) + p.save() + 
saved_p = MyPerson.objects.get(id=p.id) + self.assertEqual(saved_p.name, p.name) + + def test_from_son_created_True_with_an_id(self): + class MyPerson(Document): + name = StringField() + + MyPerson.objects.delete() + + p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True) + self.assertTrue(p._created) + self.assertEqual(p._changed_fields, []) + self.assertEqual(p.name, 'a_fancy_name') + self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + p.save() + + saved_p = MyPerson.objects.get(id=p.id) + self.assertEqual(saved_p, p) + self.assertEqual(p.name, 'a_fancy_name') + def test_null_field(self): # 734 class User(Document): @@ -3183,6 +3289,34 @@ class InstanceTest(unittest.TestCase): blog.reload() self.assertEqual(blog.tags, ['mongodb', 'code', 'python']) + def test_push_nested_list(self): + """Ensure that push update works in nested list""" + class BlogPost(Document): + slug = StringField() + tags = ListField() + + blog = BlogPost(slug="test").save() + blog.update(push__tags=["value1", 123]) + blog.reload() + self.assertEqual(blog.tags, [["value1", 123]]) + + def test_accessing_objects_with_indexes_error(self): + insert_result = self.db.company.insert_many([{'name': 'Foo'}, + {'name': 'Foo'}]) # Force 2 doc with same name + REF_OID = insert_result.inserted_ids[0] + self.db.user.insert_one({'company': REF_OID}) # Force 2 doc with same name + + class Company(Document): + name = StringField(unique=True) + + class User(Document): + company = ReferenceField(Company) + + + # Ensure index creation exception aren't swallowed (#1688) + with self.assertRaises(DuplicateKeyError): + User.objects().select_related() + if __name__ == '__main__': unittest.main() diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 7a0ccc25..7352d242 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -26,7 +26,7 @@ except ImportError: from mongoengine import * from mongoengine.connection import get_db from 
mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList, - _document_registry) + _document_registry, LazyReference) from tests.utils import MongoDBTestCase @@ -46,6 +46,17 @@ class FieldTest(MongoDBTestCase): md = MyDoc(dt='') self.assertRaises(ValidationError, md.save) + def test_date_from_empty_string(self): + """ + Ensure an exception is raised when trying to + cast an empty string to datetime. + """ + class MyDoc(Document): + dt = DateField() + + md = MyDoc(dt='') + self.assertRaises(ValidationError, md.save) + def test_datetime_from_whitespace_string(self): """ Ensure an exception is raised when trying to @@ -57,6 +68,17 @@ class FieldTest(MongoDBTestCase): md = MyDoc(dt=' ') self.assertRaises(ValidationError, md.save) + def test_date_from_whitespace_string(self): + """ + Ensure an exception is raised when trying to + cast a whitespace-only string to datetime. + """ + class MyDoc(Document): + dt = DateField() + + md = MyDoc(dt=' ') + self.assertRaises(ValidationError, md.save) + def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. 
@@ -66,13 +88,14 @@ class FieldTest(MongoDBTestCase): age = IntField(default=30, required=False) userid = StringField(default=lambda: 'test', required=True) created = DateTimeField(default=datetime.datetime.utcnow) + day = DateField(default=datetime.date.today) person = Person(name="Ross") # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) self.assertEqual(data_to_be_saved, - ['age', 'created', 'name', 'userid'] + ['age', 'created', 'day', 'name', 'userid'] ) self.assertTrue(person.validate() is None) @@ -81,16 +104,18 @@ class FieldTest(MongoDBTestCase): self.assertEqual(person.age, person.age) self.assertEqual(person.userid, person.userid) self.assertEqual(person.created, person.created) + self.assertEqual(person.day, person.day) self.assertEqual(person._data['name'], person.name) self.assertEqual(person._data['age'], person.age) self.assertEqual(person._data['userid'], person.userid) self.assertEqual(person._data['created'], person.created) + self.assertEqual(person._data['day'], person.day) # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) self.assertEqual( - data_to_be_saved, ['age', 'created', 'name', 'userid']) + data_to_be_saved, ['age', 'created', 'day', 'name', 'userid']) def test_default_values_set_to_None(self): """Ensure that default field values are used even when @@ -662,6 +687,32 @@ class FieldTest(MongoDBTestCase): log.time = 'ABC' self.assertRaises(ValidationError, log.validate) + def test_date_validation(self): + """Ensure that invalid values cannot be assigned to datetime + fields. 
+ """ + class LogEntry(Document): + time = DateField() + + log = LogEntry() + log.time = datetime.datetime.now() + log.validate() + + log.time = datetime.date.today() + log.validate() + + log.time = datetime.datetime.now().isoformat(' ') + log.validate() + + if dateutil: + log.time = datetime.datetime.now().isoformat('T') + log.validate() + + log.time = -1 + self.assertRaises(ValidationError, log.validate) + log.time = 'ABC' + self.assertRaises(ValidationError, log.validate) + def test_datetime_tz_aware_mark_as_changed(self): from mongoengine import connection @@ -733,6 +784,51 @@ class FieldTest(MongoDBTestCase): self.assertNotEqual(log.date, d1) self.assertEqual(log.date, d2) + def test_date(self): + """Tests showing pymongo date fields + + See: http://api.mongodb.org/python/current/api/bson/son.html#dt + """ + class LogEntry(Document): + date = DateField() + + LogEntry.drop_collection() + + # Test can save dates + log = LogEntry() + log.date = datetime.date.today() + log.save() + log.reload() + self.assertEqual(log.date, datetime.date.today()) + + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) + d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) + log = LogEntry() + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1.date()) + self.assertEqual(log.date, d2.date()) + + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) + d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000) + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1.date()) + self.assertEqual(log.date, d2.date()) + + if not six.PY3: + # Pre UTC dates microseconds below 1000 are dropped + # This does not seem to be true in PY3 + d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) + d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) + log.date = d1 + log.save() + log.reload() + self.assertEqual(log.date, d1.date()) + self.assertEqual(log.date, d2.date()) + def test_datetime_usage(self): """Tests for regular datetime fields""" class LogEntry(Document): @@ -787,6 +883,51 
@@ class FieldTest(MongoDBTestCase): ) self.assertEqual(logs.count(), 5) + def test_date_usage(self): + """Tests for regular datetime fields""" + class LogEntry(Document): + date = DateField() + + LogEntry.drop_collection() + + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1) + log = LogEntry() + log.date = d1 + log.validate() + log.save() + + for query in (d1, d1.isoformat(' ')): + log1 = LogEntry.objects.get(date=query) + self.assertEqual(log, log1) + + if dateutil: + log1 = LogEntry.objects.get(date=d1.isoformat('T')) + self.assertEqual(log, log1) + + # create additional 19 log entries for a total of 20 + for i in range(1971, 1990): + d = datetime.datetime(i, 1, 1, 0, 0, 1) + LogEntry(date=d).save() + + self.assertEqual(LogEntry.objects.count(), 20) + + # Test ordering + logs = LogEntry.objects.order_by("date") + i = 0 + while i < 19: + self.assertTrue(logs[i].date <= logs[i + 1].date) + i += 1 + + logs = LogEntry.objects.order_by("-date") + i = 0 + while i < 19: + self.assertTrue(logs[i].date >= logs[i + 1].date) + i += 1 + + # Test searching + logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) + self.assertEqual(logs.count(), 10) + def test_complexdatetime_storage(self): """Tests for complex datetime fields - which can handle microseconds without rounding. 
@@ -920,6 +1061,12 @@ class FieldTest(MongoDBTestCase): def test_list_validation(self): """Ensure that a list field only accepts lists with valid elements.""" + AccessLevelChoices = ( + ('a', u'Administration'), + ('b', u'Manager'), + ('c', u'Staff'), + ) + class User(Document): pass @@ -931,7 +1078,10 @@ class FieldTest(MongoDBTestCase): comments = ListField(EmbeddedDocumentField(Comment)) tags = ListField(StringField()) authors = ListField(ReferenceField(User)) + authors_as_lazy = ListField(LazyReferenceField(User)) generic = ListField(GenericReferenceField()) + generic_as_lazy = ListField(GenericLazyReferenceField()) + access_list = ListField(choices=AccessLevelChoices, display_sep=', ') User.drop_collection() BlogPost.drop_collection() @@ -949,6 +1099,17 @@ class FieldTest(MongoDBTestCase): post.tags = ('fun', 'leisure') post.validate() + post.access_list = 'a,b' + self.assertRaises(ValidationError, post.validate) + + post.access_list = ['c', 'd'] + self.assertRaises(ValidationError, post.validate) + + post.access_list = ['a', 'b'] + post.validate() + + self.assertEqual(post.get_access_list_display(), u'Administration, Manager') + post.comments = ['a'] self.assertRaises(ValidationError, post.validate) post.comments = 'yay' @@ -969,6 +1130,15 @@ class FieldTest(MongoDBTestCase): post.authors = [user] post.validate() + post.authors_as_lazy = [Comment()] + self.assertRaises(ValidationError, post.validate) + + post.authors_as_lazy = [User()] + self.assertRaises(ValidationError, post.validate) + + post.authors_as_lazy = [user] + post.validate() + post.generic = [1, 2] self.assertRaises(ValidationError, post.validate) @@ -981,6 +1151,18 @@ class FieldTest(MongoDBTestCase): post.generic = [user] post.validate() + post.generic_as_lazy = [1, 2] + self.assertRaises(ValidationError, post.validate) + + post.generic_as_lazy = [User(), Comment()] + self.assertRaises(ValidationError, post.validate) + + post.generic_as_lazy = [Comment()] + self.assertRaises(ValidationError, 
post.validate) + + post.generic_as_lazy = [user] + post.validate() + def test_sorted_list_sorting(self): """Ensure that a sorted list field properly sorts values. """ @@ -4356,6 +4538,51 @@ class CachedReferenceFieldTest(MongoDBTestCase): self.assertEqual(SocialData.objects(person__group=g2).count(), 1) self.assertEqual(SocialData.objects(person__group=g2).first(), s2) + def test_cached_reference_field_push_with_fields(self): + class Product(Document): + name = StringField() + + Product.drop_collection() + + class Basket(Document): + products = ListField(CachedReferenceField(Product, fields=['name'])) + + Basket.drop_collection() + product1 = Product(name='abc').save() + product2 = Product(name='def').save() + basket = Basket(products=[product1]).save() + self.assertEqual( + Basket.objects._collection.find_one(), + { + '_id': basket.pk, + 'products': [ + { + '_id': product1.pk, + 'name': product1.name + } + ] + } + ) + # push to list + basket.update(push__products=product2) + basket.reload() + self.assertEqual( + Basket.objects._collection.find_one(), + { + '_id': basket.pk, + 'products': [ + { + '_id': product1.pk, + 'name': product1.name + }, + { + '_id': product2.pk, + 'name': product2.name + } + ] + } + ) + def test_cached_reference_field_update_all(self): class Person(Document): TYPES = ( @@ -4598,5 +4825,522 @@ class CachedReferenceFieldTest(MongoDBTestCase): self.assertTrue(isinstance(ocorrence.animal, Animal)) +class LazyReferenceFieldTest(MongoDBTestCase): + def test_lazy_reference_config(self): + # Make sure ReferenceField only accepts a document class or a string + # with a document class name. 
+ self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument) + + def test_lazy_reference_simple(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(person="test", animal=animal).save() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + fetched_animal = p.animal.fetch() + self.assertEqual(fetched_animal, animal) + # `fetch` keep cache on referenced document by default... + animal.tag = "not so heavy" + animal.save() + double_fetch = p.animal.fetch() + self.assertIs(fetched_animal, double_fetch) + self.assertEqual(double_fetch.tag, "heavy") + # ...unless specified otherwise + fetch_force = p.animal.fetch(force=True) + self.assertIsNot(fetch_force, fetched_animal) + self.assertEqual(fetch_force.tag, "not so heavy") + + def test_lazy_reference_fetch_invalid_ref(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(person="test", animal=animal).save() + animal.delete() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + with self.assertRaises(DoesNotExist): + p.animal.fetch() + + def test_lazy_reference_set(self): + class Animal(Document): + meta = {'allow_inheritance': True} + + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + class SubAnimal(Animal): + nick = StringField() + + animal = Animal(name="Leopard", tag="heavy").save() + sub_animal = SubAnimal(nick='doggo', 
name='dog').save() + for ref in ( + animal, + animal.pk, + DBRef(animal._get_collection_name(), animal.pk), + LazyReference(Animal, animal.pk), + + sub_animal, + sub_animal.pk, + DBRef(sub_animal._get_collection_name(), sub_animal.pk), + LazyReference(SubAnimal, sub_animal.pk), + ): + p = Ocurrence(person="test", animal=ref).save() + p.reload() + self.assertIsInstance(p.animal, LazyReference) + p.animal.fetch() + + def test_lazy_reference_bad_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + class BadDoc(Document): + pass + + animal = Animal(name="Leopard", tag="heavy").save() + baddoc = BadDoc().save() + for bad in ( + 42, + 'foo', + baddoc, + DBRef(baddoc._get_collection_name(), animal.pk), + LazyReference(BadDoc, animal.pk) + ): + with self.assertRaises(ValidationError): + p = Ocurrence(person="test", animal=bad).save() + + def test_lazy_reference_query_conversion(self): + """Ensure that LazyReferenceFields can be queried using objects and values + of the type of the primary key of the referenced object. 
+ """ + class Member(Document): + user_num = IntField(primary_key=True) + + class BlogPost(Document): + title = StringField() + author = LazyReferenceField(Member, dbref=False) + + Member.drop_collection() + BlogPost.drop_collection() + + m1 = Member(user_num=1) + m1.save() + m2 = Member(user_num=2) + m2.save() + + post1 = BlogPost(title='post 1', author=m1) + post1.save() + + post2 = BlogPost(title='post 2', author=m2) + post2.save() + + post = BlogPost.objects(author=m1).first() + self.assertEqual(post.id, post1.id) + + post = BlogPost.objects(author=m2).first() + self.assertEqual(post.id, post2.id) + + # Same thing by passing a LazyReference instance + post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() + self.assertEqual(post.id, post2.id) + + def test_lazy_reference_query_conversion_dbref(self): + """Ensure that LazyReferenceFields can be queried using objects and values + of the type of the primary key of the referenced object. + """ + class Member(Document): + user_num = IntField(primary_key=True) + + class BlogPost(Document): + title = StringField() + author = LazyReferenceField(Member, dbref=True) + + Member.drop_collection() + BlogPost.drop_collection() + + m1 = Member(user_num=1) + m1.save() + m2 = Member(user_num=2) + m2.save() + + post1 = BlogPost(title='post 1', author=m1) + post1.save() + + post2 = BlogPost(title='post 2', author=m2) + post2.save() + + post = BlogPost.objects(author=m1).first() + self.assertEqual(post.id, post1.id) + + post = BlogPost.objects(author=m2).first() + self.assertEqual(post.id, post2.id) + + # Same thing by passing a LazyReference instance + post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() + self.assertEqual(post.id, post2.id) + + def test_lazy_reference_passthrough(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + animal = LazyReferenceField(Animal, passthrough=False) + animal_passthrough = LazyReferenceField(Animal, 
passthrough=True) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(animal=animal, animal_passthrough=animal).save() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + with self.assertRaises(KeyError): + p.animal['name'] + with self.assertRaises(AttributeError): + p.animal.name + self.assertEqual(p.animal.pk, animal.pk) + + self.assertEqual(p.animal_passthrough.name, "Leopard") + self.assertEqual(p.animal_passthrough['name'], "Leopard") + + # Should not be able to access referenced document's methods + with self.assertRaises(AttributeError): + p.animal.save + with self.assertRaises(KeyError): + p.animal['save'] + + def test_lazy_reference_not_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + Ocurrence(person='foo').save() + p = Ocurrence.objects.get() + self.assertIs(p.animal, None) + + def test_lazy_reference_equality(self): + class Animal(Document): + name = StringField() + tag = StringField() + + Animal.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + animalref = LazyReference(Animal, animal.pk) + self.assertEqual(animal, animalref) + self.assertEqual(animalref, animal) + + other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90")) + self.assertNotEqual(animal, other_animalref) + self.assertNotEqual(other_animalref, animal) + + def test_lazy_reference_embedded(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class EmbeddedOcurrence(EmbeddedDocument): + in_list = ListField(LazyReferenceField(Animal)) + direct = LazyReferenceField(Animal) + + class Ocurrence(Document): + in_list = ListField(LazyReferenceField(Animal)) + in_embedded = EmbeddedDocumentField(EmbeddedOcurrence) + direct = 
LazyReferenceField(Animal) + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal1 = Animal('doggo').save() + animal2 = Animal('cheeta').save() + + def check_fields_type(occ): + self.assertIsInstance(occ.direct, LazyReference) + for elem in occ.in_list: + self.assertIsInstance(elem, LazyReference) + self.assertIsInstance(occ.in_embedded.direct, LazyReference) + for elem in occ.in_embedded.in_list: + self.assertIsInstance(elem, LazyReference) + + occ = Ocurrence( + in_list=[animal1, animal2], + in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, + direct=animal1 + ).save() + check_fields_type(occ) + occ.reload() + check_fields_type(occ) + occ.direct = animal1.id + occ.in_list = [animal1.id, animal2.id] + occ.in_embedded.direct = animal1.id + occ.in_embedded.in_list = [animal1.id, animal2.id] + check_fields_type(occ) + + +class GenericLazyReferenceFieldTest(MongoDBTestCase): + def test_generic_lazy_reference_simple(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard", tag="heavy").save() + Ocurrence(person="test", animal=animal).save() + p = Ocurrence.objects.get() + self.assertIsInstance(p.animal, LazyReference) + fetched_animal = p.animal.fetch() + self.assertEqual(fetched_animal, animal) + # `fetch` keep cache on referenced document by default... 
+ animal.tag = "not so heavy" + animal.save() + double_fetch = p.animal.fetch() + self.assertIs(fetched_animal, double_fetch) + self.assertEqual(double_fetch.tag, "heavy") + # ...unless specified otherwise + fetch_force = p.animal.fetch(force=True) + self.assertIsNot(fetch_force, fetched_animal) + self.assertEqual(fetch_force.tag, "not so heavy") + + def test_generic_lazy_reference_choices(self): + class Animal(Document): + name = StringField() + + class Vegetal(Document): + name = StringField() + + class Mineral(Document): + name = StringField() + + class Ocurrence(Document): + living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal]) + thing = GenericLazyReferenceField() + + Animal.drop_collection() + Vegetal.drop_collection() + Mineral.drop_collection() + Ocurrence.drop_collection() + + animal = Animal(name="Leopard").save() + vegetal = Vegetal(name="Oak").save() + mineral = Mineral(name="Granite").save() + + occ_animal = Ocurrence(living_thing=animal, thing=animal).save() + occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save() + with self.assertRaises(ValidationError): + Ocurrence(living_thing=mineral).save() + + occ = Ocurrence.objects.get(living_thing=animal) + self.assertEqual(occ, occ_animal) + self.assertIsInstance(occ.thing, LazyReference) + self.assertIsInstance(occ.living_thing, LazyReference) + + occ.thing = vegetal + occ.living_thing = vegetal + occ.save() + + occ.thing = mineral + occ.living_thing = mineral + with self.assertRaises(ValidationError): + occ.save() + + def test_generic_lazy_reference_set(self): + class Animal(Document): + meta = {'allow_inheritance': True} + + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + class SubAnimal(Animal): + nick = StringField() + + animal = Animal(name="Leopard", tag="heavy").save() + sub_animal = SubAnimal(nick='doggo', 
name='dog').save() + for ref in ( + animal, + LazyReference(Animal, animal.pk), + {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)}, + + sub_animal, + LazyReference(SubAnimal, sub_animal.pk), + {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)}, + ): + p = Ocurrence(person="test", animal=ref).save() + p.reload() + self.assertIsInstance(p.animal, (LazyReference, Document)) + p.animal.fetch() + + def test_generic_lazy_reference_bad_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField(choices=['Animal']) + + Animal.drop_collection() + Ocurrence.drop_collection() + + class BadDoc(Document): + pass + + animal = Animal(name="Leopard", tag="heavy").save() + baddoc = BadDoc().save() + for bad in ( + 42, + 'foo', + baddoc, + LazyReference(BadDoc, animal.pk) + ): + with self.assertRaises(ValidationError): + p = Ocurrence(person="test", animal=bad).save() + + def test_generic_lazy_reference_query_conversion(self): + class Member(Document): + user_num = IntField(primary_key=True) + + class BlogPost(Document): + title = StringField() + author = GenericLazyReferenceField() + + Member.drop_collection() + BlogPost.drop_collection() + + m1 = Member(user_num=1) + m1.save() + m2 = Member(user_num=2) + m2.save() + + post1 = BlogPost(title='post 1', author=m1) + post1.save() + + post2 = BlogPost(title='post 2', author=m2) + post2.save() + + post = BlogPost.objects(author=m1).first() + self.assertEqual(post.id, post1.id) + + post = BlogPost.objects(author=m2).first() + self.assertEqual(post.id, post2.id) + + # Same thing by passing a LazyReference instance + post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() + self.assertEqual(post.id, post2.id) + + def test_generic_lazy_reference_not_set(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class 
Ocurrence(Document): + person = StringField() + animal = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + Ocurrence(person='foo').save() + p = Ocurrence.objects.get() + self.assertIs(p.animal, None) + + def test_generic_lazy_reference_embedded(self): + class Animal(Document): + name = StringField() + tag = StringField() + + class EmbeddedOcurrence(EmbeddedDocument): + in_list = ListField(GenericLazyReferenceField()) + direct = GenericLazyReferenceField() + + class Ocurrence(Document): + in_list = ListField(GenericLazyReferenceField()) + in_embedded = EmbeddedDocumentField(EmbeddedOcurrence) + direct = GenericLazyReferenceField() + + Animal.drop_collection() + Ocurrence.drop_collection() + + animal1 = Animal('doggo').save() + animal2 = Animal('cheeta').save() + + def check_fields_type(occ): + self.assertIsInstance(occ.direct, LazyReference) + for elem in occ.in_list: + self.assertIsInstance(elem, LazyReference) + self.assertIsInstance(occ.in_embedded.direct, LazyReference) + for elem in occ.in_embedded.in_list: + self.assertIsInstance(elem, LazyReference) + + occ = Ocurrence( + in_list=[animal1, animal2], + in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, + direct=animal1 + ).save() + check_fields_type(occ) + occ.reload() + check_fields_type(occ) + animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)} + animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)} + occ.direct = animal1_ref + occ.in_list = [animal1_ref, animal2_ref] + occ.in_embedded.direct = animal1_ref + occ.in_embedded.in_list = [animal1_ref, animal2_ref] + check_fields_type(occ) + + if __name__ == '__main__': unittest.main() diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index 8364d5ef..841e7c7d 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -54,7 +54,7 @@ class FileTest(MongoDBTestCase): result = PutFile.objects.first() 
self.assertTrue(putfile == result) - self.assertEqual("%s" % result.the_file, "<GridFSProxy: my_file>") + self.assertEqual("%s" % result.the_file, "<GridFSProxy: %s>" % result.the_file.grid_id) self.assertEqual(result.the_file.read(), text) self.assertEqual(result.the_file.content_type, content_type) result.the_file.delete() # Remove file from GridFS diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index c78ed985..a405e892 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -9,6 +9,7 @@ from nose.plugins.skip import SkipTest import pymongo from pymongo.errors import ConfigurationError from pymongo.read_preferences import ReadPreference +from pymongo.results import UpdateResult import six from mongoengine import * @@ -589,6 +590,20 @@ class QuerySetTest(unittest.TestCase): Scores.objects(id=scores.id).update(max__high_score=500) self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) + @needs_mongodb_v26 + def test_update_multiple(self): + class Product(Document): + item = StringField() + price = FloatField() + + product = Product.objects.create(item='ABC', price=10.99) + product = Product.objects.create(item='ABC', price=10.99) + Product.objects(id=product.id).update(mul__price=1.25) + self.assertEqual(Product.objects.get(id=product.id).price, 13.7375) + unknown_product = Product.objects.create(item='Unknown') + Product.objects(id=unknown_product.id).update(mul__price=100) + self.assertEqual(Product.objects.get(id=unknown_product.id).price, 0) + def test_updates_can_have_match_operators(self): class Comment(EmbeddedDocument): @@ -656,14 +671,14 @@ class QuerySetTest(unittest.TestCase): result = self.Person(name="Bob", age=25).update( upsert=True, full_result=True) - self.assertTrue(isinstance(result, dict)) - self.assertTrue("upserted" in result) - self.assertFalse(result["updatedExisting"]) + self.assertTrue(isinstance(result, UpdateResult)) + self.assertTrue("upserted" in result.raw_result) + self.assertFalse(result.raw_result["updatedExisting"]) 
bob = self.Person.objects.first() result = bob.update(set__age=30, full_result=True) - self.assertTrue(isinstance(result, dict)) - self.assertTrue(result["updatedExisting"]) + self.assertTrue(isinstance(result, UpdateResult)) + self.assertTrue(result.raw_result["updatedExisting"]) self.Person(name="Bob", age=20).save() result = self.Person.objects(name="Bob").update( @@ -830,11 +845,8 @@ class QuerySetTest(unittest.TestCase): blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) Blog.objects.insert(blogs, load_bulk=False) - if mongodb_version < (2, 6): - self.assertEqual(q, 1) - else: - # profiling logs each doc now in the bulk op - self.assertEqual(q, 99) + # profiling logs each doc now in the bulk op + self.assertEqual(q, 99) Blog.drop_collection() Blog.ensure_indexes() @@ -843,11 +855,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(q, 0) Blog.objects.insert(blogs) - if mongodb_version < (2, 6): - self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch - else: - # 99 for insert, and 1 for in bulk fetch - self.assertEqual(q, 100) + self.assertEqual(q, 100) # 99 for insert 1 for fetch Blog.drop_collection() @@ -912,10 +920,6 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(Blog.objects.count(), 2) - Blog.objects.insert([blog2, blog3], - write_concern={"w": 0, 'continue_on_error': True}) - self.assertEqual(Blog.objects.count(), 3) - def test_get_changed_fields_query_count(self): """Make sure we don't perform unnecessary db operations when none of document's fields were updated. 
@@ -1929,6 +1933,21 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java']) + def test_update_push_list_of_list(self): + """Ensure that the 'push' update operation works in the list of list + """ + class BlogPost(Document): + slug = StringField() + tags = ListField() + + BlogPost.drop_collection() + + post = BlogPost(slug="test").save() + + BlogPost.objects.filter(slug="test").update(push__tags=["value1", 123]) + post.reload() + self.assertEqual(post.tags, [["value1", 123]]) + def test_update_push_and_pull_add_to_set(self): """Ensure that the 'pull' update operation works correctly. """ @@ -2071,6 +2090,23 @@ class QuerySetTest(unittest.TestCase): Site.objects(id=s.id).update_one( pull_all__collaborators__helpful__user=['Ross']) + def test_pull_in_genericembedded_field(self): + + class Foo(EmbeddedDocument): + name = StringField() + + class Bar(Document): + foos = ListField(GenericEmbeddedDocumentField( + choices=[Foo, ])) + + Bar.drop_collection() + + foo = Foo(name="bar") + bar = Bar(foos=[foo]).save() + Bar.objects(id=bar.id).update(pull__foos=foo) + bar.reload() + self.assertEqual(len(bar.foos), 0) + def test_update_one_pop_generic_reference(self): class BlogTag(Document): @@ -2164,6 +2200,24 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(message.authors[1].name, "Ross") self.assertEqual(message.authors[2].name, "Adam") + def test_set_generic_embedded_documents(self): + + class Bar(EmbeddedDocument): + name = StringField() + + class User(Document): + username = StringField() + bar = GenericEmbeddedDocumentField(choices=[Bar,]) + + User.drop_collection() + + User(username='abc').save() + User.objects(username='abc').update( + set__bar=Bar(name='test'), upsert=True) + + user = User.objects(username='abc').first() + self.assertEqual(user.bar.name, "test") + def test_reload_embedded_docs_instance(self): class SubDoc(EmbeddedDocument): @@ -2333,14 +2387,19 @@ class 
QuerySetTest(unittest.TestCase): age = IntField() with db_ops_tracker() as q: - adult = (User.objects.filter(age__gte=18) + adult1 = (User.objects.filter(age__gte=18) .comment('looking for an adult') .first()) + + adult2 = (User.objects.comment('looking for an adult') + .filter(age__gte=18) + .first()) + ops = q.get_ops() - self.assertEqual(len(ops), 1) - op = ops[0] - self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}}) - self.assertEqual(op['query']['$comment'], 'looking for an adult') + self.assertEqual(len(ops), 2) + for op in ops: + self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}}) + self.assertEqual(op['query']['$comment'], 'looking for an adult') def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. @@ -4379,6 +4438,25 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(bars._cursor._Cursor__read_preference, ReadPreference.SECONDARY_PREFERRED) + @needs_mongodb_v26 + def test_read_preference_aggregation_framework(self): + class Bar(Document): + txt = StringField() + + meta = { + 'indexes': ['txt'] + } + # Aggregates with read_preference + bars = Bar.objects \ + .read_preference(ReadPreference.SECONDARY_PREFERRED) \ + .aggregate() + if IS_PYMONGO_3: + self.assertEqual(bars._CommandCursor__collection.read_preference, + ReadPreference.SECONDARY_PREFERRED) + else: + self.assertNotEqual(bars._CommandCursor__collection.read_preference, + ReadPreference.SECONDARY_PREFERRED) + def test_json_simple(self): class Embedded(EmbeddedDocument): @@ -4790,6 +4868,30 @@ class QuerySetTest(unittest.TestCase): for obj in C.objects.no_sub_classes(): self.assertEqual(obj.__class__, C) + def test_query_generic_embedded_document(self): + """Ensure that querying sub field on generic_embedded_field works + """ + class A(EmbeddedDocument): + a_name = StringField() + + class B(EmbeddedDocument): + b_name = StringField() + + class Doc(Document): + document = GenericEmbeddedDocumentField(choices=(A, B)) + + Doc.drop_collection() + 
Doc(document=A(a_name='A doc')).save() + Doc(document=B(b_name='B doc')).save() + + # Using raw in filter working fine + self.assertEqual(Doc.objects( + __raw__={'document.a_name': 'A doc'}).count(), 1) + self.assertEqual(Doc.objects( + __raw__={'document.b_name': 'B doc'}).count(), 1) + self.assertEqual(Doc.objects(document__a_name='A doc').count(), 1) + self.assertEqual(Doc.objects(document__b_name='B doc').count(), 1) + def test_query_reference_to_custom_pk_doc(self): class A(Document): diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index 20ab0b3f..38098432 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -1,5 +1,7 @@ import unittest +from bson.son import SON + from mongoengine import * from mongoengine.queryset import Q, transform @@ -28,12 +30,16 @@ class TransformTest(unittest.TestCase): {'name': {'$exists': True}}) def test_transform_update(self): + class LisDoc(Document): + foo = ListField(StringField()) + class DicDoc(Document): dictField = DictField() class Doc(Document): pass + LisDoc.drop_collection() DicDoc.drop_collection() Doc.drop_collection() @@ -50,6 +56,20 @@ class TransformTest(unittest.TestCase): update = transform.update(DicDoc, pull__dictField__test=doc) self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) + + update = transform.update(LisDoc, pull__foo__in=['a']) + self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}}) + + def test_transform_update_push(self): + """Ensure the differences in behavior between 'push' and 'push_all'""" + class BlogPost(Document): + tags = ListField(StringField()) + + update = transform.update(BlogPost, push__tags=['mongo', 'db']) + self.assertEqual(update, {'$push': {'tags': ['mongo', 'db']}}) + + update = transform.update(BlogPost, push_all__tags=['mongo', 'db']) + self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}}) def test_query_field_name(self): """Ensure that the correct field name is used when querying. 
@@ -240,7 +260,31 @@ class TransformTest(unittest.TestCase): events = Event.objects(location__within=box) with self.assertRaises(InvalidQueryError): events.count() + + def test_update_pull_for_list_fields(self): + """ + Test added to check pull operation in update for + EmbeddedDocumentListField which is inside a EmbeddedDocumentField + """ + class Word(EmbeddedDocument): + word = StringField() + index = IntField() + + class SubDoc(EmbeddedDocument): + heading = ListField(StringField()) + text = EmbeddedDocumentListField(Word) + + class MainDoc(Document): + title = StringField() + content = EmbeddedDocumentField(SubDoc) + + word = Word(word='abc', index=1) + update = transform.update(MainDoc, pull__content__text=word) + self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}}) - + update = transform.update(MainDoc, pull__content__heading='xyz') + self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}}) + + if __name__ == '__main__': unittest.main() diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 79381c5a..1ea562a5 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,6 +1,21 @@ import unittest -from mongoengine.base.datastructures import StrictDict +from mongoengine.base.datastructures import StrictDict, BaseList + + +class TestBaseList(unittest.TestCase): + + def test_iter_simple(self): + values = [True, False, True, False] + base_list = BaseList(values, instance=None, name='my_name') + self.assertEqual(values, list(base_list)) + + def test_iter_allow_modification_while_iterating_withou_error(self): + # regular list allows for this, thus this subclass must comply to that + base_list = BaseList([True, False, True, False], instance=None, name='my_name') + for idx, val in enumerate(base_list): + if val: + base_list.pop(idx) class TestStrictDict(unittest.TestCase): diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 00000000..562cc1ff --- 
/dev/null +++ b/tests/test_utils.py @@ -0,0 +1,38 @@ +import unittest +import re + +from mongoengine.base.utils import LazyRegexCompiler + +signal_output = [] + + +class LazyRegexCompilerTest(unittest.TestCase): + + def test_lazy_regex_compiler_verify_laziness_of_descriptor(self): + class UserEmail(object): + EMAIL_REGEX = LazyRegexCompiler('@', flags=32) + + descriptor = UserEmail.__dict__['EMAIL_REGEX'] + self.assertIsNone(descriptor._compiled_regex) + + regex = UserEmail.EMAIL_REGEX + self.assertEqual(regex, re.compile('@', flags=32)) + self.assertEqual(regex.search('user@domain.com').group(), '@') + + user_email = UserEmail() + self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX) + + def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self): + class UserEmail(object): + EMAIL_REGEX = LazyRegexCompiler('@') + + user_email = UserEmail() + with self.assertRaises(AttributeError): + user_email.EMAIL_REGEX = re.compile('@') + + def test_lazy_regex_compiler_verify_can_override_class_attr(self): + class UserEmail(object): + EMAIL_REGEX = LazyRegexCompiler('@') + + UserEmail.EMAIL_REGEX = re.compile('cookies') + self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies') diff --git a/tox.ini b/tox.ini index 7f0d36e4..815d2acc 100644 --- a/tox.ini +++ b/tox.ini @@ -1,13 +1,12 @@ [tox] -envlist = {py27,py35,pypy,pypy3}-{mg27,mg28,mg30} +envlist = {py27,py35,pypy,pypy3}-{mg35,mg3x} [testenv] commands = python setup.py nosetests {posargs} deps = nose - mg27: PyMongo<2.8 - mg28: PyMongo>=2.8,<2.9 - mg30: PyMongo>=3.0 + mg35: PyMongo==3.5 + mg3x: PyMongo>=3.0,<3.7 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs