Compare commits

..

67 Commits

Author SHA1 Message Date
Stefan Wojcik
60571ce1de document breaking change where we dont allow outdated "from mongoengine.base import ErrorClass" imports 2016-12-11 18:02:53 -05:00
Stefan Wojcik
953123b3dc minor compat tweak 2016-12-11 17:25:37 -05:00
Stefan Wojcik
fdc1d94f47 slightly simpler condition in _clear_changed_fields 2016-12-11 17:22:38 -05:00
Stefan Wojcik
828d5d6d29 *finally* a working .landscape.yml 2016-12-11 16:38:33 -05:00
Stefan Wojcik
501f6f11ba try another landscape approach 2016-12-11 16:01:31 -05:00
Stefan Wojcik
1199f0d649 another attempt at the right landscape config 2016-12-11 16:00:07 -05:00
Stefan Wojcik
af6601a2e1 update changelog and upgrade docs 2016-12-11 15:56:04 -05:00
Stefan Wojcik
d51788050f Merge branch 'master' of github.com:MongoEngine/mongoengine into improve-health-2 2016-12-11 15:38:29 -05:00
Stefan Wojcik
93d8d97fbd fix .landscape.yml 2016-12-11 15:24:25 -05:00
Stefan Wojcik
dc15195dd8 merge master into improve-health-2 2016-12-11 15:02:51 -05:00
Stefan Wojcik
688ea4f0f2 add long to built-ins in landscape 2016-12-11 15:01:51 -05:00
Stefan Wojcik
a12abe2da4 add xrange as a valid built-in in landscape 2016-12-11 14:56:34 -05:00
Stefan Wojcik
7ffaace4dd update setup.py classifiers 2016-12-11 14:33:53 -05:00
Stefan Wojcik
3ebe3748fa use with self.assertRaises for readability 2016-12-10 22:33:39 -05:00
Stefan Wojcik
a8884391c2 restore cover-package in setup.cfg 2016-12-10 21:04:04 -05:00
Stefan Wojcik
eb903987eb remove a print statement 2016-12-10 20:59:56 -05:00
Stefan Wojcik
b32cd19266 setup.cfg cleanup + only run coveralls on py27 2016-12-10 20:57:43 -05:00
Stefan Wojcik
500b182d17 deprecate explain's format param 2016-12-10 20:09:19 -05:00
Stefan Wojcik
5b70a451c4 fix improper syntax for datetimes 2016-12-10 19:59:15 -05:00
Stefan Wojcik
05fea58d6a remove a print statement 2016-12-10 19:58:41 -05:00
Stefan Wojcik
a9c205bffe remove ridiculous verify_exists option from URLField 2016-12-10 19:56:26 -05:00
Stefan Wojcik
fa9ca2555a remove more python 2.6 code + upgrade coverage + cleaner setup.py 2016-12-10 19:02:07 -05:00
Stefan Wojcik
d89cdff90a remove unused import and dont override built-in "id" 2016-12-10 14:29:48 -05:00
Stefan Wojcik
1e9a120f7e drop unused imports 2016-12-10 14:06:40 -05:00
Stefan Wojcik
94870d7377 merge master into improve-health-2 2016-12-10 13:53:13 -05:00
Stefan Wojcik
30ebe7c11e make the delete rules nicer and safer in BaseQuerySet.delete 2016-12-10 13:50:52 -05:00
Stefan Wojcik
566e8ee801 readd accidentally dropped line in setup.cfg 2016-12-10 13:25:22 -05:00
Stefan Wojcik
5778cb4b51 merge master into improve-health-2 2016-12-10 13:10:19 -05:00
Stefan Wojcik
37c86350f2 drop Python v2.6 support and use dict comprehensions 2016-12-10 12:57:54 -05:00
Stefan Wojcik
cb1eda480b remove outdated migration tests 2016-12-09 16:07:02 -05:00
Stefan Wojcik
e50b23f047 remove xrange and unused variables 2016-12-09 00:08:59 -05:00
Stefan Wojcik
6eb470a821 remove unnecessary usage of the "global" keyword 2016-12-08 23:50:36 -05:00
Stefan Wojcik
756d8b2ac5 remove ridiculous try-finally clause from BaseQuerySet.distinct 2016-12-08 23:44:36 -05:00
Stefan Wojcik
b99985eaf8 slightly cleaner and more performant BaseQuerySet.delete 2016-12-08 23:36:11 -05:00
Stefan Wojcik
4e1145d890 improve documentation regarding allow_inheritance 2016-12-08 23:06:17 -05:00
Stefan Wojcik
5b7b65a750 Prefer ' over " + minor docstring tweaks 2016-12-08 22:44:02 -05:00
Stefan Wojcik
76219901db nicer merge_index_specs (thanks @gukoff!) 2016-12-08 21:49:25 -05:00
Stefan Wojcik
44b86e29c6 more cleanup 2016-12-08 19:27:57 -05:00
Stefan Wojcik
f1f999a570 cleanup + nicer EmbeddedDocumentList.__match_all and __only_matches 2016-12-08 18:59:25 -05:00
Stefan Wojcik
9a32ff4c42 document and slightly simplify BaseDocument._lookup_field 2016-12-08 17:41:40 -05:00
Stefan Wojcik
4b024409ba fix benchmark.py + ignore it in landscape 2016-12-08 16:31:44 -05:00
Stefan Wojcik
b2825119ce remove unnecessary parentheses 2016-12-08 15:36:54 -05:00
Stefan Wojcik
b02904ee75 BREAKING CHANGE rename ConnectionError to MongoEngineConnectionError to avoid conflicts with PY3's built-in ConnectionError 2016-12-08 15:18:17 -05:00
Stefan Wojcik
c86155e571 cleaner connection code 2016-12-08 15:10:10 -05:00
Stefan Wojcik
fa6949eca2 no need to redefine PY3 - six already has it 2016-12-08 12:53:01 -05:00
Stefan Wojcik
18a91cc794 drop an unnecessary ALLOW_INHERITANCE 2016-12-08 11:22:51 -05:00
Stefan Wojcik
ae777e45b2 better comment about overriding allow_inheritance 2016-12-08 11:15:35 -05:00
Stefan Wojcik
0189818f3e clearer .landscape.yml + change self.__class__._meta to self._meta 2016-12-08 10:45:24 -05:00
Stefan Wojcik
205a975781 fix flake8 2016-12-08 10:34:09 -05:00
Stefan Wojcik
bb81652ffe nicer imports 2016-12-08 10:33:15 -05:00
Stefan Wojcik
edbecb4df0 minimize cyclic import warnings
remaining ones are wrongly attributed to mongoengine.common which only does inline imports
2016-12-08 10:12:46 -05:00
Stefan Wojcik
4373ea98cf more import fixes 2016-12-08 08:42:32 -05:00
Stefan Wojcik
8f657e0f7d cleaner code + prefer top-level import over _import_class 2016-12-08 08:18:33 -05:00
Stefan Wojcik
f6b8899bba fix broken inheritance for Document and EmbeddedDocument 2016-12-07 22:48:06 -05:00
Stefan Wojcik
c43a5fe760 add .landscape.yml 2016-12-07 00:13:32 -05:00
Stefan Wojcik
c1993de524 remove one last unicode + safer default param 2016-12-06 23:36:57 -05:00
Stefan Wojcik
bc83ba6a24 minor tweaks to Document._build_index_specs 2016-12-06 23:28:21 -05:00
Stefan Wojcik
0fc44efbcc minor tweak to python_support 2016-12-06 23:09:55 -05:00
Stefan Wojcik
1b36ca00e5 minor health tweak to benchmark.py 2016-12-06 23:04:38 -05:00
Stefan Wojcik
7dd4639037 pk as a property with a setter + get rid of basestring 2016-12-06 23:02:08 -05:00
Stefan Wojcik
557f9bd971 flake8 tweaks 2016-12-06 16:20:41 -05:00
Stefan Wojcik
59cac2b75c remove last few uses of "unicode" 2016-12-06 16:17:15 -05:00
Stefan Wojcik
548c7438b0 dont re-implement six 2016-12-06 16:14:53 -05:00
Stefan Wojcik
50df653768 imports working in py2 and py3 in mongoengine/__init__.py 2016-12-06 13:53:12 -05:00
Stefan Wojcik
bc6c84c408 remove unnecessary sys.path manipulation in tests 2016-12-06 13:27:10 -05:00
Stefan Wojcik
6b2cebb07b healthier, cleaner imports 2016-12-06 13:17:40 -05:00
Stefan Wojcik
db673a9033 ditch the old "except Exception, e" syntax 2016-12-05 23:23:38 -05:00
29 changed files with 299 additions and 937 deletions

View File

@@ -14,13 +14,13 @@ Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one <https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific <https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
issue or feature request. That way you avoid working on something issue or feature request. That way you avoid working on something
that might not be of interest or that has already been addressed. If in doubt that might not be of interest or that has already been addressed. If in doubt
post to the `user group <http://groups.google.com/group/mongoengine-users>` post to the `user group <http://groups.google.com/group/mongoengine-users>`
Supported Interpreters Supported Interpreters
---------------------- ----------------------
MongoEngine supports CPython 2.7 and newer. Language MongoEngine supports CPython 2.6 and newer. Language
features not supported by all interpreters can not be used. features not supported by all interpreters can not be used.
Please also ensure that your code is properly converted by Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.

View File

@@ -4,7 +4,7 @@ MongoEngine
:Info: MongoEngine is an ORM-like layer on top of PyMongo. :Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Repository: https://github.com/MongoEngine/mongoengine :Repository: https://github.com/MongoEngine/mongoengine
:Author: Harry Marr (http://github.com/hmarr) :Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan) :Maintainer: Ross Lawley (http://github.com/rozza)
.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master .. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
:target: https://travis-ci.org/MongoEngine/mongoengine :target: https://travis-ci.org/MongoEngine/mongoengine
@@ -35,22 +35,16 @@ setup.py install``.
Dependencies Dependencies
============ ============
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. At the very least, you'll need these two packages to use MongoEngine:
- pymongo>=2.7.1 - pymongo>=2.7.1
- six>=1.10.0 - sphinx (optional - for documentation generation)
If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
Optional Dependencies
---------------------
- **Image Fields**: Pillow>=2.0.0
- dateutil>=2.1.0 - dateutil>=2.1.0
If you need to use an ``ImageField`` or ``ImageGridFsProxy``: .. note
MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: PyMongo 3.0.1
- Pillow>=2.0.0
If you want to generate the documentation (e.g. to contribute to it):
- sphinx
Examples Examples
======== ========
@@ -63,7 +57,7 @@ Some simple examples of what MongoEngine code looks like:
class BlogPost(Document): class BlogPost(Document):
title = StringField(required=True, max_length=200) title = StringField(required=True, max_length=200)
posted = DateTimeField(default=datetime.datetime.utcnow) posted = DateTimeField(default=datetime.datetime.now)
tags = ListField(StringField(max_length=50)) tags = ListField(StringField(max_length=50))
meta = {'allow_inheritance': True} meta = {'allow_inheritance': True}
@@ -93,18 +87,17 @@ Some simple examples of what MongoEngine code looks like:
... print ... print
... ...
# Count all blog posts and its subtypes >>> len(BlogPost.objects)
>>> BlogPost.objects.count()
2 2
>>> TextPost.objects.count() >>> len(TextPost.objects)
1 1
>>> LinkPost.objects.count() >>> len(LinkPost.objects)
1 1
# Count tagged posts # Find tagged posts
>>> BlogPost.objects(tags='mongoengine').count() >>> len(BlogPost.objects(tags='mongoengine'))
2 2
>>> BlogPost.objects(tags='mongodb').count() >>> len(BlogPost.objects(tags='mongodb'))
1 1
Tests Tests
@@ -137,7 +130,8 @@ Community
<http://groups.google.com/group/mongoengine-users>`_ <http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list - `MongoEngine Developers mailing list
<http://groups.google.com/group/mongoengine-dev>`_ <http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_
Contributing Contributing
============ ============
We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ We welcome contributions! see the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_

View File

@@ -4,19 +4,13 @@ Changelog
Development Development
=========== ===========
- (Fill this out as you fix issues and develop your features). - (Fill this out as you fix issues and develop you features).
- Fixed using sets in field choices #1481
- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476
- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476
- Fixed connecting to a replica set with PyMongo 2.x #1436
- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237
Changes in 0.11.0 Changes in 0.11.0
================= =================
- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 - BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
- BREAKING CHANGE: Dropped Python 2.6 support. #1428 - BREAKING CHANGE: Dropped Python 2.6 support. #1428
- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428
- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
- Fixed absent rounding for DecimalField when `force_string` is set. #1103 - Fixed absent rounding for DecimalField when `force_string` is set. #1103
Changes in 0.10.8 Changes in 0.10.8

View File

@@ -33,7 +33,7 @@ the :attr:`host` to
corresponding parameters in :func:`~mongoengine.connect`: :: corresponding parameters in :func:`~mongoengine.connect`: ::
connect( connect(
db='test', name='test',
username='user', username='user',
password='12345', password='12345',
host='mongodb://admin:qwerty@localhost/production' host='mongodb://admin:qwerty@localhost/production'

View File

@@ -150,7 +150,7 @@ arguments can be set on all fields:
.. note:: If set, this field is also accessible through the `pk` field. .. note:: If set, this field is also accessible through the `pk` field.
:attr:`choices` (Default: None) :attr:`choices` (Default: None)
An iterable (e.g. list, tuple or set) of choices to which the value of this An iterable (e.g. a list or tuple) of choices to which the value of this
field should be limited. field should be limited.
Can be either be a nested tuples of value (stored in mongo) and a Can be either be a nested tuples of value (stored in mongo) and a
@@ -214,8 +214,8 @@ document class as the first argument::
Dictionary Fields Dictionary Fields
----------------- -----------------
Often, an embedded document may be used instead of a dictionary generally Often, an embedded document may be used instead of a dictionary generally
embedded documents are recommended as dictionaries dont support validation embedded documents are recommended as dictionaries dont support validation
or custom field types. However, sometimes you will not know the structure of what you want to or custom field types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::
@@ -361,6 +361,11 @@ Its value can take any of the following constants:
In Django, be sure to put all apps that have such delete rule declarations in In Django, be sure to put all apps that have such delete rule declarations in
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
.. warning::
Signals are not triggered when doing cascading updates / deletes - if this
is required you must manually handle the update / delete.
Generic reference fields Generic reference fields
'''''''''''''''''''''''' ''''''''''''''''''''''''
A second kind of reference field also exists, A second kind of reference field also exists,

View File

@@ -479,8 +479,6 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
first positional argument to :attr:`Document.objects` when you filter it by first positional argument to :attr:`Document.objects` when you filter it by
calling it with keyword arguments:: calling it with keyword arguments::
from mongoengine.queryset.visitor import Q
# Get published posts # Get published posts
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))

View File

@@ -142,4 +142,11 @@ cleaner looking while still allowing manual execution of the callback::
modified = DateTimeField() modified = DateTimeField()
ReferenceFields and Signals
---------------------------
Currently `reverse_delete_rule` does not trigger signals on the other part of
the relationship. If this is required you must manually handle the
reverse deletion.
.. _blinker: http://pypi.python.org/pypi/blinker .. _blinker: http://pypi.python.org/pypi/blinker

View File

@@ -2,20 +2,6 @@
Upgrading Upgrading
######### #########
Development
***********
(Fill this out whenever you introduce breaking changes to MongoEngine)
This release includes various fixes for the `BaseQuerySet` methods and how they
are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size
to an already-existing queryset wouldn't modify the underlying PyMongo cursor.
This has been fixed now, so you'll need to make sure that your code didn't rely
on the broken implementation.
Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private
`_clone_into`. If you directly used that method in your code, you'll need to
rename its occurrences.
0.11.0 0.11.0
****** ******
This release includes a major rehaul of MongoEngine's code quality and This release includes a major rehaul of MongoEngine's code quality and

View File

@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
list(signals.__all__) + list(errors.__all__)) list(signals.__all__) + list(errors.__all__))
VERSION = (0, 11, 0) VERSION = (0, 10, 9)
def get_version(): def get_version():

View File

@@ -5,7 +5,7 @@ __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set', 'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert', 'min', 'max', 'rename']) 'set_on_insert', 'min', 'max'])
_document_registry = {} _document_registry = {}

View File

@@ -138,7 +138,10 @@ class BaseList(list):
return super(BaseList, self).__setitem__(key, value) return super(BaseList, self).__setitem__(key, value)
def __delitem__(self, key, *args, **kwargs): def __delitem__(self, key, *args, **kwargs):
self._mark_as_changed() if isinstance(key, slice):
self._mark_as_changed()
else:
self._mark_as_changed(key)
return super(BaseList, self).__delitem__(key) return super(BaseList, self).__delitem__(key)
def __setslice__(self, *args, **kwargs): def __setslice__(self, *args, **kwargs):
@@ -429,7 +432,7 @@ class StrictDict(object):
def __eq__(self, other): def __eq__(self, other):
return self.items() == other.items() return self.items() == other.items()
def __ne__(self, other): def __neq__(self, other):
return self.items() != other.items() return self.items() != other.items()
@classmethod @classmethod

View File

@@ -402,11 +402,9 @@ class BaseDocument(object):
raise ValidationError(message, errors=errors) raise ValidationError(message, errors=errors)
def to_json(self, *args, **kwargs): def to_json(self, *args, **kwargs):
"""Convert this document to JSON. """Converts a document to JSON.
:param use_db_field: Set to True by default but enables the output of the json structure with the field names
:param use_db_field: Serialize field names as they appear in and not the mongodb store db_names in case of set to False
MongoDB (as opposed to attribute names on this document).
Defaults to True.
""" """
use_db_field = kwargs.pop('use_db_field', True) use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
@@ -677,9 +675,6 @@ class BaseDocument(object):
if not only_fields: if not only_fields:
only_fields = [] only_fields = []
if son and not isinstance(son, dict):
raise ValueError("The source SON object needs to be of type 'dict'")
# Get the class name from the document, falling back to the given # Get the class name from the document, falling back to the given
# class if unavailable # class if unavailable
class_name = son.get('_cls', cls._class_name) class_name = son.get('_cls', cls._class_name)

View File

@@ -23,6 +23,7 @@ class BaseField(object):
.. versionchanged:: 0.5 - added verbose and help text .. versionchanged:: 0.5 - added verbose and help text
""" """
name = None name = None
_geo_index = False _geo_index = False
_auto_gen = False # Call `generate` to generate a value _auto_gen = False # Call `generate` to generate a value
@@ -41,7 +42,7 @@ class BaseField(object):
""" """
:param db_field: The database field to store this field in :param db_field: The database field to store this field in
(defaults to the name of the field) (defaults to the name of the field)
:param name: Deprecated - use db_field :param name: Depreciated - use db_field
:param required: If the field is required. Whether it has to have a :param required: If the field is required. Whether it has to have a
value or not. Defaults to False. value or not. Defaults to False.
:param default: (optional) The default value for this field if no value :param default: (optional) The default value for this field if no value
@@ -81,17 +82,6 @@ class BaseField(object):
self.sparse = sparse self.sparse = sparse
self._owner_document = None self._owner_document = None
# Validate the db_field
if isinstance(self.db_field, six.string_types) and (
'.' in self.db_field or
'\0' in self.db_field or
self.db_field.startswith('$')
):
raise ValueError(
'field names cannot contain dots (".") or null characters '
'("\\0"), and they must not start with a dollar sign ("$").'
)
# Detect and report conflicts between metadata and base properties. # Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs) conflicts = set(dir(self)) & set(kwargs)
if conflicts: if conflicts:
@@ -193,8 +183,7 @@ class BaseField(object):
EmbeddedDocument = _import_class('EmbeddedDocument') EmbeddedDocument = _import_class('EmbeddedDocument')
choice_list = self.choices choice_list = self.choices
if isinstance(next(iter(choice_list)), (list, tuple)): if isinstance(choice_list[0], (list, tuple)):
# next(iter) is useful for sets
choice_list = [k for k, _ in choice_list] choice_list = [k for k, _ in choice_list]
# Choices which are other types of Documents # Choices which are other types of Documents

View File

@@ -34,10 +34,7 @@ def _import_class(cls_name):
queryset_classes = ('OperationError',) queryset_classes = ('OperationError',)
deref_classes = ('DeReference',) deref_classes = ('DeReference',)
if cls_name == 'BaseDocument': if cls_name in doc_classes:
from mongoengine.base import document as module
import_classes = ['BaseDocument']
elif cls_name in doc_classes:
from mongoengine import document as module from mongoengine import document as module
import_classes = doc_classes import_classes = doc_classes
elif cls_name in field_classes: elif cls_name in field_classes:

View File

@@ -66,9 +66,9 @@ def register_connection(alias, name=None, host=None, port=None,
'authentication_mechanism': authentication_mechanism 'authentication_mechanism': authentication_mechanism
} }
# Handle uri style connections
conn_host = conn_settings['host'] conn_host = conn_settings['host']
# host can be a list or a string, so if string, force to a list
# Host can be a list or a string, so if string, force to a list.
if isinstance(conn_host, six.string_types): if isinstance(conn_host, six.string_types):
conn_host = [conn_host] conn_host = [conn_host]
@@ -96,7 +96,7 @@ def register_connection(alias, name=None, host=None, port=None,
uri_options = uri_dict['options'] uri_options = uri_dict['options']
if 'replicaset' in uri_options: if 'replicaset' in uri_options:
conn_settings['replicaSet'] = uri_options['replicaset'] conn_settings['replicaSet'] = True
if 'authsource' in uri_options: if 'authsource' in uri_options:
conn_settings['authentication_source'] = uri_options['authsource'] conn_settings['authentication_source'] = uri_options['authsource']
if 'authmechanism' in uri_options: if 'authmechanism' in uri_options:
@@ -170,22 +170,23 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
else: else:
connection_class = MongoClient connection_class = MongoClient
# For replica set connections with PyMongo 2.x, use # Handle replica set connections
# MongoReplicaSetClient. if 'replicaSet' in conn_settings:
# TODO remove this once we stop supporting PyMongo 2.x.
if 'replicaSet' in conn_settings and not IS_PYMONGO_3:
connection_class = MongoReplicaSetClient
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# hosts_or_uri has to be a string, so if 'host' was provided
# as a list, join its parts and separate them by ','
if isinstance(conn_settings['hosts_or_uri'], list):
conn_settings['hosts_or_uri'] = ','.join(
conn_settings['hosts_or_uri'])
# Discard port since it can't be used on MongoReplicaSetClient # Discard port since it can't be used on MongoReplicaSetClient
conn_settings.pop('port', None) conn_settings.pop('port', None)
# Discard replicaSet if it's not a string
if not isinstance(conn_settings['replicaSet'], six.string_types):
del conn_settings['replicaSet']
# For replica set connections with PyMongo 2.x, use
# MongoReplicaSetClient.
# TODO remove this once we stop supporting PyMongo 2.x.
if not IS_PYMONGO_3:
connection_class = MongoReplicaSetClient
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# Iterate over all of the connection settings and if a connection with # Iterate over all of the connection settings and if a connection with
# the same parameters is already established, use it instead of creating # the same parameters is already established, use it instead of creating
# a new one. # a new one.

View File

@@ -313,9 +313,6 @@ class Document(BaseDocument):
.. versionchanged:: 0.10.7 .. versionchanged:: 0.10.7
Add signal_kwargs argument Add signal_kwargs argument
""" """
if self._meta.get('abstract'):
raise InvalidDocumentError('Cannot save an abstract document.')
signal_kwargs = signal_kwargs or {} signal_kwargs = signal_kwargs or {}
signals.pre_save.send(self.__class__, document=self, **signal_kwargs) signals.pre_save.send(self.__class__, document=self, **signal_kwargs)
@@ -332,20 +329,68 @@ class Document(BaseDocument):
signals.pre_save_post_validation.send(self.__class__, document=self, signals.pre_save_post_validation.send(self.__class__, document=self,
created=created, **signal_kwargs) created=created, **signal_kwargs)
if self._meta.get('auto_create_index', True):
self.ensure_indexes()
try: try:
# Save a new document or update an existing one collection = self._get_collection()
if self._meta.get('auto_create_index', True):
self.ensure_indexes()
if created: if created:
object_id = self._save_create(doc, force_insert, write_concern) if force_insert:
object_id = collection.insert(doc, **write_concern)
else:
object_id = collection.save(doc, **write_concern)
# In PyMongo 3.0, the save() call calls internally the _update() call
# but they forget to return the _id value passed back, therefore getting it back here
# Correct behaviour in 2.X and in 3.0.1+ versions
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
else: else:
object_id, created = self._save_update(doc, save_condition, object_id = doc['_id']
write_concern) updates, removals = self._delta()
# Need to add shard key to query, or you get an error
if save_condition is not None:
select_dict = transform.query(self.__class__,
**save_condition)
else:
select_dict = {}
select_dict['_id'] = object_id
shard_key = self._meta.get('shard_key', tuple())
for k in shard_key:
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
def is_new_object(last_error):
if last_error is not None:
updated = last_error.get('updatedExisting')
if updated is not None:
return not updated
return created
update_query = {}
if updates:
update_query['$set'] = updates
if removals:
update_query['$unset'] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error['n'] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
created = is_new_object(last_error)
if cascade is None: if cascade is None:
cascade = (self._meta.get('cascade', False) or cascade = self._meta.get(
cascade_kwargs is not None) 'cascade', False) or cascade_kwargs is not None
if cascade: if cascade:
kwargs = { kwargs = {
@@ -358,7 +403,6 @@ class Document(BaseDocument):
kwargs.update(cascade_kwargs) kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs kwargs['_refs'] = _refs
self.cascade_save(**kwargs) self.cascade_save(**kwargs)
except pymongo.errors.DuplicateKeyError as err: except pymongo.errors.DuplicateKeyError as err:
message = u'Tried to save duplicate unique keys (%s)' message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % six.text_type(err)) raise NotUniqueError(message % six.text_type(err))
@@ -371,91 +415,16 @@ class Document(BaseDocument):
raise NotUniqueError(message % six.text_type(err)) raise NotUniqueError(message % six.text_type(err))
raise OperationError(message % six.text_type(err)) raise OperationError(message % six.text_type(err))
# Make sure we store the PK on this document now that it's saved
id_field = self._meta['id_field'] id_field = self._meta['id_field']
if created or id_field not in self._meta.get('shard_key', []): if created or id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id) self[id_field] = self._fields[id_field].to_python(object_id)
signals.post_save.send(self.__class__, document=self, signals.post_save.send(self.__class__, document=self,
created=created, **signal_kwargs) created=created, **signal_kwargs)
self._clear_changed_fields() self._clear_changed_fields()
self._created = False self._created = False
return self return self
def _save_create(self, doc, force_insert, write_concern):
"""Save a new document.
Helper method, should only be used inside save().
"""
collection = self._get_collection()
if force_insert:
return collection.insert(doc, **write_concern)
object_id = collection.save(doc, **write_concern)
# In PyMongo 3.0, the save() call calls internally the _update() call
# but they forget to return the _id value passed back, therefore getting it back here
# Correct behaviour in 2.X and in 3.0.1+ versions
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
return object_id
def _save_update(self, doc, save_condition, write_concern):
"""Update an existing document.
Helper method, should only be used inside save().
"""
collection = self._get_collection()
object_id = doc['_id']
created = False
select_dict = {}
if save_condition is not None:
select_dict = transform.query(self.__class__, **save_condition)
select_dict['_id'] = object_id
# Need to add shard key to query, or you get an error
shard_key = self._meta.get('shard_key', tuple())
for k in shard_key:
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
updates, removals = self._delta()
update_query = {}
if updates:
update_query['$set'] = updates
if removals:
update_query['$unset'] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error['n'] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
if last_error is not None:
updated_existing = last_error.get('updatedExisting')
if updated_existing is False:
created = True
# !!! This is bad, means we accidentally created a new,
# potentially corrupted document. See
# https://github.com/MongoEngine/mongoengine/issues/564
return object_id, created
def cascade_save(self, **kwargs): def cascade_save(self, **kwargs):
"""Recursively save any references and generic references on the """Recursively save any references and generic references on the
document. document.
@@ -859,6 +828,7 @@ class Document(BaseDocument):
""" Lists all of the indexes that should be created for given """ Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes. collection. It includes all the indexes from super- and sub-classes.
""" """
if cls._meta.get('abstract'): if cls._meta.get('abstract'):
return [] return []

View File

@@ -50,8 +50,8 @@ class FieldDoesNotExist(Exception):
or an :class:`~mongoengine.EmbeddedDocument`. or an :class:`~mongoengine.EmbeddedDocument`.
To avoid this behavior on data loading, To avoid this behavior on data loading,
you should set the :attr:`strict` to ``False`` you should the :attr:`strict` to ``False``
in the :attr:`meta` dictionary. in the :attr:`meta` dictionnary.
""" """

View File

@@ -28,7 +28,7 @@ from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField,
GeoJsonBaseField, ObjectIdField, get_document) GeoJsonBaseField, ObjectIdField, get_document)
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.document import Document, EmbeddedDocument from mongoengine.document import Document, EmbeddedDocument
from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError from mongoengine.errors import DoesNotExist, ValidationError
from mongoengine.python_support import StringIO from mongoengine.python_support import StringIO
from mongoengine.queryset import DO_NOTHING, QuerySet from mongoengine.queryset import DO_NOTHING, QuerySet
@@ -139,12 +139,12 @@ class URLField(StringField):
# Check first if the scheme is valid # Check first if the scheme is valid
scheme = value.split('://')[0].lower() scheme = value.split('://')[0].lower()
if scheme not in self.schemes: if scheme not in self.schemes:
self.error(u'Invalid scheme {} in URL: {}'.format(scheme, value)) self.error('Invalid scheme {} in URL: {}'.format(scheme, value))
return return
# Then check full URL # Then check full URL
if not self.url_regex.match(value): if not self.url_regex.match(value):
self.error(u'Invalid URL: {}'.format(value)) self.error('Invalid URL: {}'.format(value))
return return
@@ -566,11 +566,7 @@ class EmbeddedDocumentField(BaseField):
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
if value is not None and not isinstance(value, self.document_type): if value is not None and not isinstance(value, self.document_type):
try: value = self.document_type._from_son(value)
value = self.document_type._from_son(value)
except ValueError:
raise InvalidQueryError("Querying the embedded document '%s' failed, due to an invalid query value" %
(self.document_type._class_name,))
super(EmbeddedDocumentField, self).prepare_query_value(op, value) super(EmbeddedDocumentField, self).prepare_query_value(op, value)
return self.to_mongo(value) return self.to_mongo(value)
@@ -888,6 +884,10 @@ class ReferenceField(BaseField):
Foo.register_delete_rule(Bar, 'foo', NULLIFY) Foo.register_delete_rule(Bar, 'foo', NULLIFY)
.. note ::
`reverse_delete_rule` does not trigger pre / post delete signals to be
triggered.
.. versionchanged:: 0.5 added `reverse_delete_rule` .. versionchanged:: 0.5 added `reverse_delete_rule`
""" """

View File

@@ -86,7 +86,6 @@ class BaseQuerySet(object):
self._batch_size = None self._batch_size = None
self.only_fields = [] self.only_fields = []
self._max_time_ms = None self._max_time_ms = None
self._comment = None
def __call__(self, q_obj=None, class_check=True, read_preference=None, def __call__(self, q_obj=None, class_check=True, read_preference=None,
**query): **query):
@@ -707,36 +706,39 @@ class BaseQuerySet(object):
with switch_db(self._document, alias) as cls: with switch_db(self._document, alias) as cls:
collection = cls._get_collection() collection = cls._get_collection()
return self._clone_into(self.__class__(self._document, collection)) return self.clone_into(self.__class__(self._document, collection))
def clone(self): def clone(self):
"""Create a copy of the current queryset.""" """Creates a copy of the current
return self._clone_into(self.__class__(self._document, self._collection_obj)) :class:`~mongoengine.queryset.QuerySet`
def _clone_into(self, new_qs): .. versionadded:: 0.5
"""Copy all of the relevant properties of this queryset to
a new queryset (which has to be an instance of
:class:`~mongoengine.queryset.base.BaseQuerySet`).
""" """
if not isinstance(new_qs, BaseQuerySet): return self.clone_into(self.__class__(self._document, self._collection_obj))
def clone_into(self, cls):
"""Creates a copy of the current
:class:`~mongoengine.queryset.base.BaseQuerySet` into another child class
"""
if not isinstance(cls, BaseQuerySet):
raise OperationError( raise OperationError(
'%s is not a subclass of BaseQuerySet' % new_qs.__name__) '%s is not a subclass of BaseQuerySet' % cls.__name__)
copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj',
'_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_where_clause', '_loaded_fields', '_ordering', '_snapshot',
'_timeout', '_class_check', '_slave_okay', '_read_preference', '_timeout', '_class_check', '_slave_okay', '_read_preference',
'_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
'_limit', '_skip', '_hint', '_auto_dereference', '_limit', '_skip', '_hint', '_auto_dereference',
'_search_text', 'only_fields', '_max_time_ms', '_comment') '_search_text', 'only_fields', '_max_time_ms')
for prop in copy_props: for prop in copy_props:
val = getattr(self, prop) val = getattr(self, prop)
setattr(new_qs, prop, copy.copy(val)) setattr(cls, prop, copy.copy(val))
if self._cursor_obj: if self._cursor_obj:
new_qs._cursor_obj = self._cursor_obj.clone() cls._cursor_obj = self._cursor_obj.clone()
return new_qs return cls
def select_related(self, max_depth=1): def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects or """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
@@ -758,11 +760,7 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._limit = n if n != 0 else 1 queryset._limit = n if n != 0 else 1
# Return self to allow chaining
# If a cursor object has already been created, apply the limit to it.
if queryset._cursor_obj:
queryset._cursor_obj.limit(queryset._limit)
return queryset return queryset
def skip(self, n): def skip(self, n):
@@ -773,11 +771,6 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._skip = n queryset._skip = n
# If a cursor object has already been created, apply the skip to it.
if queryset._cursor_obj:
queryset._cursor_obj.skip(queryset._skip)
return queryset return queryset
def hint(self, index=None): def hint(self, index=None):
@@ -795,11 +788,6 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._hint = index queryset._hint = index
# If a cursor object has already been created, apply the hint to it.
if queryset._cursor_obj:
queryset._cursor_obj.hint(queryset._hint)
return queryset return queryset
def batch_size(self, size): def batch_size(self, size):
@@ -813,11 +801,6 @@ class BaseQuerySet(object):
""" """
queryset = self.clone() queryset = self.clone()
queryset._batch_size = size queryset._batch_size = size
# If a cursor object has already been created, apply the batch size to it.
if queryset._cursor_obj:
queryset._cursor_obj.batch_size(queryset._batch_size)
return queryset return queryset
def distinct(self, field): def distinct(self, field):
@@ -917,24 +900,18 @@ class BaseQuerySet(object):
return self.fields(**fields) return self.fields(**fields)
def fields(self, _only_called=False, **kwargs): def fields(self, _only_called=False, **kwargs):
"""Manipulate how you load this document's fields. Used by `.only()` """Manipulate how you load this document's fields. Used by `.only()`
and `.exclude()` to manipulate which fields to retrieve. If called and `.exclude()` to manipulate which fields to retrieve. Fields also
directly, use a set of kwargs similar to the MongoDB projection allows for a greater level of control for example:
document. For example:
Include only a subset of fields: Retrieving a Subrange of Array Elements:
posts = BlogPost.objects(...).fields(author=1, title=1) You can use the $slice operator to retrieve a subrange of elements in
an array. For example to get the first 5 comments::
Exclude a specific field: post = BlogPost.objects(...).fields(slice__comments=5)
posts = BlogPost.objects(...).fields(comments=0) :param kwargs: A dictionary identifying what to include
To retrieve a subrange of array elements:
posts = BlogPost.objects(...).fields(slice__comments=5)
:param kwargs: A set keywors arguments identifying what to include.
.. versionadded:: 0.5 .. versionadded:: 0.5
""" """
@@ -950,20 +927,7 @@ class BaseQuerySet(object):
key = '.'.join(parts) key = '.'.join(parts)
cleaned_fields.append((key, value)) cleaned_fields.append((key, value))
# Sort fields by their values, explicitly excluded fields first, then fields = sorted(cleaned_fields, key=operator.itemgetter(1))
# explicitly included, and then more complicated operators such as
# $slice.
def _sort_key(field_tuple):
key, value = field_tuple
if isinstance(value, (int)):
return value # 0 for exclusion, 1 for inclusion
else:
return 2 # so that complex values appear last
fields = sorted(cleaned_fields, key=_sort_key)
# Clone the queryset, group all fields by their value, convert
# each of them to db_fields, and set the queryset's _loaded_fields
queryset = self.clone() queryset = self.clone()
for value, group in itertools.groupby(fields, lambda x: x[1]): for value, group in itertools.groupby(fields, lambda x: x[1]):
fields = [field for field, value in group] fields = [field for field, value in group]
@@ -989,31 +953,13 @@ class BaseQuerySet(object):
def order_by(self, *keys): def order_by(self, *keys):
"""Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
order may be specified by prepending each of the keys by a + or a -. order may be specified by prepending each of the keys by a + or a -.
Ascending order is assumed. If no keys are passed, existing ordering Ascending order is assumed.
is cleared instead.
:param keys: fields to order the query results by; keys may be :param keys: fields to order the query results by; keys may be
prefixed with **+** or **-** to determine the ordering direction prefixed with **+** or **-** to determine the ordering direction
""" """
queryset = self.clone() queryset = self.clone()
queryset._ordering = queryset._get_order_by(keys)
old_ordering = queryset._ordering
new_ordering = queryset._get_order_by(keys)
if queryset._cursor_obj:
# If a cursor object has already been created, apply the sort to it
if new_ordering:
queryset._cursor_obj.sort(new_ordering)
# If we're trying to clear a previous explicit ordering, we need
# to clear the cursor entirely (because PyMongo doesn't allow
# clearing an existing sort on a cursor).
elif old_ordering:
queryset._cursor_obj = None
queryset._ordering = new_ordering
return queryset return queryset
def comment(self, text): def comment(self, text):
@@ -1459,13 +1405,10 @@ class BaseQuerySet(object):
raise StopIteration raise StopIteration
raw_doc = self._cursor.next() raw_doc = self._cursor.next()
if self._as_pymongo: if self._as_pymongo:
return self._get_as_pymongo(raw_doc) return self._get_as_pymongo(raw_doc)
doc = self._document._from_son(raw_doc,
doc = self._document._from_son( _auto_dereference=self._auto_dereference, only_fields=self.only_fields)
raw_doc, _auto_dereference=self._auto_dereference,
only_fields=self.only_fields)
if self._scalar: if self._scalar:
return self._get_scalar(doc) return self._get_scalar(doc)
@@ -1475,6 +1418,7 @@ class BaseQuerySet(object):
def rewind(self): def rewind(self):
"""Rewind the cursor to its unevaluated state. """Rewind the cursor to its unevaluated state.
.. versionadded:: 0.3 .. versionadded:: 0.3
""" """
self._iter = False self._iter = False
@@ -1524,54 +1468,43 @@ class BaseQuerySet(object):
@property @property
def _cursor(self): def _cursor(self):
"""Return a PyMongo cursor object corresponding to this queryset.""" if self._cursor_obj is None:
# If _cursor_obj already exists, return it immediately. # In PyMongo 3+, we define the read preference on a collection
if self._cursor_obj is not None: # level, not a cursor level. Thus, we need to get a cloned
return self._cursor_obj # collection object using `with_options` first.
if IS_PYMONGO_3 and self._read_preference is not None:
self._cursor_obj = self._collection\
.with_options(read_preference=self._read_preference)\
.find(self._query, **self._cursor_args)
else:
self._cursor_obj = self._collection.find(self._query,
**self._cursor_args)
# Apply where clauses to cursor
if self._where_clause:
where_clause = self._sub_js_fields(self._where_clause)
self._cursor_obj.where(where_clause)
# Create a new PyMongo cursor. if self._ordering:
# XXX In PyMongo 3+, we define the read preference on a collection # Apply query ordering
# level, not a cursor level. Thus, we need to get a cloned collection self._cursor_obj.sort(self._ordering)
# object using `with_options` first. elif self._ordering is None and self._document._meta['ordering']:
if IS_PYMONGO_3 and self._read_preference is not None: # Otherwise, apply the ordering from the document model, unless
self._cursor_obj = self._collection\ # it's been explicitly cleared via order_by with no arguments
.with_options(read_preference=self._read_preference)\ order = self._get_order_by(self._document._meta['ordering'])
.find(self._query, **self._cursor_args) self._cursor_obj.sort(order)
else:
self._cursor_obj = self._collection.find(self._query,
**self._cursor_args)
# Apply "where" clauses to cursor
if self._where_clause:
where_clause = self._sub_js_fields(self._where_clause)
self._cursor_obj.where(where_clause)
# Apply ordering to the cursor. if self._limit is not None:
# XXX self._ordering can be equal to: self._cursor_obj.limit(self._limit)
# * None if we didn't explicitly call order_by on this queryset.
# * A list of PyMongo-style sorting tuples.
# * An empty list if we explicitly called order_by() without any
# arguments. This indicates that we want to clear the default
# ordering.
if self._ordering:
# explicit ordering
self._cursor_obj.sort(self._ordering)
elif self._ordering is None and self._document._meta['ordering']:
# default ordering
order = self._get_order_by(self._document._meta['ordering'])
self._cursor_obj.sort(order)
if self._limit is not None: if self._skip is not None:
self._cursor_obj.limit(self._limit) self._cursor_obj.skip(self._skip)
if self._skip is not None: if self._hint != -1:
self._cursor_obj.skip(self._skip) self._cursor_obj.hint(self._hint)
if self._hint != -1: if self._batch_size is not None:
self._cursor_obj.hint(self._hint) self._cursor_obj.batch_size(self._batch_size)
if self._batch_size is not None:
self._cursor_obj.batch_size(self._batch_size)
return self._cursor_obj return self._cursor_obj
@@ -1746,13 +1679,7 @@ class BaseQuerySet(object):
return ret return ret
def _get_order_by(self, keys): def _get_order_by(self, keys):
"""Given a list of MongoEngine-style sort keys, return a list """Creates a list of order by fields"""
of sorting tuples that can be applied to a PyMongo cursor. For
example:
>>> qs._get_order_by(['-last_name', 'first_name'])
[('last_name', -1), ('first_name', 1)]
"""
key_list = [] key_list = []
for key in keys: for key in keys:
if not key: if not key:
@@ -1765,19 +1692,17 @@ class BaseQuerySet(object):
direction = pymongo.ASCENDING direction = pymongo.ASCENDING
if key[0] == '-': if key[0] == '-':
direction = pymongo.DESCENDING direction = pymongo.DESCENDING
if key[0] in ('-', '+'): if key[0] in ('-', '+'):
key = key[1:] key = key[1:]
key = key.replace('__', '.') key = key.replace('__', '.')
try: try:
key = self._document._translate_field_name(key) key = self._document._translate_field_name(key)
except Exception: except Exception:
# TODO this exception should be more specific
pass pass
key_list.append((key, direction)) key_list.append((key, direction))
if self._cursor_obj and key_list:
self._cursor_obj.sort(key_list)
return key_list return key_list
def _get_scalar(self, doc): def _get_scalar(self, doc):
@@ -1875,21 +1800,10 @@ class BaseQuerySet(object):
return code return code
def _chainable_method(self, method_name, val): def _chainable_method(self, method_name, val):
"""Call a particular method on the PyMongo cursor call a particular chainable method
with the provided value.
"""
queryset = self.clone() queryset = self.clone()
method = getattr(queryset._cursor, method_name)
# Get an existing cursor object or create a new one method(val)
cursor = queryset._cursor
# Find the requested method on the cursor and call it with the
# provided value
getattr(cursor, method_name)(val)
# Cache the value on the queryset._{method_name}
setattr(queryset, '_' + method_name, val) setattr(queryset, '_' + method_name, val)
return queryset return queryset
# Deprecated # Deprecated

View File

@@ -136,15 +136,13 @@ class QuerySet(BaseQuerySet):
return self._len return self._len
def no_cache(self): def no_cache(self):
"""Convert to a non-caching queryset """Convert to a non_caching queryset
.. versionadded:: 0.8.3 Convert to non caching queryset .. versionadded:: 0.8.3 Convert to non caching queryset
""" """
if self._result_cache is not None: if self._result_cache is not None:
raise OperationError('QuerySet already cached') raise OperationError('QuerySet already cached')
return self.clone_into(QuerySetNoCache(self._document, self._collection))
return self._clone_into(QuerySetNoCache(self._document,
self._collection))
class QuerySetNoCache(BaseQuerySet): class QuerySetNoCache(BaseQuerySet):
@@ -155,7 +153,7 @@ class QuerySetNoCache(BaseQuerySet):
.. versionadded:: 0.8.3 Convert to caching queryset .. versionadded:: 0.8.3 Convert to caching queryset
""" """
return self._clone_into(QuerySet(self._document, self._collection)) return self.clone_into(QuerySet(self._document, self._collection))
def __repr__(self): def __repr__(self):
"""Provides the string representation of the QuerySet """Provides the string representation of the QuerySet

View File

@@ -101,21 +101,8 @@ def query(_doc_cls=None, **kwargs):
value = value['_id'] value = value['_id']
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
# Raise an error if the in/nin/all/near param is not iterable. We need a # 'in', 'nin' and 'all' require a list of values
# special check for BaseDocument, because - although it's iterable - using value = [field.prepare_query_value(op, v) for v in value]
# it as such in the context of this method is most definitely a mistake.
BaseDocument = _import_class('BaseDocument')
if isinstance(value, BaseDocument):
raise TypeError("When using the `in`, `nin`, `all`, or "
"`near`-operators you can\'t use a "
"`Document`, you must wrap your object "
"in a list (object -> [object]).")
elif not hasattr(value, '__iter__'):
raise TypeError("The `in`, `nin`, `all`, or "
"`near`-operators must be applied to an "
"iterable (e.g. a list).")
else:
value = [field.prepare_query_value(op, v) for v in value]
# If we're querying a GenericReferenceField, we need to alter the # If we're querying a GenericReferenceField, we need to alter the
# key depending on the value: # key depending on the value:
@@ -233,7 +220,8 @@ def update(_doc_cls=None, **update):
# Support decrement by flipping a positive value's sign # Support decrement by flipping a positive value's sign
# and using 'inc' # and using 'inc'
op = 'inc' op = 'inc'
value = -value if value > 0:
value = -value
elif op == 'add_to_set': elif op == 'add_to_set':
op = 'addToSet' op = 'addToSet'
elif op == 'set_on_insert': elif op == 'set_on_insert':

View File

@@ -7,5 +7,5 @@ cover-package=mongoengine
[flake8] [flake8]
ignore=E501,F401,F403,F405,I201 ignore=E501,F401,F403,F405,I201
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
max-complexity=47 max-complexity=45
application-import-names=mongoengine,tests application-import-names=mongoengine,tests

View File

@@ -435,15 +435,6 @@ class InstanceTest(unittest.TestCase):
person.to_dbref() person.to_dbref()
def test_save_abstract_document(self):
"""Saving an abstract document should fail."""
class Doc(Document):
name = StringField()
meta = {'abstract': True}
with self.assertRaises(InvalidDocumentError):
Doc(name='aaa').save()
def test_reload(self): def test_reload(self):
"""Ensure that attributes may be reloaded. """Ensure that attributes may be reloaded.
""" """
@@ -1232,19 +1223,6 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(person.name, None) self.assertEqual(person.name, None)
self.assertEqual(person.age, None) self.assertEqual(person.age, None)
def test_update_rename_operator(self):
"""Test the $rename operator."""
coll = self.Person._get_collection()
doc = self.Person(name='John').save()
raw_doc = coll.find_one({'_id': doc.pk})
self.assertEqual(set(raw_doc.keys()), set(['_id', '_cls', 'name']))
doc.update(rename__name='first_name')
raw_doc = coll.find_one({'_id': doc.pk})
self.assertEqual(set(raw_doc.keys()),
set(['_id', '_cls', 'first_name']))
self.assertEqual(raw_doc['first_name'], 'John')
def test_inserts_if_you_set_the_pk(self): def test_inserts_if_you_set_the_pk(self):
p1 = self.Person(name='p1', id=bson.ObjectId()).save() p1 = self.Person(name='p1', id=bson.ObjectId()).save()
p2 = self.Person(name='p2') p2 = self.Person(name='p2')
@@ -1882,10 +1860,6 @@ class InstanceTest(unittest.TestCase):
'occurs': {"hello": None} 'occurs': {"hello": None}
}) })
# Tests for issue #1438: https://github.com/MongoEngine/mongoengine/issues/1438
with self.assertRaises(ValueError):
Word._from_son('this is not a valid SON dict')
def test_reverse_delete_rule_cascade_and_nullify(self): def test_reverse_delete_rule_cascade_and_nullify(self):
"""Ensure that a referenced document is also deleted upon deletion. """Ensure that a referenced document is also deleted upon deletion.
""" """

View File

@@ -1,12 +1,13 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import six
from nose.plugins.skip import SkipTest
import datetime import datetime
import unittest import unittest
import uuid import uuid
import math import math
import itertools import itertools
import re import re
from nose.plugins.skip import SkipTest
import six import six
try: try:
@@ -26,13 +27,21 @@ from mongoengine import *
from mongoengine.connection import get_db from mongoengine.connection import get_db
from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList, from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList,
_document_registry) _document_registry)
from mongoengine.errors import NotRegistered, DoesNotExist
from tests.utils import MongoDBTestCase
__all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase") __all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase")
class FieldTest(MongoDBTestCase): class FieldTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def tearDown(self):
self.db.drop_collection('fs.files')
self.db.drop_collection('fs.chunks')
self.db.drop_collection('mongoengine.counters')
def test_default_values_nothing_set(self): def test_default_values_nothing_set(self):
"""Ensure that default field values are used when creating a document. """Ensure that default field values are used when creating a document.
@@ -218,9 +227,9 @@ class FieldTest(MongoDBTestCase):
self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime))
def test_not_required_handles_none_from_database(self): def test_not_required_handles_none_from_database(self):
"""Ensure that every field can handle null values from the """Ensure that every fields can handle null values from the database.
database.
""" """
class HandleNoneFields(Document): class HandleNoneFields(Document):
str_fld = StringField(required=True) str_fld = StringField(required=True)
int_fld = IntField(required=True) int_fld = IntField(required=True)
@@ -297,24 +306,6 @@ class FieldTest(MongoDBTestCase):
person.id = '497ce96f395f2f052a494fd4' person.id = '497ce96f395f2f052a494fd4'
person.validate() person.validate()
def test_db_field_validation(self):
"""Ensure that db_field doesn't accept invalid values."""
# dot in the name
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='user.name')
# name starting with $
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='$name')
# name containing a null character
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='name\0')
def test_string_validation(self): def test_string_validation(self):
"""Ensure that invalid values cannot be assigned to string fields. """Ensure that invalid values cannot be assigned to string fields.
""" """
@@ -341,12 +332,11 @@ class FieldTest(MongoDBTestCase):
person.validate() person.validate()
def test_url_validation(self): def test_url_validation(self):
"""Ensure that URLFields validate urls properly.""" """Ensure that URLFields validate urls properly.
"""
class Link(Document): class Link(Document):
url = URLField() url = URLField()
Link.drop_collection()
link = Link() link = Link()
link.url = 'google' link.url = 'google'
self.assertRaises(ValidationError, link.validate) self.assertRaises(ValidationError, link.validate)
@@ -354,27 +344,6 @@ class FieldTest(MongoDBTestCase):
link.url = 'http://www.google.com:8080' link.url = 'http://www.google.com:8080'
link.validate() link.validate()
def test_unicode_url_validation(self):
"""Ensure unicode URLs are validated properly."""
class Link(Document):
url = URLField()
Link.drop_collection()
link = Link()
link.url = u'http://привет.com'
# TODO fix URL validation - this *IS* a valid URL
# For now we just want to make sure that the error message is correct
try:
link.validate()
self.assertTrue(False)
except ValidationError as e:
self.assertEqual(
unicode(e),
u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
)
def test_url_scheme_validation(self): def test_url_scheme_validation(self):
"""Ensure that URLFields validate urls with specific schemes properly. """Ensure that URLFields validate urls with specific schemes properly.
""" """
@@ -1073,7 +1042,6 @@ class FieldTest(MongoDBTestCase):
self.assertEqual( self.assertEqual(
BlogPost.objects.filter(info__100__test__exact='test').count(), 0) BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
# test queries by list
post = BlogPost() post = BlogPost()
post.info = ['1', '2'] post.info = ['1', '2']
post.save() post.save()
@@ -1085,248 +1053,6 @@ class FieldTest(MongoDBTestCase):
post.info *= 2 post.info *= 2
post.save() post.save()
self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4', '1', '2', '3', '4']).count(), 1) self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4', '1', '2', '3', '4']).count(), 1)
BlogPost.drop_collection()
def test_list_field_manipulative_operators(self):
"""Ensure that ListField works with standard list operators that manipulate the list.
"""
class BlogPost(Document):
ref = StringField()
info = ListField(StringField())
BlogPost.drop_collection()
post = BlogPost()
post.ref = "1234"
post.info = ['0', '1', '2', '3', '4', '5']
post.save()
def reset_post():
post.info = ['0', '1', '2', '3', '4', '5']
post.save()
# '__add__(listB)'
# listA+listB
# operator.add(listA, listB)
reset_post()
temp = ['a', 'b']
post.info = post.info + temp
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b'])
# '__delitem__(index)'
# aka 'del list[index]'
# aka 'operator.delitem(list, index)'
reset_post()
del post.info[2] # del from middle ('2')
self.assertEqual(post.info, ['0', '1', '3', '4', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '3', '4', '5'])
# '__delitem__(slice(i, j))'
# aka 'del list[i:j]'
# aka 'operator.delitem(list, slice(i,j))'
reset_post()
del post.info[1:3] # removes '1', '2'
self.assertEqual(post.info, ['0', '3', '4', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '3', '4', '5'])
# '__iadd__'
# aka 'list += list'
reset_post()
temp = ['a', 'b']
post.info += temp
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b'])
# '__imul__'
# aka 'list *= number'
reset_post()
post.info *= 2
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5'])
# '__mul__'
# aka 'listA*listB'
reset_post()
post.info = post.info * 2
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5'])
# '__rmul__'
# aka 'listB*listA'
reset_post()
post.info = 2 * post.info
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5'])
# '__setitem__(index, value)'
# aka 'list[index]=value'
# aka 'setitem(list, value)'
reset_post()
post.info[4] = 'a'
self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5'])
# '__setitem__(slice(i, j), listB)'
# aka 'listA[i:j] = listB'
# aka 'setitem(listA, slice(i, j), listB)'
reset_post()
post.info[1:3] = ['h', 'e', 'l', 'l', 'o']
self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5'])
# 'append'
reset_post()
post.info.append('h')
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h'])
# 'extend'
reset_post()
post.info.extend(['h', 'e', 'l', 'l', 'o'])
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o'])
# 'insert'
# 'pop'
reset_post()
x = post.info.pop(2)
y = post.info.pop()
self.assertEqual(post.info, ['0', '1', '3', '4'])
self.assertEqual(x, '2')
self.assertEqual(y, '5')
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '3', '4'])
# 'remove'
reset_post()
post.info.remove('2')
self.assertEqual(post.info, ['0', '1', '3', '4', '5'])
post.save()
post.reload()
self.assertEqual(post.info, ['0', '1', '3', '4', '5'])
# 'reverse'
reset_post()
post.info.reverse()
self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0'])
post.save()
post.reload()
self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0'])
# 'sort': though this operator method does manipulate the list, it is tested in
# the 'test_list_field_lexicograpic_operators' function
BlogPost.drop_collection()
def test_list_field_invalid_operators(self):
    """Ensure operators that make no sense for a ListField raise
    the appropriate error (lists are mutable, hence unhashable).
    """
    class BlogPost(Document):
        ref = StringField()
        info = ListField(StringField())

    post = BlogPost()
    post.ref = "1234"
    post.info = ['0', '1', '2', '3', '4', '5']

    # '__hash__' aka 'hash(list)' must raise a TypeError,
    # since list-like fields cannot be hashed.
    with self.assertRaises(TypeError):
        hash(post.info)
def test_list_field_lexicographic_operators(self):
    """Ensure that ListField works with standard list operators that
    do lexicographic ordering.
    """
    class BlogPost(Document):
        ref = StringField()
        text_info = ListField(StringField())
        oid_info = ListField(ObjectIdField())
        bool_info = ListField(BooleanField())

    BlogPost.drop_collection()

    blogSmall = BlogPost(ref="small")
    blogSmall.text_info = ["a", "a", "a"]
    blogSmall.bool_info = [False, False]
    blogSmall.save()
    blogSmall.reload()

    blogLargeA = BlogPost(ref="big")
    blogLargeA.text_info = ["a", "z", "j"]
    blogLargeA.bool_info = [False, True]
    blogLargeA.save()
    blogLargeA.reload()

    blogLargeB = BlogPost(ref="big2")
    blogLargeB.text_info = ["a", "z", "j"]
    # Hex strings are coerced to ObjectIds by the ObjectIdField on save.
    blogLargeB.oid_info = [
        "54495ad94c934721ede76f90",
        "54495ad94c934721ede76d23",
        "54495ad94c934721ede76d00"
    ]
    blogLargeB.bool_info = [False, True]
    blogLargeB.save()
    blogLargeB.reload()

    # '__eq__' aka '=='
    self.assertEqual(blogLargeA.text_info, blogLargeB.text_info)
    self.assertEqual(blogLargeA.bool_info, blogLargeB.bool_info)

    # '__ge__' aka '>='
    self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info)
    self.assertGreaterEqual(blogLargeA.text_info, blogLargeB.text_info)
    self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info)
    self.assertGreaterEqual(blogLargeA.bool_info, blogLargeB.bool_info)

    # '__gt__' aka '>'
    # NOTE: previously used assertGreaterEqual, which never exercised
    # the strict greater-than comparison; assertGreater does.
    self.assertGreater(blogLargeA.text_info, blogSmall.text_info)
    self.assertGreater(blogLargeA.bool_info, blogSmall.bool_info)

    # '__le__' aka '<='
    self.assertLessEqual(blogSmall.text_info, blogLargeB.text_info)
    self.assertLessEqual(blogLargeA.text_info, blogLargeB.text_info)
    self.assertLessEqual(blogSmall.bool_info, blogLargeB.bool_info)
    self.assertLessEqual(blogLargeA.bool_info, blogLargeB.bool_info)

    # '__lt__' aka '<'
    self.assertLess(blogSmall.text_info, blogLargeB.text_info)
    self.assertLess(blogSmall.bool_info, blogLargeB.bool_info)

    # '__ne__' aka '!='
    self.assertNotEqual(blogSmall.text_info, blogLargeB.text_info)
    self.assertNotEqual(blogSmall.bool_info, blogLargeB.bool_info)

    # 'sort' — verify in-place sorting works for str, ObjectId and
    # bool element types, and that the sorted order survives a
    # save/reload round trip.
    blogLargeB.bool_info = [True, False, True, False]
    blogLargeB.text_info.sort()
    blogLargeB.oid_info.sort()
    blogLargeB.bool_info.sort()

    sorted_target_list = [
        ObjectId("54495ad94c934721ede76d00"),
        ObjectId("54495ad94c934721ede76d23"),
        ObjectId("54495ad94c934721ede76f90")
    ]
    self.assertEqual(blogLargeB.text_info, ["a", "j", "z"])
    self.assertEqual(blogLargeB.oid_info, sorted_target_list)
    self.assertEqual(blogLargeB.bool_info, [False, False, True, True])

    blogLargeB.save()
    blogLargeB.reload()
    self.assertEqual(blogLargeB.text_info, ["a", "j", "z"])
    self.assertEqual(blogLargeB.oid_info, sorted_target_list)
    self.assertEqual(blogLargeB.bool_info, [False, False, True, True])

    BlogPost.drop_collection()
def test_list_assignment(self): def test_list_assignment(self):
@@ -1376,6 +1102,7 @@ class FieldTest(MongoDBTestCase):
post.reload() post.reload()
self.assertEqual(post.info, [1, 2, 3, 4, 'n5']) self.assertEqual(post.info, [1, 2, 3, 4, 'n5'])
def test_list_field_passed_in_value(self): def test_list_field_passed_in_value(self):
class Foo(Document): class Foo(Document):
bars = ListField(ReferenceField("Bar")) bars = ListField(ReferenceField("Bar"))
@@ -1998,7 +1725,7 @@ class FieldTest(MongoDBTestCase):
self.assertEqual(content, User.objects.first().groups[0].content) self.assertEqual(content, User.objects.first().groups[0].content)
def test_reference_miss(self): def test_reference_miss(self):
"""Ensure an exception is raised when dereferencing unknown document """Ensure an exception is raised when dereferencing unknow document
""" """
class Foo(Document): class Foo(Document):
@@ -3199,42 +2926,26 @@ class FieldTest(MongoDBTestCase):
att.delete() att.delete()
self.assertEqual(0, Attachment.objects.count()) self.assertEqual(0, Attachment.objects.count())
def test_choices_allow_using_sets_as_choices(self): def test_choices_validation(self):
"""Ensure that sets can be used when setting choices """Ensure that value is in a container of allowed values.
""" """
class Shirt(Document): class Shirt(Document):
size = StringField(choices={'M', 'L'}) size = StringField(max_length=3, choices=(
('S', 'Small'), ('M', 'Medium'), ('L', 'Large'),
('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')))
Shirt(size='M').validate() Shirt.drop_collection()
def test_choices_validation_allow_no_value(self):
"""Ensure that .validate passes and no value was provided
for a field setup with choices
"""
class Shirt(Document):
size = StringField(choices=('S', 'M'))
shirt = Shirt() shirt = Shirt()
shirt.validate() shirt.validate()
def test_choices_validation_accept_possible_value(self): shirt.size = "S"
"""Ensure that value is in a container of allowed values.
"""
class Shirt(Document):
size = StringField(choices=('S', 'M'))
shirt = Shirt(size='S')
shirt.validate() shirt.validate()
def test_choices_validation_reject_unknown_value(self): shirt.size = "XS"
"""Ensure that unallowed value are rejected upon validation self.assertRaises(ValidationError, shirt.validate)
"""
class Shirt(Document):
size = StringField(choices=('S', 'M'))
shirt = Shirt(size="XS") Shirt.drop_collection()
with self.assertRaises(ValidationError):
shirt.validate()
def test_choices_validation_documents(self): def test_choices_validation_documents(self):
""" """
@@ -4020,25 +3731,30 @@ class FieldTest(MongoDBTestCase):
"""Tests if a `FieldDoesNotExist` exception is raised when trying to """Tests if a `FieldDoesNotExist` exception is raised when trying to
instanciate a document with a field that's not defined. instanciate a document with a field that's not defined.
""" """
class Doc(Document):
foo = StringField()
with self.assertRaises(FieldDoesNotExist): class Doc(Document):
foo = StringField(db_field='f')
def test():
Doc(bar='test') Doc(bar='test')
self.assertRaises(FieldDoesNotExist, test)
def test_undefined_field_exception_with_strict(self): def test_undefined_field_exception_with_strict(self):
"""Tests if a `FieldDoesNotExist` exception is raised when trying to """Tests if a `FieldDoesNotExist` exception is raised when trying to
instanciate a document with a field that's not defined, instanciate a document with a field that's not defined,
even when strict is set to False. even when strict is set to False.
""" """
class Doc(Document): class Doc(Document):
foo = StringField() foo = StringField(db_field='f')
meta = {'strict': False} meta = {'strict': False}
with self.assertRaises(FieldDoesNotExist): def test():
Doc(bar='test') Doc(bar='test')
self.assertRaises(FieldDoesNotExist, test)
def test_long_field_is_considered_as_int64(self): def test_long_field_is_considered_as_int64(self):
""" """
Tests that long fields are stored as long in mongo, even if long value Tests that long fields are stored as long in mongo, even if long value
@@ -4053,13 +3769,12 @@ class FieldTest(MongoDBTestCase):
self.assertTrue(isinstance(doc.some_long, six.integer_types)) self.assertTrue(isinstance(doc.some_long, six.integer_types))
class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.db = connect(db='EmbeddedDocumentListFieldTestCase')
def setUp(self):
"""
Create two BlogPost entries in the database, each with
several EmbeddedDocuments.
"""
class Comments(EmbeddedDocument): class Comments(EmbeddedDocument):
author = StringField() author = StringField()
message = StringField() message = StringField()
@@ -4067,11 +3782,14 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
class BlogPost(Document): class BlogPost(Document):
comments = EmbeddedDocumentListField(Comments) comments = EmbeddedDocumentListField(Comments)
BlogPost.drop_collection() cls.Comments = Comments
cls.BlogPost = BlogPost
self.Comments = Comments
self.BlogPost = BlogPost
def setUp(self):
"""
Create two BlogPost entries in the database, each with
several EmbeddedDocuments.
"""
self.post1 = self.BlogPost(comments=[ self.post1 = self.BlogPost(comments=[
self.Comments(author='user1', message='message1'), self.Comments(author='user1', message='message1'),
self.Comments(author='user2', message='message1') self.Comments(author='user2', message='message1')
@@ -4083,6 +3801,13 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
self.Comments(author='user3', message='message1') self.Comments(author='user3', message='message1')
]).save() ]).save()
def tearDown(self):
self.BlogPost.drop_collection()
@classmethod
def tearDownClass(cls):
cls.db.drop_database('EmbeddedDocumentListFieldTestCase')
def test_no_keyword_filter(self): def test_no_keyword_filter(self):
""" """
Tests the filter method of a List of Embedded Documents Tests the filter method of a List of Embedded Documents
@@ -4440,8 +4165,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique)) my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique))
A(my_list=[]).save() A(my_list=[]).save()
with self.assertRaises(NotUniqueError): self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save())
A(my_list=[]).save()
class EmbeddedWithSparseUnique(EmbeddedDocument): class EmbeddedWithSparseUnique(EmbeddedDocument):
number = IntField(unique=True, sparse=True) number = IntField(unique=True, sparse=True)
@@ -4449,9 +4173,6 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
class B(Document): class B(Document):
my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique)) my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique))
A.drop_collection()
B.drop_collection()
B(my_list=[]).save() B(my_list=[]).save()
B(my_list=[]).save() B(my_list=[]).save()
@@ -4491,8 +4212,6 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase):
a_field = IntField() a_field = IntField()
c_field = IntField(custom_data=custom_data) c_field = IntField(custom_data=custom_data)
CustomData.drop_collection()
a1 = CustomData(a_field=1, c_field=2).save() a1 = CustomData(a_field=1, c_field=2).save()
self.assertEqual(2, a1.c_field) self.assertEqual(2, a1.c_field)
self.assertFalse(hasattr(a1.c_field, 'custom_data')) self.assertFalse(hasattr(a1.c_field, 'custom_data'))

View File

@@ -18,13 +18,15 @@ try:
except ImportError: except ImportError:
HAS_PIL = False HAS_PIL = False
from tests.utils import MongoDBTestCase
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')
class FileTest(MongoDBTestCase): class FileTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def tearDown(self): def tearDown(self):
self.db.drop_collection('fs.files') self.db.drop_collection('fs.files')

View File

@@ -141,16 +141,6 @@ class OnlyExcludeAllTest(unittest.TestCase):
self.assertEqual(qs._loaded_fields.as_dict(), self.assertEqual(qs._loaded_fields.as_dict(),
{'b': {'$slice': 5}}) {'b': {'$slice': 5}})
def test_mix_slice_with_other_fields(self):
class MyDoc(Document):
a = ListField()
b = ListField()
c = ListField()
qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
self.assertEqual(qs._loaded_fields.as_dict(),
{'c': {'$slice': 2}, 'a': 1})
def test_only(self): def test_only(self):
"""Ensure that QuerySet.only only returns the requested fields. """Ensure that QuerySet.only only returns the requested fields.
""" """

View File

@@ -106,111 +106,58 @@ class QuerySetTest(unittest.TestCase):
list(BlogPost.objects(author2__name="test")) list(BlogPost.objects(author2__name="test"))
def test_find(self): def test_find(self):
"""Ensure that a query returns a valid set of results.""" """Ensure that a query returns a valid set of results.
user_a = self.Person.objects.create(name='User A', age=20) """
user_b = self.Person.objects.create(name='User B', age=30) self.Person(name="User A", age=20).save()
self.Person(name="User B", age=30).save()
# Find all people in the collection # Find all people in the collection
people = self.Person.objects people = self.Person.objects
self.assertEqual(people.count(), 2) self.assertEqual(people.count(), 2)
results = list(people) results = list(people)
self.assertTrue(isinstance(results[0], self.Person)) self.assertTrue(isinstance(results[0], self.Person))
self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode))) self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode)))
self.assertEqual(results[0].name, "User A")
self.assertEqual(results[0], user_a)
self.assertEqual(results[0].name, 'User A')
self.assertEqual(results[0].age, 20) self.assertEqual(results[0].age, 20)
self.assertEqual(results[1].name, "User B")
self.assertEqual(results[1], user_b)
self.assertEqual(results[1].name, 'User B')
self.assertEqual(results[1].age, 30) self.assertEqual(results[1].age, 30)
# Filter people by age # Use a query to filter the people found to just person1
people = self.Person.objects(age=20) people = self.Person.objects(age=20)
self.assertEqual(people.count(), 1) self.assertEqual(people.count(), 1)
person = people.next() person = people.next()
self.assertEqual(person, user_a)
self.assertEqual(person.name, "User A") self.assertEqual(person.name, "User A")
self.assertEqual(person.age, 20) self.assertEqual(person.age, 20)
def test_limit(self): # Test limit
"""Ensure that QuerySet.limit works as expected."""
user_a = self.Person.objects.create(name='User A', age=20)
user_b = self.Person.objects.create(name='User B', age=30)
# Test limit on a new queryset
people = list(self.Person.objects.limit(1)) people = list(self.Person.objects.limit(1))
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0], user_a) self.assertEqual(people[0].name, 'User A')
# Test limit on an existing queryset # Test skip
people = self.Person.objects
self.assertEqual(len(people), 2)
people2 = people.limit(1)
self.assertEqual(len(people), 2)
self.assertEqual(len(people2), 1)
self.assertEqual(people2[0], user_a)
# Test chaining of only after limit
person = self.Person.objects().limit(1).only('name').first()
self.assertEqual(person, user_a)
self.assertEqual(person.name, 'User A')
self.assertEqual(person.age, None)
def test_skip(self):
"""Ensure that QuerySet.skip works as expected."""
user_a = self.Person.objects.create(name='User A', age=20)
user_b = self.Person.objects.create(name='User B', age=30)
# Test skip on a new queryset
people = list(self.Person.objects.skip(1)) people = list(self.Person.objects.skip(1))
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0], user_b) self.assertEqual(people[0].name, 'User B')
# Test skip on an existing queryset person3 = self.Person(name="User C", age=40)
people = self.Person.objects person3.save()
self.assertEqual(len(people), 2)
people2 = people.skip(1)
self.assertEqual(len(people), 2)
self.assertEqual(len(people2), 1)
self.assertEqual(people2[0], user_b)
# Test chaining of only after skip
person = self.Person.objects().skip(1).only('name').first()
self.assertEqual(person, user_b)
self.assertEqual(person.name, 'User B')
self.assertEqual(person.age, None)
def test_slice(self):
"""Ensure slicing a queryset works as expected."""
user_a = self.Person.objects.create(name='User A', age=20)
user_b = self.Person.objects.create(name='User B', age=30)
user_c = self.Person.objects.create(name="User C", age=40)
# Test slice limit # Test slice limit
people = list(self.Person.objects[:2]) people = list(self.Person.objects[:2])
self.assertEqual(len(people), 2) self.assertEqual(len(people), 2)
self.assertEqual(people[0], user_a) self.assertEqual(people[0].name, 'User A')
self.assertEqual(people[1], user_b) self.assertEqual(people[1].name, 'User B')
# Test slice skip # Test slice skip
people = list(self.Person.objects[1:]) people = list(self.Person.objects[1:])
self.assertEqual(len(people), 2) self.assertEqual(len(people), 2)
self.assertEqual(people[0], user_b) self.assertEqual(people[0].name, 'User B')
self.assertEqual(people[1], user_c) self.assertEqual(people[1].name, 'User C')
# Test slice limit and skip # Test slice limit and skip
people = list(self.Person.objects[1:2]) people = list(self.Person.objects[1:2])
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0], user_b) self.assertEqual(people[0].name, 'User B')
# Test slice limit and skip on an existing queryset
people = self.Person.objects
self.assertEqual(len(people), 3)
people2 = people[1:2]
self.assertEqual(len(people2), 1)
self.assertEqual(people2[0], user_b)
# Test slice limit and skip cursor reset # Test slice limit and skip cursor reset
qs = self.Person.objects[1:2] qs = self.Person.objects[1:2]
@@ -221,7 +168,6 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(len(people), 1) self.assertEqual(len(people), 1)
self.assertEqual(people[0].name, 'User B') self.assertEqual(people[0].name, 'User B')
# Test empty slice
people = list(self.Person.objects[1:1]) people = list(self.Person.objects[1:1])
self.assertEqual(len(people), 0) self.assertEqual(len(people), 0)
@@ -241,6 +187,12 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual("[<Person: Person object>, <Person: Person object>]", self.assertEqual("[<Person: Person object>, <Person: Person object>]",
"%s" % self.Person.objects[51:53]) "%s" % self.Person.objects[51:53])
# Test only after limit
self.assertEqual(self.Person.objects().limit(2).only('name')[0].age, None)
# Test only after skip
self.assertEqual(self.Person.objects().skip(2).only('name')[0].age, None)
def test_find_one(self): def test_find_one(self):
"""Ensure that a query using find_one returns a valid result. """Ensure that a query using find_one returns a valid result.
""" """
@@ -1274,7 +1226,6 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
# default ordering should be used by default
with db_ops_tracker() as q: with db_ops_tracker() as q:
BlogPost.objects.filter(title='whatever').first() BlogPost.objects.filter(title='whatever').first()
self.assertEqual(len(q.get_ops()), 1) self.assertEqual(len(q.get_ops()), 1)
@@ -1283,28 +1234,11 @@ class QuerySetTest(unittest.TestCase):
{'published_date': -1} {'published_date': -1}
) )
# calling order_by() should clear the default ordering
with db_ops_tracker() as q: with db_ops_tracker() as q:
BlogPost.objects.filter(title='whatever').order_by().first() BlogPost.objects.filter(title='whatever').order_by().first()
self.assertEqual(len(q.get_ops()), 1) self.assertEqual(len(q.get_ops()), 1)
self.assertFalse('$orderby' in q.get_ops()[0]['query']) self.assertFalse('$orderby' in q.get_ops()[0]['query'])
# calling an explicit order_by should use a specified sort
with db_ops_tracker() as q:
BlogPost.objects.filter(title='whatever').order_by('published_date').first()
self.assertEqual(len(q.get_ops()), 1)
self.assertEqual(
q.get_ops()[0]['query']['$orderby'],
{'published_date': 1}
)
# calling order_by() after an explicit sort should clear it
with db_ops_tracker() as q:
qs = BlogPost.objects.filter(title='whatever').order_by('published_date')
qs.order_by().first()
self.assertEqual(len(q.get_ops()), 1)
self.assertFalse('$orderby' in q.get_ops()[0]['query'])
def test_no_ordering_for_get(self): def test_no_ordering_for_get(self):
""" Ensure that Doc.objects.get doesn't use any ordering. """ Ensure that Doc.objects.get doesn't use any ordering.
""" """
@@ -1332,7 +1266,7 @@ class QuerySetTest(unittest.TestCase):
def test_find_embedded(self): def test_find_embedded(self):
"""Ensure that an embedded document is properly returned from """Ensure that an embedded document is properly returned from
different manners of querying. a query.
""" """
class User(EmbeddedDocument): class User(EmbeddedDocument):
name = StringField() name = StringField()
@@ -1343,9 +1277,8 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
user = User(name='Test User')
BlogPost.objects.create( BlogPost.objects.create(
author=user, author=User(name='Test User'),
content='Had a good coffee today...' content='Had a good coffee today...'
) )
@@ -1353,19 +1286,6 @@ class QuerySetTest(unittest.TestCase):
self.assertTrue(isinstance(result.author, User)) self.assertTrue(isinstance(result.author, User))
self.assertEqual(result.author.name, 'Test User') self.assertEqual(result.author.name, 'Test User')
result = BlogPost.objects.get(author__name=user.name)
self.assertTrue(isinstance(result.author, User))
self.assertEqual(result.author.name, 'Test User')
result = BlogPost.objects.get(author={'name': user.name})
self.assertTrue(isinstance(result.author, User))
self.assertEqual(result.author.name, 'Test User')
# Fails, since the string is not a type that is able to represent the
# author's document structure (should be dict)
with self.assertRaises(InvalidQueryError):
BlogPost.objects.get(author=user.name)
def test_find_empty_embedded(self): def test_find_empty_embedded(self):
"""Ensure that you can save and find an empty embedded document.""" """Ensure that you can save and find an empty embedded document."""
class User(EmbeddedDocument): class User(EmbeddedDocument):
@@ -1892,11 +1812,6 @@ class QuerySetTest(unittest.TestCase):
post.reload() post.reload()
self.assertEqual(post.hits, 10) self.assertEqual(post.hits, 10)
# Negative dec operator is equal to a positive inc operator
BlogPost.objects.update_one(dec__hits=-1)
post.reload()
self.assertEqual(post.hits, 11)
BlogPost.objects.update(push__tags='mongo') BlogPost.objects.update(push__tags='mongo')
post.reload() post.reload()
self.assertTrue('mongo' in post.tags) self.assertTrue('mongo' in post.tags)
@@ -5048,35 +4963,6 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(i, 249) self.assertEqual(i, 249)
self.assertEqual(j, 249) self.assertEqual(j, 249)
def test_in_operator_on_non_iterable(self):
"""Ensure that using the `__in` operator on a non-iterable raises an
error.
"""
class User(Document):
name = StringField()
class BlogPost(Document):
content = StringField()
authors = ListField(ReferenceField(User))
User.drop_collection()
BlogPost.drop_collection()
author = User.objects.create(name='Test User')
post = BlogPost.objects.create(content='Had a good coffee today...',
authors=[author])
# Make sure using `__in` with a list works
blog_posts = BlogPost.objects(authors__in=[author])
self.assertEqual(list(blog_posts), [post])
# Using `__in` with a non-iterable should raise a TypeError
self.assertRaises(TypeError, BlogPost.objects(authors__in=author.pk).count)
# Using `__in` with a `Document` (which is seemingly iterable but not
# in a way we'd expect) should raise a TypeError, too
self.assertRaises(TypeError, BlogPost.objects(authors__in=author).count)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@@ -200,19 +200,6 @@ class ConnectionTest(unittest.TestCase):
self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'test') self.assertEqual(db.name, 'test')
def test_connect_uri_with_replicaset(self):
"""Ensure connect() works when specifying a replicaSet."""
if IS_PYMONGO_3:
c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'test')
else:
# PyMongo < v3.x raises an exception:
# "localhost:27017 is not a member of replica set local-rs"
with self.assertRaises(MongoEngineConnectionError):
c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
def test_uri_without_credentials_doesnt_override_conn_settings(self): def test_uri_without_credentials_doesnt_override_conn_settings(self):
"""Ensure connect() uses the username & password params if the URI """Ensure connect() uses the username & password params if the URI
doesn't explicitly specify them. doesn't explicitly specify them.
@@ -296,19 +283,6 @@ class ConnectionTest(unittest.TestCase):
conn = get_connection('t2') conn = get_connection('t2')
self.assertFalse(get_tz_awareness(conn)) self.assertFalse(get_tz_awareness(conn))
def test_write_concern(self):
"""Ensure write concern can be specified in connect() via
a kwarg or as part of the connection URI.
"""
conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true')
conn2 = connect('testing', alias='conn2', w=1, j=True)
if IS_PYMONGO_3:
self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True})
self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True})
else:
self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True})
self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True})
def test_datetime(self): def test_datetime(self):
connect('mongoenginetest', tz_aware=True) connect('mongoenginetest', tz_aware=True)
d = datetime.datetime(2010, 5, 5, tzinfo=utc) d = datetime.datetime(2010, 5, 5, tzinfo=utc)

View File

@@ -1,22 +0,0 @@
import unittest
from mongoengine import connect
from mongoengine.connection import get_db
MONGO_TEST_DB = 'mongoenginetest'
class MongoDBTestCase(unittest.TestCase):
"""Base class for tests that need a mongodb connection
db is being dropped automatically
"""
@classmethod
def setUpClass(cls):
cls._connection = connect(db=MONGO_TEST_DB)
cls._connection.drop_database(MONGO_TEST_DB)
cls.db = get_db()
@classmethod
def tearDownClass(cls):
cls._connection.drop_database(MONGO_TEST_DB)