diff --git a/.gitignore b/.gitignore index 7c0a9172..b180e87e 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,6 @@ env/ .project .pydevproject tests/test_bugfix.py -htmlcov/ \ No newline at end of file +htmlcov/ +venv +venv3 diff --git a/.landscape.yml b/.landscape.yml new file mode 100644 index 00000000..a27bbb03 --- /dev/null +++ b/.landscape.yml @@ -0,0 +1,22 @@ +pylint: + disable: + # We use this a lot (e.g. via document._meta) + - protected-access + + options: + additional-builtins: + # add xrange and long as valid built-ins. In Python 3, xrange is + # translated into range and long is translated into int via 2to3 (see + # "use_2to3" in setup.py). This should be removed when we drop Python + # 2 support (which probably won't happen any time soon). + - xrange + - long + +pyflakes: + disable: + # undefined variables are already covered by pylint (and exclude + # xrange & long) + - F821 + +ignore-paths: + - benchmark.py diff --git a/.travis.yml b/.travis.yml index 501ce314..836996b4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,54 +1,90 @@ language: python + python: -- '2.6' - '2.7' -- '3.2' - '3.3' - '3.4' +- '3.5' - pypy - pypy3 + env: -# MongoDB 2.4 + +# MongoDB v2.4 w/ PyMongo v2.7.x, v2.8.x, v3.x - PYMONGO=2.7 REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-10gen=2.4.14' SERVICE_NAME='mongodb' - PYMONGO=2.8 REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-10gen=2.4.14' SERVICE_NAME='mongodb' - PYMONGO=3.0 REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-10gen=2.4.14' SERVICE_NAME='mongodb' -- PYMONGO=dev REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-10gen=2.4.14' SERVICE_NAME='mongodb' -# MongoDB 2.6 + +# MongoDB v2.6 w/ PyMongo v2.7.x, v2.8.x, v3.x - PYMONGO=2.7 REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-org-server=2.6.10' - PYMONGO=2.8 REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-org-server=2.6.10' - PYMONGO=3.0 REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-org-server=2.6.10' -- PYMONGO=dev REPOSITORY='deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' PACKAGE_NAME='mongodb-org-server=2.6.10' -# MongoDB 3.0 + +# MongoDB v3.0 w/ PyMongo v3.x - PYMONGO=3.0 REPOSITORY='deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse' PACKAGE_NAME='mongodb-org-server=3.0.4' -- PYMONGO=dev REPOSITORY='deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse' PACKAGE_NAME='mongodb-org-server=3.0.4' + +# Finish the build as soon as one job fails matrix: fast_finish: true + before_install: - travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 - echo $REPOSITORY | sudo tee /etc/apt/sources.list.d/mongodb.list - travis_retry sudo apt-get update - travis_retry sudo apt-get install $PACKAGE_NAME - if [ -z $SERVICE_NAME ]; then echo "MongoDB is already started"; else sudo service $SERVICE_NAME start; fi + install: - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk -- travis_retry pip install tox>=1.9 coveralls +- travis_retry pip install --upgrade pip +- 
travis_retry pip install coveralls +- travis_retry pip install flake8 +- travis_retry pip install tox>=1.9 +- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test + +# Cache dependencies installed via pip +cache: pip + +# Run flake8 for py27 +before_script: +- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then tox -e flake8; fi + script: - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage -after_script: coveralls --verbose + +# For now only submit coveralls for Python v2.7. Python v3.x currently shows +# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible +# code in a separate dir and runs tests on that. +after_success: +- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi + notifications: irc: irc.freenode.org#mongoengine + branches: only: - master - /^v.*$/ + +# Whenever a new release is created via GitHub, publish it on PyPI. deploy: provider: pypi user: the_drow password: secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= + + # create a source distribution and a pure python wheel for faster installs + distributions: "sdist bdist_wheel" + + # only deploy on tagged commits (aka GitHub releases) and only for the + # parent repo's builds running Python 2.7 along with dev PyMongo (we run + # Travis against many different Python and PyMongo versions and we don't + # want the deploy to occur multiple times). on: tags: true repo: MongoEngine/mongoengine + condition: "$PYMONGO = 3.0" + python: 2.7 diff --git a/AUTHORS b/AUTHORS index a66bf7c0..1d724718 100644 --- a/AUTHORS +++ b/AUTHORS @@ -228,3 +228,18 @@ that much better: * Vicki Donchenko (https://github.com/kivistein) * Emile Caron (https://github.com/emilecaron) * Amit Lichtenberg (https://github.com/amitlicht) + * Gang Li (https://github.com/iici-gli) + * Lars Butler (https://github.com/larsbutler) + * George Macon (https://github.com/gmacon) + * Ashley Whetter (https://github.com/AWhetter) + * Paul-Armand Verhaegen (https://github.com/paularmand) + * Steven Rossiter (https://github.com/BeardedSteve) + * Luo Peng (https://github.com/RussellLuo) + * Bryan Bennett (https://github.com/bbenne10) + * Gilb's Gilb's (https://github.com/gilbsgilbs) + * Joshua Nedrud (https://github.com/Neurostack) + * Shu Shen (https://github.com/shushen) + * xiaost7 (https://github.com/xiaost7) + * Victor Varvaryuk + * Stanislav Kaledin (https://github.com/sallyruthstruik) + * Dmitry Yantsen (https://github.com/mrTable) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index aeba41f7..2668499c 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -14,13 +14,13 @@ Before starting to write code, look for existing `tickets `_ or `create one `_ for your specific issue or feature request. That way you avoid working on something -that might not be of interest or that has already been addressed. If in doubt +that might not be of interest or that has already been addressed. If in doubt post to the `user group ` Supported Interpreters ---------------------- -MongoEngine supports CPython 2.6 and newer. Language +MongoEngine supports CPython 2.7 and newer. Language features not supported by all interpreters can not be used. 
Please also ensure that your code is properly converted by `2to3 `_ for Python 3 support. diff --git a/README.rst b/README.rst index f4c92d5f..adfa0c71 100644 --- a/README.rst +++ b/README.rst @@ -4,25 +4,25 @@ MongoEngine :Info: MongoEngine is an ORM-like layer on top of PyMongo. :Repository: https://github.com/MongoEngine/mongoengine :Author: Harry Marr (http://github.com/hmarr) -:Maintainer: Ross Lawley (http://github.com/rozza) +:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan) -.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master - :target: http://travis-ci.org/MongoEngine/mongoengine +.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master + :target: https://travis-ci.org/MongoEngine/mongoengine -.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master - :target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master +.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master + :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master -.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png - :target: https://landscape.io/github/MongoEngine/mongoengine/master - :alt: Code Health +.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat + :target: https://landscape.io/github/MongoEngine/mongoengine/master + :alt: Code Health About ===== MongoEngine is a Python Object-Document Mapper for working with MongoDB. -Documentation available at http://mongoengine-odm.rtfd.org - there is currently -a `tutorial `_, a `user guide -`_ and an `API reference -`_. +Documentation available at https://mongoengine-odm.readthedocs.io - there is currently +a `tutorial `_, a `user guide +`_ and an `API reference +`_. Installation ============ @@ -35,25 +35,37 @@ setup.py install``. Dependencies ============ -- pymongo>=2.7.1 -- sphinx (optional - for documentation generation) +All of the dependencies can easily be installed via `pip `_. At the very least, you'll need these two packages to use MongoEngine: + +- pymongo>=2.7.1 +- six>=1.10.0 + +If you utilize a ``DateTimeField``, you might also use a more flexible date parser: -Optional Dependencies ---------------------- -- **Image Fields**: Pillow>=2.0.0 - dateutil>=2.1.0 -.. note - MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: PyMongo 3.0.1 +If you need to use an ``ImageField`` or ``ImageGridFsProxy``: + +- Pillow>=2.0.0 + +If you want to generate the documentation (e.g. to contribute to it): + +- sphinx Examples ======== -Some simple examples of what MongoEngine code looks like:: +Some simple examples of what MongoEngine code looks like: + +.. code :: python + + from mongoengine import * + connect('mydb') class BlogPost(Document): title = StringField(required=True, max_length=200) - posted = DateTimeField(default=datetime.datetime.now) + posted = DateTimeField(default=datetime.datetime.utcnow) tags = ListField(StringField(max_length=50)) + meta = {'allow_inheritance': True} class TextPost(BlogPost): content = StringField(required=True) @@ -81,23 +93,24 @@ Some simple examples of what MongoEngine code looks like:: ... print ... 
- >>> len(BlogPost.objects) + # Count all blog posts and its subtypes + >>> BlogPost.objects.count() 2 - >>> len(TextPost.objects) + >>> TextPost.objects.count() 1 - >>> len(LinkPost.objects) + >>> LinkPost.objects.count() 1 - # Find tagged posts - >>> len(BlogPost.objects(tags='mongoengine')) + # Count tagged posts + >>> BlogPost.objects(tags='mongoengine').count() 2 - >>> len(BlogPost.objects(tags='mongodb')) + >>> BlogPost.objects(tags='mongodb').count() 1 Tests ===== To run the test suite, ensure you are running a local instance of MongoDB on -the standard port, and run: ``python setup.py nosetests``. +the standard port and have ``nose`` installed. Then, run: ``python setup.py nosetests``. To run the test suite on every supported Python version and every supported PyMongo version, you can use ``tox``. @@ -124,8 +137,7 @@ Community `_ - `MongoEngine Developers mailing list `_ -- `#mongoengine IRC channel `_ Contributing ============ -We welcome contributions! see the `Contribution guidelines `_ +We welcome contributions! See the `Contribution guidelines `_ diff --git a/benchmark.py b/benchmark.py index 53ecf32c..8e93ee40 100644 --- a/benchmark.py +++ b/benchmark.py @@ -1,118 +1,41 @@ #!/usr/bin/env python +""" +Simple benchmark comparing PyMongo and MongoEngine. + +Sample run on a mid 2015 MacBook Pro (commit b282511): + +Benchmarking... +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries - Pymongo +2.58979988098 +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries - Pymongo write_concern={"w": 0} +1.26657605171 +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries - MongoEngine +8.4351580143 +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries without continual assign - MongoEngine +7.20191693306 +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True +6.31104588509 +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True +6.07083487511 +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False +5.97704291344 +---------------------------------------------------------------------------------------------------- +Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False +5.9111430645 +""" + import timeit -def cprofile_main(): - from pymongo import Connection - connection = Connection() - connection.drop_database('timeit_test') - connection.disconnect() - - from mongoengine import Document, DictField, connect - connect("timeit_test") - - class Noddy(Document): - fields = DictField() - - for i in range(1): - noddy = Noddy() - for j in range(20): - noddy.fields["key" + str(j)] = "value " + str(j) - noddy.save() - - def main(): - """ - 0.4 Performance Figures ... 
- - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - Pymongo - 3.86744189262 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine - 6.23374891281 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False - 5.33027005196 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False - pass - No Cascade - - 0.5.X - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - Pymongo - 3.89597702026 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine - 21.7735359669 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False - 19.8670389652 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False - pass - No Cascade - - 0.6.X - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - Pymongo - 3.81559205055 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine - 10.0446798801 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False - 9.51354718208 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False - 9.02567505836 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, force=True - 8.44933390617 - - 0.7.X - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - Pymongo - 3.78801012039 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine - 9.73050498962 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False - 8.33456707001 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False - 8.37778115273 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, force=True - 8.36906409264 - 0.8.X - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - Pymongo - 3.69964408875 - ---------------------------------------------------------------------------------------------------- - Creating 10000 
dictionaries - Pymongo write_concern={"w": 0} - 3.5526599884 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine - 7.00959801674 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries without continual assign - MongoEngine - 5.60943293571 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True - 6.715102911 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True - 5.50644683838 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False - 4.69851183891 - ---------------------------------------------------------------------------------------------------- - Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False - 4.68946313858 - ---------------------------------------------------------------------------------------------------- - """ print("Benchmarking...") setup = """ @@ -131,7 +54,7 @@ noddy = db.noddy for i in range(10000): example = {'fields': {}} for j in range(20): - example['fields']["key"+str(j)] = "value "+str(j) + example['fields']['key' + str(j)] = 'value ' + str(j) noddy.save(example) @@ -146,9 +69,10 @@ myNoddys = noddy.find() stmt = """ from pymongo import MongoClient +from pymongo.write_concern import WriteConcern connection = MongoClient() -db = connection.timeit_test +db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0)) noddy = db.noddy for i in range(10000): @@ -156,7 +80,7 @@ for i in range(10000): for j in range(20): example['fields']["key"+str(j)] = "value "+str(j) - noddy.save(example, write_concern={"w": 0}) + noddy.save(example) myNoddys = noddy.find() [n for n in myNoddys] # iterate @@ -171,10 +95,10 @@ myNoddys = noddy.find() from pymongo import MongoClient connection = MongoClient() connection.drop_database('timeit_test') -connection.disconnect() +connection.close() from mongoengine import Document, DictField, connect -connect("timeit_test") +connect('timeit_test') class Noddy(Document): fields = DictField() diff --git a/docs/changelog.rst b/docs/changelog.rst index c512eca0..0da97e90 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -2,11 +2,101 @@ Changelog ========= -Changes in 0.10.1 - DEV -======================= +Development +=========== +- (Fill this out as you fix issues and develop your features). +- Fixed using sets in field choices #1481 +- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 +- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 +- Fixed connecting to a replica set with PyMongo 2.x #1436 +- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 + +Changes in 0.11.0 +================= +- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 +- BREAKING CHANGE: Dropped Python 2.6 support. 
#1428 +- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass` instead. #1428 +- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 +- Fixed absent rounding for DecimalField when `force_string` is set. #1103 + +Changes in 0.10.8 +================= +- Added support for QuerySet.batch_size (#1426) +- Fixed query set iteration within iteration #1427 +- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421 +- Added ability to filter the generic reference field by ObjectId and DBRef #1425 +- Fixed delete cascade for models with a custom primary key field #1247 +- Added ability to specify an authentication mechanism (e.g. X.509) #1333 +- Added support for falsey primary keys (e.g. doc.pk = 0) #1354 +- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417 +- Fixed filtering by embedded_doc=None #1422 +- Added support for cursor.comment #1420 +- Fixed doc.get_<field>_display #1419 +- Fixed __repr__ method of the StrictDict #1424 +- Added a deprecation warning for Python 2.6 + +Changes in 0.10.7 +================= +- Dropped Python 3.2 support #1390 +- Fixed the bug where a dynamic doc has an index inside a dict field #1278 +- Fixed: ListField minus index assignment does not work #1128 +- Fixed cascade delete mixing among collections #1224 +- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signal calls #1206 +- Raise `OperationError` when trying to do a `drop_collection` on a document with no collection set. +- Fixed: count on ListField of EmbeddedDocumentField fails. #1187 +- Fixed long fields stored as int32 in Python 3. #1253 +- MapField now handles unicode keys correctly. #1267 +- ListField now handles negative indices correctly. #1270 +- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681 +- Fixed no_cursor_timeout error with pymongo 3.0+ #1304 +- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336 +- Fixed support for `__` to escape field names that match operator names in `update` #1351 +- Fixed BaseDocument#_mark_as_changed #1369 +- Added support for pickling QuerySet instances. #1397 +- Fixed connecting to a list of hosts #1389 +- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334 +- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218 +- Improvements to the dictionary fields docs #1383 + +Changes in 0.10.6 +================= +- Add support for mocking MongoEngine based on mongomock. #1151 +- Fixed not being able to run tests on Windows. #1153 +- Allow creation of sparse compound indexes. #1114 +- Fixed: count on ListField of EmbeddedDocumentField fails. #1187 + +Changes in 0.10.5 +================= +- Fix for reloading of strict with special fields. #1156 + +Changes in 0.10.4 +================= +- SaveConditionError is now importable from the top level package. #1165 +- upsert_one method added. #1157 + +Changes in 0.10.3 +================= +- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042 + +Changes in 0.10.2 +================= +- Allow shard key to point to a field in an embedded document. #551 +- Allow arbitrary metadata in fields.
#1129 +- ReferenceFields now support abstract document types. #837 + +Changes in 0.10.1 +================= - Fix infinite recursion with CASCADE delete rules under specific conditions. #1046 - Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047 - Fix ignored chained options #842 +- Document save's save_condition error raises `SaveConditionError` exception #1070 +- Fix Document.reload for DynamicDocument. #1050 +- StrictDict & SemiStrictDict are shadowed at init time. #1105 +- Fix ListField minus index assignment does not work. #1119 +- Remove code that marks field as changed when the field has default but not existed in database #1126 +- Remove test dependencies (nose and rednose) from install dependencies list. #1079 +- Recursively build query when using elemMatch operator. #1130 +- Fix instance back references for lists of embedded documents. #1131 Changes in 0.10.0 ================= diff --git a/docs/code/tumblelog.py b/docs/code/tumblelog.py index 0e40e899..c10160ea 100644 --- a/docs/code/tumblelog.py +++ b/docs/code/tumblelog.py @@ -17,6 +17,10 @@ class Post(Document): tags = ListField(StringField(max_length=30)) comments = ListField(EmbeddedDocumentField(Comment)) + # bugfix + meta = {'allow_inheritance': True} + + class TextPost(Post): content = StringField() @@ -45,7 +49,8 @@ print 'ALL POSTS' print for post in Post.objects: print post.title - print '=' * post.title.count() + #print '=' * post.title.count() + print "=" * 20 if isinstance(post, TextPost): print post.content diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index 48926499..827e5a3c 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -33,7 +33,7 @@ the :attr:`host` to corresponding parameters in :func:`~mongoengine.connect`: :: connect( - name='test', + db='test', username='user', password='12345', host='mongodb://admin:qwerty@localhost/production' diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index 8f7382ee..d41ae7e6 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -29,7 +29,7 @@ documents are serialized based on their field order. Dynamic document schemas ======================== -One of the benefits of MongoDb is dynamic schemas for a collection, whilst data +One of the benefits of MongoDB is dynamic schemas for a collection, whilst data should be planned and organised (after all explicit is better than implicit!) there are scenarios where having dynamic / expando style documents is desirable. @@ -75,6 +75,7 @@ are as follows: * :class:`~mongoengine.fields.DynamicField` * :class:`~mongoengine.fields.EmailField` * :class:`~mongoengine.fields.EmbeddedDocumentField` +* :class:`~mongoengine.fields.EmbeddedDocumentListField` * :class:`~mongoengine.fields.FileField` * :class:`~mongoengine.fields.FloatField` * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` @@ -149,7 +150,7 @@ arguments can be set on all fields: .. note:: If set, this field is also accessible through the `pk` field. :attr:`choices` (Default: None) - An iterable (e.g. a list or tuple) of choices to which the value of this + An iterable (e.g. list, tuple or set) of choices to which the value of this field should be limited. 
Can be either be a nested tuples of value (stored in mongo) and a @@ -172,11 +173,11 @@ arguments can be set on all fields: class Shirt(Document): size = StringField(max_length=3, choices=SIZE) -:attr:`help_text` (Default: None) - Optional help text to output with the field -- used by form libraries - -:attr:`verbose_name` (Default: None) - Optional human-readable name for the field -- used by form libraries +:attr:`**kwargs` (Optional) + You can supply additional metadata as arbitrary additional keyword + arguments. You can not override existing attributes, however. Common + choices include `help_text` and `verbose_name`, commonly used by form and + widget libraries. List fields @@ -213,9 +214,9 @@ document class as the first argument:: Dictionary Fields ----------------- -Often, an embedded document may be used instead of a dictionary -- generally -this is recommended as dictionaries don't support validation or custom field -types. However, sometimes you will not know the structure of what you want to +Often, an embedded document may be used instead of a dictionary – generally +embedded documents are recommended as dictionaries don’t support validation +or custom field types. However, sometimes you will not know the structure of what you want to store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate:: class SurveyResponse(Document): @@ -360,11 +361,6 @@ Its value can take any of the following constants: In Django, be sure to put all apps that have such delete rule declarations in their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. - -.. warning:: - Signals are not triggered when doing cascading updates / deletes - if this - is required you must manually handle the update / delete. - Generic reference fields '''''''''''''''''''''''' A second kind of reference field also exists, diff --git a/docs/guide/index.rst b/docs/guide/index.rst index c4077888..46eb7af2 100644 --- a/docs/guide/index.rst +++ b/docs/guide/index.rst @@ -13,3 +13,4 @@ User Guide gridfs signals text-indexes + mongomock diff --git a/docs/guide/mongomock.rst b/docs/guide/mongomock.rst new file mode 100644 index 00000000..1d5227ec --- /dev/null +++ b/docs/guide/mongomock.rst @@ -0,0 +1,21 @@ +============================== +Use mongomock for testing +============================== + +`mongomock `_ is a package to do just +what the name implies, mocking a mongo database. + +To use with mongoengine, simply specify mongomock when connecting with +mongoengine: + +.. code-block:: python + + connect('mongoenginetest', host='mongomock://localhost') + conn = get_connection() + +or with an alias: + +.. code-block:: python + + connect('mongoenginetest', host='mongomock://localhost', alias='testdb') + conn = get_connection('testdb') diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index f2e71b04..980947df 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -237,7 +237,7 @@ is preferred for achieving this:: # All except for the first 5 people users = User.objects[5:] - # 5 users, starting from the 10th user found + # 5 users, starting from the 11th user found users = User.objects[10:15] You may also index the query to retrieve a single result. 
If an item at that @@ -347,6 +347,8 @@ way of achieving this:: num_users = len(User.objects) +Even though len() is the Pythonic way of counting results, keep in mind that if you're concerned about performance, :meth:`~mongoengine.queryset.QuerySet.count` is the way to go since it only executes a server-side count query, while len() retrieves the results, places them in a cache, and finally counts them. If we compare the performance of the two operations, len() is much slower than :meth:`~mongoengine.queryset.QuerySet.count`. + Further aggregation ------------------- You may sum over the values of a specific field on documents using @@ -477,6 +479,8 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the first positional argument to :attr:`Document.objects` when you filter it by calling it with keyword arguments:: + from mongoengine.queryset.visitor import Q + # Get published posts Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 797a4869..30277966 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -142,11 +142,4 @@ cleaner looking while still allowing manual execution of the callback:: modified = DateTimeField() -ReferenceFields and Signals ---------------------------- - -Currently `reverse_delete_rule` does not trigger signals on the other part of -the relationship. If this is required you must manually handle the -reverse deletion. - .. _blinker: http://pypi.python.org/pypi/blinker diff --git a/docs/guide/text-indexes.rst b/docs/guide/text-indexes.rst index 695159c6..725ad369 100644 --- a/docs/guide/text-indexes.rst +++ b/docs/guide/text-indexes.rst @@ -17,7 +17,7 @@ Use the *$* prefix to set a text index, Look the declaration:: meta = {'indexes': [ {'fields': ['$title', "$content"], 'default_language': 'english', - 'weight': {'title': 10, 'content': 2} + 'weights': {'title': 10, 'content': 2} } ]} diff --git a/docs/upgrade.rst b/docs/upgrade.rst index b1a2217f..17b1c4ac 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -2,6 +2,53 @@ Upgrading ######### +Development +*********** +(Fill this out whenever you introduce breaking changes to MongoEngine) + +This release includes various fixes for the `BaseQuerySet` methods and how they +are chained together. Since version 0.10.1, applying limit/skip/hint/batch_size +to an already-existing queryset wouldn't modify the underlying PyMongo cursor. +This has been fixed now, so you'll need to make sure that your code didn't rely +on the broken implementation. + +Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private +`_clone_into`. If you directly used that method in your code, you'll need to +rename its occurrences. + +0.11.0 +****** +This release includes a major overhaul of MongoEngine's code quality and +introduces a few breaking changes. It also touches many different parts of +the package and although all the changes have been tested and scrutinized, +you're encouraged to thoroughly test the upgrade. + +The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. +If you import or catch this exception, you'll need to rename it in your code. + +The second breaking change drops Python v2.6 support. If you run MongoEngine on +that Python version, you'll need to upgrade it first.
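As a quick illustration of the `ConnectionError` rename described above, here is a minimal sketch of what affected code looks like after the upgrade (the ``analytics`` alias name is made up for this example):

.. code-block:: python

    # Prior to 0.11.0 this exception was importable as:
    #     from mongoengine import ConnectionError
    # From 0.11.0 onwards it is called MongoEngineConnectionError:
    from mongoengine.connection import MongoEngineConnectionError, get_connection

    try:
        # Asking for a connection alias that was never registered raises
        # MongoEngineConnectionError (formerly ConnectionError).
        get_connection('analytics')  # hypothetical alias name
    except MongoEngineConnectionError as exc:
        print('Could not get a MongoDB connection: %s' % exc)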
+ +The third breaking change drops an old backward compatibility measure where +`from mongoengine.base import ErrorClass` would work on top of +`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g. +`ValidationError`). If you import any exceptions from `mongoengine.base`, +import them from `mongoengine.errors` instead. + +0.10.8 +****** +This version fixed an issue where specifying a MongoDB URI host would override +more information than it should. These changes are minor, but they still +subtly modify the connection logic and thus you're encouraged to test your +MongoDB connection before shipping v0.10.8 in production. + +0.10.7 +****** + +`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use +`QuerySet.sum` and `QuerySet.average` instead, which use the aggregation framework +by default from now on. + 0.9.0 ***** diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 474c2154..f8969592 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -1,25 +1,36 @@ -import document -from document import * -import fields -from fields import * -import connection -from connection import * -import queryset -from queryset import * -import signals -from signals import * -from errors import * -import errors +# Import submodules so that we can expose their __all__ +from mongoengine import connection +from mongoengine import document +from mongoengine import errors +from mongoengine import fields +from mongoengine import queryset +from mongoengine import signals -__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + - list(queryset.__all__) + signals.__all__ + list(errors.__all__)) +# Import everything from each submodule so that it can be accessed via +# mongoengine, e.g. instead of `from mongoengine.connection import connect`, +# users can simply use `from mongoengine import connect`, or even +# `from mongoengine import *` and then `connect('testdb')`. +from mongoengine.connection import * +from mongoengine.document import * +from mongoengine.errors import * +from mongoengine.fields import * +from mongoengine.queryset import * +from mongoengine.signals import * -VERSION = (0, 10, 0) + +__all__ = (list(document.__all__) + list(fields.__all__) + + list(connection.__all__) + list(queryset.__all__) + + list(signals.__all__) + list(errors.__all__)) + + +VERSION = (0, 11, 0) def get_version(): - if isinstance(VERSION[-1], basestring): - return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] + """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7), + return '0.10.7'. + """ return '.'.join(map(str, VERSION)) + __version__ = get_version() diff --git a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py index e8d4b6ad..da31b922 100644 --- a/mongoengine/base/__init__.py +++ b/mongoengine/base/__init__.py @@ -1,8 +1,28 @@ +# Base module is split into several files for convenience. Files inside of +# this module should import from a specific submodule (e.g. +# `from mongoengine.base.document import BaseDocument`), but all of the +# other modules should import directly from the top-level module (e.g. +# `from mongoengine.base import BaseDocument`). This approach is cleaner and +# also helps with cyclical import errors.
from mongoengine.base.common import * from mongoengine.base.datastructures import * from mongoengine.base.document import * from mongoengine.base.fields import * from mongoengine.base.metaclasses import * -# Help with backwards compatibility -from mongoengine.errors import * +__all__ = ( + # common + 'UPDATE_OPERATORS', '_document_registry', 'get_document', + + # datastructures + 'BaseDict', 'BaseList', 'EmbeddedDocumentList', + + # document + 'BaseDocument', + + # fields + 'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField', + + # metaclasses + 'DocumentMetaclass', 'TopLevelDocumentMetaclass' +) diff --git a/mongoengine/base/common.py b/mongoengine/base/common.py index 3a966c79..b9971ff7 100644 --- a/mongoengine/base/common.py +++ b/mongoengine/base/common.py @@ -1,13 +1,18 @@ from mongoengine.errors import NotRegistered -__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry') +__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') + + +UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', + 'push_all', 'pull', 'pull_all', 'add_to_set', + 'set_on_insert', 'min', 'max', 'rename']) -ALLOW_INHERITANCE = False _document_registry = {} def get_document(name): + """Get a document class by name.""" doc = _document_registry.get(name, None) if not doc: # Possible old style name diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index e4d2b392..b9aca8fa 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,14 +1,16 @@ -import weakref import itertools +import weakref + +import six from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned -__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList") +__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList') class BaseDict(dict): - """A special dict so we can watch any changes""" + """A special dict so we can watch any changes.""" _dereferenced = False _instance = None @@ -93,8 +95,7 @@ class BaseDict(dict): class BaseList(list): - """A special list so we can watch any changes - """ + """A special list so we can watch any changes.""" _dereferenced = False _instance = None @@ -137,10 +138,7 @@ class BaseList(list): return super(BaseList, self).__setitem__(key, value) def __delitem__(self, key, *args, **kwargs): - if isinstance(key, slice): - self._mark_as_changed() - else: - self._mark_as_changed(key) + self._mark_as_changed() return super(BaseList, self).__delitem__(key) def __setslice__(self, *args, **kwargs): @@ -199,7 +197,9 @@ class BaseList(list): def _mark_as_changed(self, key=None): if hasattr(self._instance, '_mark_as_changed'): if key: - self._instance._mark_as_changed('%s.%s' % (self._name, key)) + self._instance._mark_as_changed( + '%s.%s' % (self._name, key % len(self)) + ) else: self._instance._mark_as_changed(self._name) @@ -207,17 +207,22 @@ class BaseList(list): class EmbeddedDocumentList(BaseList): @classmethod - def __match_all(cls, i, kwargs): - items = kwargs.items() - return all([ - getattr(i, k) == v or str(getattr(i, k)) == v for k, v in items - ]) + def __match_all(cls, embedded_doc, kwargs): + """Return True if a given embedded doc matches all the filter + kwargs. If it doesn't return False. 
+ """ + for key, expected_value in kwargs.items(): + doc_val = getattr(embedded_doc, key) + if doc_val != expected_value and six.text_type(doc_val) != expected_value: + return False + return True @classmethod - def __only_matches(cls, obj, kwargs): + def __only_matches(cls, embedded_docs, kwargs): + """Return embedded docs that match the filter kwargs.""" if not kwargs: - return obj - return filter(lambda i: cls.__match_all(i, kwargs), obj) + return embedded_docs + return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] def __init__(self, list_items, instance, name): super(EmbeddedDocumentList, self).__init__(list_items, instance, name) @@ -283,18 +288,18 @@ class EmbeddedDocumentList(BaseList): values = self.__only_matches(self, kwargs) if len(values) == 0: raise DoesNotExist( - "%s matching query does not exist." % self._name + '%s matching query does not exist.' % self._name ) elif len(values) > 1: raise MultipleObjectsReturned( - "%d items returned, instead of 1" % len(values) + '%d items returned, instead of 1' % len(values) ) return values[0] def first(self): - """ - Returns the first embedded document in the list, or ``None`` if empty. + """Return the first embedded document in the list, or ``None`` + if empty. """ if len(self) > 0: return self[0] @@ -424,7 +429,7 @@ class StrictDict(object): def __eq__(self, other): return self.items() == other.items() - def __neq__(self, other): + def __ne__(self, other): return self.items() != other.items() @classmethod @@ -436,7 +441,7 @@ class StrictDict(object): __slots__ = allowed_keys_tuple def __repr__(self): - return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k) for k in self.iterkeys()) + return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items()) cls._classes[allowed_keys] = SpecificStrictDict return cls._classes[allowed_keys] diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index ee807cb4..9d366706 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -1,37 +1,33 @@ import copy -import operator import numbers from collections import Hashable from functools import partial -import pymongo -from bson import json_util, ObjectId +from bson import ObjectId, json_util from bson.dbref import DBRef from bson.son import SON +import pymongo +import six from mongoengine import signals -from mongoengine.common import _import_class -from mongoengine.errors import (ValidationError, InvalidDocumentError, - LookUpError, FieldDoesNotExist) -from mongoengine.python_support import PY3, txt_type -from mongoengine.base.common import get_document, ALLOW_INHERITANCE -from mongoengine.base.datastructures import ( - BaseDict, - BaseList, - EmbeddedDocumentList, - StrictDict, - SemiStrictDict -) +from mongoengine.base.common import get_document +from mongoengine.base.datastructures import (BaseDict, BaseList, + EmbeddedDocumentList, + SemiStrictDict, StrictDict) from mongoengine.base.fields import ComplexBaseField +from mongoengine.common import _import_class +from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, + LookUpError, OperationError, ValidationError) -__all__ = ('BaseDocument', 'NON_FIELD_ERRORS') +__all__ = ('BaseDocument',) NON_FIELD_ERRORS = '__all__' class BaseDocument(object): __slots__ = ('_changed_fields', '_initialised', '_created', '_data', - '_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__') + '_dynamic_fields', '_auto_id_field', '_db_field_map', + '__weakref__') _dynamic = False _dynamic_lock = True @@ -51,33 +47,34 @@ 
class BaseDocument(object): # We only want named arguments. field = iter(self._fields_ordered) # If its an automatic id field then skip to the first defined field - if self._auto_id_field: + if getattr(self, '_auto_id_field', False): next(field) for value in args: name = next(field) if name in values: raise TypeError( - "Multiple values for keyword argument '" + name + "'") + 'Multiple values for keyword argument "%s"' % name) values[name] = value - __auto_convert = values.pop("__auto_convert", True) + __auto_convert = values.pop('__auto_convert', True) # 399: set default values only to fields loaded from DB - __only_fields = set(values.pop("__only_fields", values)) + __only_fields = set(values.pop('__only_fields', values)) - _created = values.pop("_created", True) + _created = values.pop('_created', True) signals.pre_init.send(self.__class__, document=self, values=values) # Check if there are undefined fields supplied to the constructor, # if so raise an Exception. if not self._dynamic and (self._meta.get('strict', True) or _created): - for var in values.keys(): - if var not in self._fields.keys() + ['id', 'pk', '_cls', '_text_score']: - msg = ( - "The field '{0}' does not exist on the document '{1}'" - ).format(var, self._class_name) - raise FieldDoesNotExist(msg) + _undefined_fields = set(values.keys()) - set( + self._fields.keys() + ['id', 'pk', '_cls', '_text_score']) + if _undefined_fields: + msg = ( + 'The fields "{0}" do not exist on the document "{1}"' + ).format(_undefined_fields, self._class_name) + raise FieldDoesNotExist(msg) if self.STRICT and not self._dynamic: self._data = StrictDict.create(allowed_keys=self._fields_ordered)() @@ -85,7 +82,6 @@ class BaseDocument(object): self._data = SemiStrictDict.create( allowed_keys=self._fields_ordered)() - self._data = {} self._dynamic_fields = SON() # Assign default values to instance @@ -95,7 +91,7 @@ class BaseDocument(object): value = getattr(self, key, None) setattr(self, key, value) - if "_cls" not in values: + if '_cls' not in values: self._cls = self._class_name # Set passed values after initialisation @@ -121,7 +117,7 @@ class BaseDocument(object): else: self._data[key] = value - # Set any get_fieldname_display methods + # Set any get__display methods self.__set_field_display() if self._dynamic: @@ -150,7 +146,7 @@ class BaseDocument(object): if self._dynamic and not self._dynamic_lock: if not hasattr(self, name) and not name.startswith('_'): - DynamicField = _import_class("DynamicField") + DynamicField = _import_class('DynamicField') field = DynamicField(db_field=name) field.name = name self._dynamic_fields[name] = field @@ -169,11 +165,13 @@ class BaseDocument(object): except AttributeError: self__created = True - if (self._is_document and not self__created and - name in self._meta.get('shard_key', tuple()) and - self._data.get(name) != value): - OperationError = _import_class('OperationError') - msg = "Shard Keys are immutable. Tried to update %s" % name + if ( + self._is_document and + not self__created and + name in self._meta.get('shard_key', tuple()) and + self._data.get(name) != value + ): + msg = 'Shard Keys are immutable. 
Tried to update %s' % name raise OperationError(msg) try: @@ -197,8 +195,8 @@ class BaseDocument(object): return data def __setstate__(self, data): - if isinstance(data["_data"], SON): - data["_data"] = self.__class__._from_son(data["_data"])._data + if isinstance(data['_data'], SON): + data['_data'] = self.__class__._from_son(data['_data'])._data for k in ('_changed_fields', '_initialised', '_created', '_data', '_dynamic_fields'): if k in data: @@ -212,7 +210,7 @@ class BaseDocument(object): dynamic_fields = data.get('_dynamic_fields') or SON() for k in dynamic_fields.keys(): - setattr(self, k, data["_data"].get(k)) + setattr(self, k, data['_data'].get(k)) def __iter__(self): return iter(self._fields_ordered) @@ -254,12 +252,13 @@ class BaseDocument(object): return repr_type('<%s: %s>' % (self.__class__.__name__, u)) def __str__(self): + # TODO this could be simpler? if hasattr(self, '__unicode__'): - if PY3: + if six.PY3: return self.__unicode__() else: - return unicode(self).encode('utf-8') - return txt_type('%s object' % self.__class__.__name__) + return six.text_type(self).encode('utf-8') + return six.text_type('%s object' % self.__class__.__name__) def __eq__(self, other): if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None: @@ -308,9 +307,9 @@ class BaseDocument(object): fields = [] data = SON() - data["_id"] = None + data['_id'] = None data['_cls'] = self._class_name - EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] root_fields = set([f.split('.')[0] for f in fields]) @@ -325,21 +324,20 @@ class BaseDocument(object): field = self._dynamic_fields.get(field_name) if value is not None: + f_inputs = field.to_mongo.__code__.co_varnames + ex_vars = {} + if fields and 'fields' in f_inputs: + key = '%s.' % field_name + embedded_fields = [ + i.replace(key, '') for i in fields + if i.startswith(key)] - if isinstance(field, EmbeddedDocumentField): - if fields: - key = '%s.' 
% field_name - embedded_fields = [ - i.replace(key, '') for i in fields - if i.startswith(key)] + ex_vars['fields'] = embedded_fields - else: - embedded_fields = [] + if 'use_db_field' in f_inputs: + ex_vars['use_db_field'] = use_db_field - value = field.to_mongo(value, use_db_field=use_db_field, - fields=embedded_fields) - else: - value = field.to_mongo(value) + value = field.to_mongo(value, **ex_vars) # Handle self generating fields if value is None and field._auto_gen: @@ -352,18 +350,8 @@ class BaseDocument(object): else: data[field.name] = value - # If "_id" has not been set, then try and set it - Document = _import_class("Document") - if isinstance(self, Document): - if data["_id"] is None: - data["_id"] = self._data.get("id", None) - - if data['_id'] is None: - data.pop('_id') - # Only add _cls if allow_inheritance is True - if (not hasattr(self, '_meta') or - not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)): + if not self._meta.get('allow_inheritance'): data.pop('_cls') return data @@ -377,16 +365,16 @@ class BaseDocument(object): if clean: try: self.clean() - except ValidationError, error: + except ValidationError as error: errors[NON_FIELD_ERRORS] = error # Get a list of tuples of field names and their current values fields = [(self._fields.get(name, self._dynamic_fields.get(name)), self._data.get(name)) for name in self._fields_ordered] - EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + EmbeddedDocumentField = _import_class('EmbeddedDocumentField') GenericEmbeddedDocumentField = _import_class( - "GenericEmbeddedDocumentField") + 'GenericEmbeddedDocumentField') for field, value in fields: if value is not None: @@ -396,27 +384,29 @@ class BaseDocument(object): field._validate(value, clean=clean) else: field._validate(value) - except ValidationError, error: + except ValidationError as error: errors[field.name] = error.errors or error - except (ValueError, AttributeError, AssertionError), error: + except (ValueError, AttributeError, AssertionError) as error: errors[field.name] = error elif field.required and not getattr(field, '_auto_gen', False): errors[field.name] = ValidationError('Field is required', field_name=field.name) if errors: - pk = "None" + pk = 'None' if hasattr(self, 'pk'): pk = self.pk elif self._instance and hasattr(self._instance, 'pk'): pk = self._instance.pk - message = "ValidationError (%s:%s) " % (self._class_name, pk) + message = 'ValidationError (%s:%s) ' % (self._class_name, pk) raise ValidationError(message, errors=errors) def to_json(self, *args, **kwargs): - """Converts a document to JSON. - :param use_db_field: Set to True by default but enables the output of the json structure with the field names - and not the mongodb store db_names in case of set to False + """Convert this document to JSON. + + :param use_db_field: Serialize field names as they appear in + MongoDB (as opposed to attribute names on this document). + Defaults to True. 
""" use_db_field = kwargs.pop('use_db_field', True) return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) @@ -427,33 +417,26 @@ class BaseDocument(object): return cls._from_son(json_util.loads(json_data), created=created) def __expand_dynamic_values(self, name, value): - """expand any dynamic values to their correct types / values""" + """Expand any dynamic values to their correct types / values.""" if not isinstance(value, (dict, list, tuple)): return value - EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') - - is_list = False - if not hasattr(value, 'items'): - is_list = True - value = dict([(k, v) for k, v in enumerate(value)]) - - if not is_list and '_cls' in value: + # If the value is a dict with '_cls' in it, turn it into a document + is_dict = isinstance(value, dict) + if is_dict and '_cls' in value: cls = get_document(value['_cls']) return cls(**value) - data = {} - for k, v in value.items(): - key = name if is_list else k - data[k] = self.__expand_dynamic_values(key, v) - - if is_list: # Convert back to a list - data_items = sorted(data.items(), key=operator.itemgetter(0)) - value = [v for k, v in data_items] + if is_dict: + value = { + k: self.__expand_dynamic_values(k, v) + for k, v in value.items() + } else: - value = data + value = [self.__expand_dynamic_values(name, v) for v in value] # Convert lists / values so we can watch for any changes on them + EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') if (isinstance(value, (list, tuple)) and not isinstance(value, BaseList)): if issubclass(type(self), EmbeddedDocumentListField): @@ -466,8 +449,7 @@ class BaseDocument(object): return value def _mark_as_changed(self, key): - """Marks a key as explicitly changed by the user - """ + """Mark a key as explicitly changed by the user.""" if not key: return @@ -492,15 +474,16 @@ class BaseDocument(object): # remove lower level changed fields level = '.'.join(levels[:idx]) + '.' remove = self._changed_fields.remove - for field in self._changed_fields: + for field in self._changed_fields[:]: if field.startswith(level): remove(field) def _clear_changed_fields(self): - """Using get_changed_fields iterate and remove any fields that are - marked as changed""" + """Using _get_changed_fields iterate and remove any fields that + are marked as changed. + """ for changed in self._get_changed_fields(): - parts = changed.split(".") + parts = changed.split('.') data = self for part in parts: if isinstance(data, list): @@ -512,10 +495,13 @@ class BaseDocument(object): data = data.get(part, None) else: data = getattr(data, part, None) - if hasattr(data, "_changed_fields"): - if hasattr(data, "_is_document") and data._is_document: + + if hasattr(data, '_changed_fields'): + if getattr(data, '_is_document', False): continue + data._changed_fields = [] + self._changed_fields = [] def _nestable_types_changed_fields(self, changed_fields, key, data, inspected): @@ -527,26 +513,27 @@ class BaseDocument(object): iterator = data.iteritems() for index, value in iterator: - list_key = "%s%s." % (key, index) + list_key = '%s%s.' % (key, index) # don't check anything lower if this key is already marked # as changed. 
if list_key[:-1] in changed_fields: continue if hasattr(value, '_get_changed_fields'): changed = value._get_changed_fields(inspected) - changed_fields += ["%s%s" % (list_key, k) + changed_fields += ['%s%s' % (list_key, k) for k in changed if k] elif isinstance(value, (list, tuple, dict)): self._nestable_types_changed_fields( changed_fields, list_key, value, inspected) def _get_changed_fields(self, inspected=None): - """Returns a list of all fields that have explicitly been changed. + """Return a list of all fields that have explicitly been changed. """ - EmbeddedDocument = _import_class("EmbeddedDocument") - DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument") - ReferenceField = _import_class("ReferenceField") - SortedListField = _import_class("SortedListField") + EmbeddedDocument = _import_class('EmbeddedDocument') + DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument') + ReferenceField = _import_class('ReferenceField') + SortedListField = _import_class('SortedListField') + changed_fields = [] changed_fields += getattr(self, '_changed_fields', []) @@ -567,11 +554,13 @@ class BaseDocument(object): continue if isinstance(field, ReferenceField): continue - elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) - and db_field_name not in changed_fields): + elif ( + isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and + db_field_name not in changed_fields + ): # Find all embedded fields that have been changed changed = data._get_changed_fields(inspected) - changed_fields += ["%s%s" % (key, k) for k in changed if k] + changed_fields += ['%s%s' % (key, k) for k in changed if k] elif (isinstance(data, (list, tuple, dict)) and db_field_name not in changed_fields): if (hasattr(field, 'field') and @@ -607,7 +596,9 @@ class BaseDocument(object): for p in parts: if isinstance(d, (ObjectId, DBRef)): break - elif isinstance(d, list) and p.isdigit(): + elif isinstance(d, list) and p.lstrip('-').isdigit(): + if p[0] == '-': + p = str(len(d) + int(p)) try: d = d[int(p)] except IndexError: @@ -641,7 +632,9 @@ class BaseDocument(object): parts = path.split('.') db_field_name = parts.pop() for p in parts: - if isinstance(d, list) and p.isdigit(): + if isinstance(d, list) and p.lstrip('-').isdigit(): + if p[0] == '-': + p = str(len(d) + int(p)) d = d[int(p)] elif (hasattr(d, '__getattribute__') and not isinstance(d, dict)): @@ -671,21 +664,28 @@ class BaseDocument(object): @classmethod def _get_collection_name(cls): - """Returns the collection name for this class. None for abstract class + """Return the collection name for this class. None for abstract + class. """ return cls._meta.get('collection', None) @classmethod def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): - """Create an instance of a Document (subclass) from a PyMongo SON. + """Create an instance of a Document (subclass) from a PyMongo + SON. 
""" if not only_fields: only_fields = [] - # get the class name from the document, falling back to the given + if son and not isinstance(son, dict): + raise ValueError("The source SON object needs to be of type 'dict'") + + # Get the class name from the document, falling back to the given # class if unavailable class_name = son.get('_cls', cls._class_name) - data = dict(("%s" % key, value) for key, value in son.iteritems()) + + # Convert SON to a dict, making sure each key is a string + data = {str(key): value for key, value in son.iteritems()} # Return correct subclass for document type if class_name != cls._class_name: @@ -707,27 +707,20 @@ class BaseDocument(object): else field.to_python(value)) if field_name != field.db_field: del data[field.db_field] - except (AttributeError, ValueError), e: + except (AttributeError, ValueError) as e: errors_dict[field_name] = e - elif field.default: - default = field.default - if callable(default): - default = default() - if isinstance(default, BaseDocument): - changed_fields.append(field_name) - elif not only_fields or field_name in only_fields: - changed_fields.append(field_name) if errors_dict: - errors = "\n".join(["%s - %s" % (k, v) + errors = '\n'.join(['%s - %s' % (k, v) for k, v in errors_dict.items()]) - msg = ("Invalid data to create a `%s` instance.\n%s" + msg = ('Invalid data to create a `%s` instance.\n%s' % (cls._class_name, errors)) raise InvalidDocumentError(msg) + # In STRICT documents, remove any keys that aren't in cls._fields if cls.STRICT: - data = dict((k, v) - for k, v in data.iteritems() if k in cls._fields) + data = {k: v for k, v in data.iteritems() if k in cls._fields} + obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data) obj._changed_fields = changed_fields if not _auto_dereference: @@ -737,37 +730,43 @@ class BaseDocument(object): @classmethod def _build_index_specs(cls, meta_indexes): - """Generate and merge the full index specs - """ - + """Generate and merge the full index specs.""" geo_indices = cls._geo_indices() unique_indices = cls._unique_with_indexes() - index_specs = [cls._build_index_spec(spec) - for spec in meta_indexes] + index_specs = [cls._build_index_spec(spec) for spec in meta_indexes] def merge_index_specs(index_specs, indices): + """Helper method for merging index specs.""" if not indices: return index_specs - spec_fields = [v['fields'] - for k, v in enumerate(index_specs)] - # Merge unique_indexes with existing specs - for k, v in enumerate(indices): - if v['fields'] in spec_fields: - index_specs[spec_fields.index(v['fields'])].update(v) + # Create a map of index fields to index spec. We're converting + # the fields from a list to a tuple so that it's hashable. + spec_fields = { + tuple(index['fields']): index for index in index_specs + } + + # For each new index, if there's an existing index with the same + # fields list, update the existing spec with all data from the + # new spec. + for new_index in indices: + candidate = spec_fields.get(tuple(new_index['fields'])) + if candidate is None: + index_specs.append(new_index) else: - index_specs.append(v) + candidate.update(new_index) + return index_specs + # Merge geo indexes and unique_with indexes into the meta index specs. index_specs = merge_index_specs(index_specs, geo_indices) index_specs = merge_index_specs(index_specs, unique_indices) return index_specs @classmethod def _build_index_spec(cls, spec): - """Build a PyMongo index spec from a MongoEngine index spec. 
- """ - if isinstance(spec, basestring): + """Build a PyMongo index spec from a MongoEngine index spec.""" + if isinstance(spec, six.string_types): spec = {'fields': [spec]} elif isinstance(spec, (list, tuple)): spec = {'fields': list(spec)} @@ -778,14 +777,17 @@ class BaseDocument(object): direction = None # Check to see if we need to include _cls - allow_inheritance = cls._meta.get('allow_inheritance', - ALLOW_INHERITANCE) - include_cls = (allow_inheritance and not spec.get('sparse', False) and - spec.get('cls', True) and '_cls' not in spec['fields']) + allow_inheritance = cls._meta.get('allow_inheritance') + include_cls = ( + allow_inheritance and + not spec.get('sparse', False) and + spec.get('cls', True) and + '_cls' not in spec['fields'] + ) # 733: don't include cls if index_cls is False unless there is an explicit cls with the index include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True)) - if "cls" in spec: + if 'cls' in spec: spec.pop('cls') for key in spec['fields']: # If inherited spec continue @@ -800,19 +802,19 @@ class BaseDocument(object): # GEOHAYSTACK from ) # GEO2D from * direction = pymongo.ASCENDING - if key.startswith("-"): + if key.startswith('-'): direction = pymongo.DESCENDING - elif key.startswith("$"): + elif key.startswith('$'): direction = pymongo.TEXT - elif key.startswith("#"): + elif key.startswith('#'): direction = pymongo.HASHED - elif key.startswith("("): + elif key.startswith('('): direction = pymongo.GEOSPHERE - elif key.startswith(")"): + elif key.startswith(')'): direction = pymongo.GEOHAYSTACK - elif key.startswith("*"): + elif key.startswith('*'): direction = pymongo.GEO2D - if key.startswith(("+", "-", "*", "$", "#", "(", ")")): + if key.startswith(('+', '-', '*', '$', '#', '(', ')')): key = key[1:] # Use real field name, do it manually because we need field @@ -825,7 +827,7 @@ class BaseDocument(object): parts = [] for field in fields: try: - if field != "_id": + if field != '_id': field = field.db_field except AttributeError: pass @@ -840,57 +842,57 @@ class BaseDocument(object): if index_list: spec['fields'] = index_list - if spec.get('sparse', False) and len(spec['fields']) > 1: - raise ValueError( - 'Sparse indexes can only have one field in them. 
' - 'See https://jira.mongodb.org/browse/SERVER-2193') return spec @classmethod - def _unique_with_indexes(cls, namespace=""): - """ - Find and set unique indexes - """ + def _unique_with_indexes(cls, namespace=''): + """Find unique indexes in the document schema and return them.""" unique_indexes = [] for field_name, field in cls._fields.items(): sparse = field.sparse + # Generate a list of indexes needed by uniqueness constraints if field.unique: unique_fields = [field.db_field] # Add any unique_with fields to the back of the index spec if field.unique_with: - if isinstance(field.unique_with, basestring): + if isinstance(field.unique_with, six.string_types): field.unique_with = [field.unique_with] # Convert unique_with field names to real field names unique_with = [] for other_name in field.unique_with: parts = other_name.split('.') + # Lookup real name parts = cls._lookup_field(parts) name_parts = [part.db_field for part in parts] unique_with.append('.'.join(name_parts)) + # Unique field should be required parts[-1].required = True sparse = (not sparse and parts[-1].name not in cls.__dict__) + unique_fields += unique_with # Add the new index to the list - fields = [("%s%s" % (namespace, f), pymongo.ASCENDING) - for f in unique_fields] + fields = [ + ('%s%s' % (namespace, f), pymongo.ASCENDING) + for f in unique_fields + ] index = {'fields': fields, 'unique': True, 'sparse': sparse} unique_indexes.append(index) - if field.__class__.__name__ == "ListField": + if field.__class__.__name__ == 'ListField': field = field.field # Grab any embedded document field unique indexes - if (field.__class__.__name__ == "EmbeddedDocumentField" and + if (field.__class__.__name__ == 'EmbeddedDocumentField' and field.document_type != cls): - field_namespace = "%s." % field_name + field_namespace = '%s.' % field_name doc_cls = field.document_type unique_indexes += doc_cls._unique_with_indexes(field_namespace) @@ -902,8 +904,9 @@ class BaseDocument(object): geo_indices = [] inspected.append(cls) - geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField", - "PointField", "LineStringField", "PolygonField"] + geo_field_type_names = ('EmbeddedDocumentField', 'GeoPointField', + 'PointField', 'LineStringField', + 'PolygonField') geo_field_types = tuple([_import_class(field) for field in geo_field_type_names]) @@ -911,32 +914,68 @@ class BaseDocument(object): for field in cls._fields.values(): if not isinstance(field, geo_field_types): continue + if hasattr(field, 'document_type'): field_cls = field.document_type if field_cls in inspected: continue + if hasattr(field_cls, '_geo_indices'): geo_indices += field_cls._geo_indices( inspected, parent_field=field.db_field) elif field._geo_index: field_name = field.db_field if parent_field: - field_name = "%s.%s" % (parent_field, field_name) - geo_indices.append({'fields': - [(field_name, field._geo_index)]}) + field_name = '%s.%s' % (parent_field, field_name) + geo_indices.append({ + 'fields': [(field_name, field._geo_index)] + }) + return geo_indices @classmethod def _lookup_field(cls, parts): - """Lookup a field based on its attribute and return a list containing - the field's parents and the field. - """ + """Given the path to a given field, return a list containing + the Field object associated with that field and all of its parent + Field objects. - ListField = _import_class("ListField") + Args: + parts (str, list, or tuple) - path to the field. 
Should be a + string for simple fields existing on this document or a list + of strings for a field that exists deeper in embedded documents. + + Returns: + A list of Field instances for fields that were found or + strings for sub-fields that weren't. + + Example: + >>> user._lookup_field('name') + [] + + >>> user._lookup_field('roles') + [] + + >>> user._lookup_field(['roles', 'role']) + [, + ] + + >>> user._lookup_field('doesnt_exist') + raises LookUpError + + >>> user._lookup_field(['roles', 'doesnt_exist']) + [, + 'doesnt_exist'] + + """ + # TODO this method is WAY too complicated. Simplify it. + # TODO don't think returning a string for embedded non-existent fields is desired + + ListField = _import_class('ListField') DynamicField = _import_class('DynamicField') if not isinstance(parts, (list, tuple)): parts = [parts] + fields = [] field = None @@ -946,16 +985,17 @@ class BaseDocument(object): fields.append(field_name) continue + # Look up first field from the document if field is None: - # Look up first field from the document if field_name == 'pk': # Deal with "primary key" alias field_name = cls._meta['id_field'] + if field_name in cls._fields: field = cls._fields[field_name] elif cls._dynamic: field = DynamicField(db_field=field_name) - elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False): + elif cls._meta.get('allow_inheritance') or cls._meta.get('abstract', False): # 744: in case the field is defined in a subclass for subcls in cls.__subclasses__(): try: @@ -968,35 +1008,55 @@ class BaseDocument(object): else: raise LookUpError('Cannot resolve field "%s"' % field_name) else: - raise LookUpError('Cannot resolve field "%s"' - % field_name) + raise LookUpError('Cannot resolve field "%s"' % field_name) else: ReferenceField = _import_class('ReferenceField') GenericReferenceField = _import_class('GenericReferenceField') + + # If previous field was a reference, throw an error (we + # cannot look up fields that are on references). if isinstance(field, (ReferenceField, GenericReferenceField)): raise LookUpError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) + + # If the parent field has a "field" attribute which has a + # lookup_member method, call it to find the field + # corresponding to this iteration. if hasattr(getattr(field, 'field', None), 'lookup_member'): new_field = field.field.lookup_member(field_name) + + # If the parent field is a DynamicField or if it's part of + # a DynamicDocument, mark current field as a DynamicField + # with db_name equal to the field name. elif cls._dynamic and (isinstance(field, DynamicField) or - getattr(getattr(field, 'document_type'), '_dynamic')): + getattr(getattr(field, 'document_type', None), '_dynamic', None)): new_field = DynamicField(db_field=field_name) + + # Else, try to use the parent field's lookup_member method + # to find the subfield. + elif hasattr(field, 'lookup_member'): + new_field = field.lookup_member(field_name) + + # Raise a LookUpError if all the other conditions failed. else: - # Look up subfield on the previous field or raise - try: - new_field = field.lookup_member(field_name) - except AttributeError: - raise LookUpError('Cannot resolve subfield or operator {} ' - 'on the field {}'.format( - field_name, field.name)) + raise LookUpError( + 'Cannot resolve subfield or operator {} ' + 'on the field {}'.format(field_name, field.name) + ) + + # If current field still wasn't found and the parent field + # is a ComplexBaseField, add the name current field name and + # move on. 
if not new_field and isinstance(field, ComplexBaseField): fields.append(field_name) continue elif not new_field: - raise LookUpError('Cannot resolve field "%s"' - % field_name) + raise LookUpError('Cannot resolve field "%s"' % field_name) + field = new_field # update field to the new field type + fields.append(field) + return fields @classmethod @@ -1008,19 +1068,18 @@ class BaseDocument(object): return '.'.join(parts) def __set_field_display(self): - """Dynamically set the display value for a field with choices""" - for attr_name, field in self._fields.items(): - if field.choices: - if self._dynamic: - obj = self - else: - obj = type(self) - setattr(obj, - 'get_%s_display' % attr_name, - partial(self.__get_field_display, field=field)) + """For each field that specifies choices, create a + get__display method. + """ + fields_with_choices = [(n, f) for n, f in self._fields.items() + if f.choices] + for attr_name, field in fields_with_choices: + setattr(self, + 'get_%s_display' % attr_name, + partial(self.__get_field_display, field=field)) def __get_field_display(self, field): - """Returns the display value for a choice field""" + """Return the display value for a choice field""" value = getattr(self, field.name) if field.choices and isinstance(field.choices[0], (list, tuple)): return dict(field.choices).get(value, value) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 304c084d..5658b185 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -4,21 +4,17 @@ import weakref from bson import DBRef, ObjectId, SON import pymongo +import six +from mongoengine.base.common import UPDATE_OPERATORS +from mongoengine.base.datastructures import (BaseDict, BaseList, + EmbeddedDocumentList) from mongoengine.common import _import_class from mongoengine.errors import ValidationError -from mongoengine.base.common import ALLOW_INHERITANCE -from mongoengine.base.datastructures import ( - BaseDict, BaseList, EmbeddedDocumentList -) - -__all__ = ("BaseField", "ComplexBaseField", - "ObjectIdField", "GeoJsonBaseField") -UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', - 'push_all', 'pull', 'pull_all', 'add_to_set', - 'set_on_insert', 'min', 'max']) +__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', + 'GeoJsonBaseField') class BaseField(object): @@ -27,7 +23,6 @@ class BaseField(object): .. versionchanged:: 0.5 - added verbose and help text """ - name = None _geo_index = False _auto_gen = False # Call `generate` to generate a value @@ -41,12 +36,12 @@ class BaseField(object): def __init__(self, db_field=None, name=None, required=False, default=None, unique=False, unique_with=None, primary_key=False, - validation=None, choices=None, verbose_name=None, - help_text=None, null=False, sparse=False, custom_data=None): + validation=None, choices=None, null=False, sparse=False, + **kwargs): """ :param db_field: The database field to store this field in (defaults to the name of the field) - :param name: Depreciated - use db_field + :param name: Deprecated - use db_field :param required: If the field is required. Whether it has to have a value or not. Defaults to False. :param default: (optional) The default value for this field if no value @@ -60,21 +55,20 @@ class BaseField(object): field. Generally this is deprecated in favour of the `FIELD.validate` method :param choices: (optional) The valid choices - :param verbose_name: (optional) The verbose name for the field. 
- Designed to be human readable and is often used when generating - model forms from the document model. - :param help_text: (optional) The help text for this field and is often - used when generating model forms from the document model. :param null: (optional) Is the field value can be null. If no and there is a default value then the default value is set :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` means that uniqueness won't be enforced for `None` values - :param custom_data: (optional) Custom metadata for this field. + :param **kwargs: (optional) Arbitrary indirection-free metadata for + this field can be supplied as additional keyword arguments and + accessed as attributes of the field. Must not conflict with any + existing attributes. Common metadata includes `verbose_name` and + `help_text`. """ self.db_field = (db_field or name) if not primary_key else '_id' if name: - msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" + msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' warnings.warn(msg, DeprecationWarning) self.required = required or primary_key self.default = default @@ -83,12 +77,30 @@ class BaseField(object): self.primary_key = primary_key self.validation = validation self.choices = choices - self.verbose_name = verbose_name - self.help_text = help_text self.null = null self.sparse = sparse self._owner_document = None - self.custom_data = custom_data + + # Validate the db_field + if isinstance(self.db_field, six.string_types) and ( + '.' in self.db_field or + '\0' in self.db_field or + self.db_field.startswith('$') + ): + raise ValueError( + 'field names cannot contain dots (".") or null characters ' + '("\\0"), and they must not start with a dollar sign ("$").' + ) + + # Detect and report conflicts between metadata and base properties. + conflicts = set(dir(self)) & set(kwargs) + if conflicts: + raise TypeError('%s already has attribute(s): %s' % ( + self.__class__.__name__, ', '.join(conflicts))) + + # Assign metadata to the instance + # This efficient method is available because no __slots__ are defined. + self.__dict__.update(kwargs) # Adjust the appropriate creation counter, and save our local copy. if self.db_field == '_id': @@ -127,7 +139,7 @@ class BaseField(object): if (self.name not in instance._data or instance._data[self.name] != value): instance._mark_as_changed(self.name) - except: + except Exception: # Values cant be compared eg: naive and tz datetimes # So mark it as changed instance._mark_as_changed(self.name) @@ -135,34 +147,45 @@ class BaseField(object): EmbeddedDocument = _import_class('EmbeddedDocument') if isinstance(value, EmbeddedDocument): value._instance = weakref.proxy(instance) + elif isinstance(value, (list, tuple)): + for v in value: + if isinstance(v, EmbeddedDocument): + v._instance = weakref.proxy(instance) instance._data[self.name] = value - def error(self, message="", errors=None, field_name=None): - """Raises a ValidationError. - """ + def error(self, message='', errors=None, field_name=None): + """Raise a ValidationError.""" field_name = field_name if field_name else self.name raise ValidationError(message, errors=errors, field_name=field_name) def to_python(self, value): - """Convert a MongoDB-compatible type to a Python type. - """ + """Convert a MongoDB-compatible type to a Python type.""" return value def to_mongo(self, value): - """Convert a Python type to a MongoDB-compatible type. 
- """ + """Convert a Python type to a MongoDB-compatible type.""" return self.to_python(value) + def _to_mongo_safe_call(self, value, use_db_field=True, fields=None): + """Helper method to call to_mongo with proper inputs.""" + f_inputs = self.to_mongo.__code__.co_varnames + ex_vars = {} + if 'fields' in f_inputs: + ex_vars['fields'] = fields + + if 'use_db_field' in f_inputs: + ex_vars['use_db_field'] = use_db_field + + return self.to_mongo(value, **ex_vars) + def prepare_query_value(self, op, value): - """Prepare a value that is being used in a query for PyMongo. - """ + """Prepare a value that is being used in a query for PyMongo.""" if op in UPDATE_OPERATORS: self.validate(value) return value def validate(self, value, clean=True): - """Perform validation on a value. - """ + """Perform validation on a value.""" pass def _validate_choices(self, value): @@ -170,19 +193,21 @@ class BaseField(object): EmbeddedDocument = _import_class('EmbeddedDocument') choice_list = self.choices - if isinstance(choice_list[0], (list, tuple)): + if isinstance(next(iter(choice_list)), (list, tuple)): + # next(iter) is useful for sets choice_list = [k for k, _ in choice_list] # Choices which are other types of Documents if isinstance(value, (Document, EmbeddedDocument)): if not any(isinstance(value, c) for c in choice_list): self.error( - 'Value must be instance of %s' % unicode(choice_list) + 'Value must be an instance of %s' % ( + six.text_type(choice_list) + ) ) # Choices which are types other than Documents elif value not in choice_list: - self.error('Value must be one of %s' % unicode(choice_list)) - + self.error('Value must be one of %s' % six.text_type(choice_list)) def _validate(self, value, **kwargs): # Check the Choices Constraint @@ -225,8 +250,7 @@ class ComplexBaseField(BaseField): field = None def __get__(self, instance, owner): - """Descriptor to automatically dereference references. - """ + """Descriptor to automatically dereference references.""" if instance is None: # Document class being used rather than a document object return self @@ -238,7 +262,7 @@ class ComplexBaseField(BaseField): (self.field is None or isinstance(self.field, (GenericReferenceField, ReferenceField)))) - _dereference = _import_class("DeReference")() + _dereference = _import_class('DeReference')() self._auto_dereference = instance._fields[self.name]._auto_dereference if instance._initialised and dereference and instance._data.get(self.name): @@ -273,11 +297,8 @@ class ComplexBaseField(BaseField): return value def to_python(self, value): - """Convert a MongoDB-compatible type to a Python type. 
- """ - Document = _import_class('Document') - - if isinstance(value, basestring): + """Convert a MongoDB-compatible type to a Python type.""" + if isinstance(value, six.string_types): return value if hasattr(value, 'to_python'): @@ -287,15 +308,16 @@ class ComplexBaseField(BaseField): if not hasattr(value, 'items'): try: is_list = True - value = dict([(k, v) for k, v in enumerate(value)]) + value = {k: v for k, v in enumerate(value)} except TypeError: # Not iterable return the value return value if self.field: self.field._auto_dereference = self._auto_dereference - value_dict = dict([(key, self.field.to_python(item)) - for key, item in value.items()]) + value_dict = {key: self.field.to_python(item) + for key, item in value.items()} else: + Document = _import_class('Document') value_dict = {} for k, v in value.items(): if isinstance(v, Document): @@ -315,21 +337,20 @@ class ComplexBaseField(BaseField): key=operator.itemgetter(0))] return value_dict - def to_mongo(self, value): - """Convert a Python type to a MongoDB-compatible type. - """ - Document = _import_class("Document") - EmbeddedDocument = _import_class("EmbeddedDocument") - GenericReferenceField = _import_class("GenericReferenceField") + def to_mongo(self, value, use_db_field=True, fields=None): + """Convert a Python type to a MongoDB-compatible type.""" + Document = _import_class('Document') + EmbeddedDocument = _import_class('EmbeddedDocument') + GenericReferenceField = _import_class('GenericReferenceField') - if isinstance(value, basestring): + if isinstance(value, six.string_types): return value if hasattr(value, 'to_mongo'): if isinstance(value, Document): return GenericReferenceField().to_mongo(value) cls = value.__class__ - val = value.to_mongo() + val = value.to_mongo(use_db_field, fields) # If it's a document that is not inherited add _cls if isinstance(value, EmbeddedDocument): val['_cls'] = cls.__name__ @@ -339,13 +360,15 @@ class ComplexBaseField(BaseField): if not hasattr(value, 'items'): try: is_list = True - value = dict([(k, v) for k, v in enumerate(value)]) + value = {k: v for k, v in enumerate(value)} except TypeError: # Not iterable return the value return value if self.field: - value_dict = dict([(key, self.field.to_mongo(item)) - for key, item in value.iteritems()]) + value_dict = { + key: self.field._to_mongo_safe_call(item, use_db_field, fields) + for key, item in value.iteritems() + } else: value_dict = {} for k, v in value.iteritems(): @@ -359,9 +382,7 @@ class ComplexBaseField(BaseField): # any _cls data so make it a generic reference allows # us to dereference meta = getattr(v, '_meta', {}) - allow_inheritance = ( - meta.get('allow_inheritance', ALLOW_INHERITANCE) - is True) + allow_inheritance = meta.get('allow_inheritance') if not allow_inheritance and not self.field: value_dict[k] = GenericReferenceField().to_mongo(v) else: @@ -369,13 +390,13 @@ class ComplexBaseField(BaseField): value_dict[k] = DBRef(collection, v.pk) elif hasattr(v, 'to_mongo'): cls = v.__class__ - val = v.to_mongo() + val = v.to_mongo(use_db_field, fields) # If it's a document that is not inherited add _cls if isinstance(v, (Document, EmbeddedDocument)): val['_cls'] = cls.__name__ value_dict[k] = val else: - value_dict[k] = self.to_mongo(v) + value_dict[k] = self.to_mongo(v, use_db_field, fields) if is_list: # Convert back to a list return [v for _, v in sorted(value_dict.items(), @@ -383,8 +404,7 @@ class ComplexBaseField(BaseField): return value_dict def validate(self, value): - """If field is provided ensure the value is valid. 
- """ + """If field is provided ensure the value is valid.""" errors = {} if self.field: if hasattr(value, 'iteritems') or hasattr(value, 'items'): @@ -394,9 +414,9 @@ class ComplexBaseField(BaseField): for k, v in sequence: try: self.field._validate(v) - except ValidationError, error: + except ValidationError as error: errors[k] = error.errors or error - except (ValueError, AssertionError), error: + except (ValueError, AssertionError) as error: errors[k] = error if errors: @@ -422,24 +442,23 @@ class ComplexBaseField(BaseField): class ObjectIdField(BaseField): - """A field wrapper around MongoDB's ObjectIds. - """ + """A field wrapper around MongoDB's ObjectIds.""" def to_python(self, value): try: if not isinstance(value, ObjectId): value = ObjectId(value) - except: + except Exception: pass return value def to_mongo(self, value): if not isinstance(value, ObjectId): try: - return ObjectId(unicode(value)) - except Exception, e: + return ObjectId(six.text_type(value)) + except Exception as e: # e.message attribute has been deprecated since Python 2.6 - self.error(unicode(e)) + self.error(six.text_type(e)) return value def prepare_query_value(self, op, value): @@ -447,8 +466,8 @@ class ObjectIdField(BaseField): def validate(self, value): try: - ObjectId(unicode(value)) - except: + ObjectId(six.text_type(value)) + except Exception: self.error('Invalid Object ID') @@ -459,21 +478,20 @@ class GeoJsonBaseField(BaseField): """ _geo_index = pymongo.GEOSPHERE - _type = "GeoBase" + _type = 'GeoBase' def __init__(self, auto_index=True, *args, **kwargs): """ - :param bool auto_index: Automatically create a "2dsphere" index.\ + :param bool auto_index: Automatically create a '2dsphere' index.\ Defaults to `True`. """ - self._name = "%sField" % self._type + self._name = '%sField' % self._type if not auto_index: self._geo_index = False super(GeoJsonBaseField, self).__init__(*args, **kwargs) def validate(self, value): - """Validate the GeoJson object based on its type - """ + """Validate the GeoJson object based on its type.""" if isinstance(value, dict): if set(value.keys()) == set(['type', 'coordinates']): if value['type'] != self._type: @@ -488,7 +506,7 @@ class GeoJsonBaseField(BaseField): self.error('%s can only accept lists of [x, y]' % self._name) return - validate = getattr(self, "_validate_%s" % self._type.lower()) + validate = getattr(self, '_validate_%s' % self._type.lower()) error = validate(value) if error: self.error(error) @@ -500,8 +518,8 @@ class GeoJsonBaseField(BaseField): # Quick and dirty validator try: value[0][0][0] - except: - return "Invalid Polygon must contain at least one valid linestring" + except (TypeError, IndexError): + return 'Invalid Polygon must contain at least one valid linestring' errors = [] for val in value: @@ -512,20 +530,20 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: if top_level: - return "Invalid Polygon:\n%s" % ", ".join(errors) + return 'Invalid Polygon:\n%s' % ', '.join(errors) else: - return "%s" % ", ".join(errors) + return '%s' % ', '.join(errors) def _validate_linestring(self, value, top_level=True): - """Validates a linestring""" + """Validate a linestring.""" if not isinstance(value, (list, tuple)): return 'LineStrings must contain list of coordinate pairs' # Quick and dirty validator try: value[0][0] - except: - return "Invalid LineString must contain at least one valid point" + except (TypeError, IndexError): + return 'Invalid LineString must contain at least one valid point' errors = [] for val in value: @@ -534,19 +552,19 
@@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: if top_level: - return "Invalid LineString:\n%s" % ", ".join(errors) + return 'Invalid LineString:\n%s' % ', '.join(errors) else: - return "%s" % ", ".join(errors) + return '%s' % ', '.join(errors) def _validate_point(self, value): """Validate each set of coords""" if not isinstance(value, (list, tuple)): return 'Points must be a list of coordinate pairs' elif not len(value) == 2: - return "Value (%s) must be a two-dimensional point" % repr(value) + return 'Value (%s) must be a two-dimensional point' % repr(value) elif (not isinstance(value[0], (float, int)) or not isinstance(value[1], (float, int))): - return "Both values (%s) in point must be float or int" % repr(value) + return 'Both values (%s) in point must be float or int' % repr(value) def _validate_multipoint(self, value): if not isinstance(value, (list, tuple)): @@ -555,8 +573,8 @@ class GeoJsonBaseField(BaseField): # Quick and dirty validator try: value[0][0] - except: - return "Invalid MultiPoint must contain at least one valid point" + except (TypeError, IndexError): + return 'Invalid MultiPoint must contain at least one valid point' errors = [] for point in value: @@ -565,7 +583,7 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: - return "%s" % ", ".join(errors) + return '%s' % ', '.join(errors) def _validate_multilinestring(self, value, top_level=True): if not isinstance(value, (list, tuple)): @@ -574,8 +592,8 @@ class GeoJsonBaseField(BaseField): # Quick and dirty validator try: value[0][0][0] - except: - return "Invalid MultiLineString must contain at least one valid linestring" + except (TypeError, IndexError): + return 'Invalid MultiLineString must contain at least one valid linestring' errors = [] for linestring in value: @@ -585,9 +603,9 @@ class GeoJsonBaseField(BaseField): if errors: if top_level: - return "Invalid MultiLineString:\n%s" % ", ".join(errors) + return 'Invalid MultiLineString:\n%s' % ', '.join(errors) else: - return "%s" % ", ".join(errors) + return '%s' % ', '.join(errors) def _validate_multipolygon(self, value): if not isinstance(value, (list, tuple)): @@ -596,8 +614,8 @@ class GeoJsonBaseField(BaseField): # Quick and dirty validator try: value[0][0][0][0] - except: - return "Invalid MultiPolygon must contain at least one valid Polygon" + except (TypeError, IndexError): + return 'Invalid MultiPolygon must contain at least one valid Polygon' errors = [] for polygon in value: @@ -606,9 +624,9 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: - return "Invalid MultiPolygon:\n%s" % ", ".join(errors) + return 'Invalid MultiPolygon:\n%s' % ', '.join(errors) def to_mongo(self, value): if isinstance(value, dict): return value - return SON([("type", self._type), ("coordinates", value)]) + return SON([('type', self._type), ('coordinates', value)]) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index b5981aae..481408bf 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -1,22 +1,23 @@ import warnings +import six + +from mongoengine.base.common import _document_registry +from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField from mongoengine.common import _import_class from mongoengine.errors import InvalidDocumentError -from mongoengine.python_support import PY3 from mongoengine.queryset import (DO_NOTHING, DoesNotExist, MultipleObjectsReturned, QuerySetManager) -from mongoengine.base.common import 
_document_registry, ALLOW_INHERITANCE -from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') class DocumentMetaclass(type): - """Metaclass for all documents. - """ + """Metaclass for all documents.""" + # TODO lower complexity of this method def __new__(cls, name, bases, attrs): flattened_bases = cls._get_bases(bases) super_new = super(DocumentMetaclass, cls).__new__ @@ -45,7 +46,8 @@ class DocumentMetaclass(type): attrs['_meta'] = meta attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract - if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE): + # If allow_inheritance is True, add a "_cls" string field to the attrs + if attrs['_meta'].get('allow_inheritance'): StringField = _import_class('StringField') attrs['_cls'] = StringField() @@ -87,16 +89,17 @@ class DocumentMetaclass(type): # Ensure no duplicate db_fields duplicate_db_fields = [k for k, v in field_names.items() if v > 1] if duplicate_db_fields: - msg = ("Multiple db_fields defined for: %s " % - ", ".join(duplicate_db_fields)) + msg = ('Multiple db_fields defined for: %s ' % + ', '.join(duplicate_db_fields)) raise InvalidDocumentError(msg) # Set _fields and db_field maps attrs['_fields'] = doc_fields - attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) - for k, v in doc_fields.iteritems()]) - attrs['_reverse_db_field_map'] = dict( - (v, k) for k, v in attrs['_db_field_map'].iteritems()) + attrs['_db_field_map'] = {k: getattr(v, 'db_field', k) + for k, v in doc_fields.items()} + attrs['_reverse_db_field_map'] = { + v: k for k, v in attrs['_db_field_map'].items() + } attrs['_fields_ordered'] = tuple(i[1] for i in sorted( (v.creation_counter, v.name) @@ -116,10 +119,8 @@ class DocumentMetaclass(type): if hasattr(base, '_meta'): # Warn if allow_inheritance isn't set and prevent # inheritance of classes where inheritance is set to False - allow_inheritance = base._meta.get('allow_inheritance', - ALLOW_INHERITANCE) - if (allow_inheritance is not True and - not base._meta.get('abstract')): + allow_inheritance = base._meta.get('allow_inheritance') + if not allow_inheritance and not base._meta.get('abstract'): raise ValueError('Document %s may not be subclassed' % base.__name__) @@ -161,8 +162,8 @@ class DocumentMetaclass(type): # module continues to use im_func and im_self, so the code below # copies __func__ into im_func and __self__ into im_self for # classmethod objects in Document derived classes. 
- if PY3: - for key, val in new_class.__dict__.items(): + if six.PY3: + for val in new_class.__dict__.values(): if isinstance(val, classmethod): f = val.__get__(new_class) if hasattr(f, '__func__') and not hasattr(f, 'im_func'): @@ -179,11 +180,11 @@ class DocumentMetaclass(type): if isinstance(f, CachedReferenceField): if issubclass(new_class, EmbeddedDocument): - raise InvalidDocumentError( - "CachedReferenceFields is not allowed in EmbeddedDocuments") + raise InvalidDocumentError('CachedReferenceFields is not ' + 'allowed in EmbeddedDocuments') if not f.document_type: raise InvalidDocumentError( - "Document is not available to sync") + 'Document is not available to sync') if f.auto_sync: f.start_listener() @@ -195,8 +196,8 @@ class DocumentMetaclass(type): 'reverse_delete_rule', DO_NOTHING) if isinstance(f, DictField) and delete_rule != DO_NOTHING: - msg = ("Reverse delete rules are not supported " - "for %s (field: %s)" % + msg = ('Reverse delete rules are not supported ' + 'for %s (field: %s)' % (field.__class__.__name__, field.name)) raise InvalidDocumentError(msg) @@ -204,16 +205,16 @@ class DocumentMetaclass(type): if delete_rule != DO_NOTHING: if issubclass(new_class, EmbeddedDocument): - msg = ("Reverse delete rules are not supported for " - "EmbeddedDocuments (field: %s)" % field.name) + msg = ('Reverse delete rules are not supported for ' + 'EmbeddedDocuments (field: %s)' % field.name) raise InvalidDocumentError(msg) f.document_type.register_delete_rule(new_class, field.name, delete_rule) if (field.name and hasattr(Document, field.name) and EmbeddedDocument not in new_class.mro()): - msg = ("%s is a document method and not a valid " - "field name" % field.name) + msg = ('%s is a document method and not a valid ' + 'field name' % field.name) raise InvalidDocumentError(msg) return new_class @@ -271,6 +272,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): 'index_drop_dups': False, 'index_opts': None, 'delete_rules': None, + + # allow_inheritance can be True, False, and None. True means + # "allow inheritance", False means "don't allow inheritance", + # None means "do whatever your parent does, or don't allow + # inheritance if you're a top-level class". 'allow_inheritance': None, } attrs['_is_base_cls'] = True @@ -303,7 +309,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): # If parent wasn't an abstract class if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and not parent_doc_cls._meta.get('abstract', True)): - msg = "Trying to set a collection on a subclass (%s)" % name + msg = 'Trying to set a collection on a subclass (%s)' % name warnings.warn(msg, SyntaxWarning) del attrs['_meta']['collection'] @@ -311,7 +317,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): if (parent_doc_cls and not parent_doc_cls._meta.get('abstract', False)): - msg = "Abstract document cannot have non-abstract base" + msg = 'Abstract document cannot have non-abstract base' raise ValueError(msg) return super_new(cls, name, bases, attrs) @@ -334,12 +340,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): meta.merge(attrs.get('_meta', {})) # Top level meta - # Only simple classes (direct subclasses of Document) - # may set allow_inheritance to False + # Only simple classes (i.e. direct subclasses of Document) may set + # allow_inheritance to False. If the base Document allows inheritance, + # none of its subclasses can override allow_inheritance to False. 
simple_class = all([b._meta.get('abstract') for b in flattened_bases if hasattr(b, '_meta')]) - if (not simple_class and meta['allow_inheritance'] is False and - not meta['abstract']): + if ( + not simple_class and + meta['allow_inheritance'] is False and + not meta['abstract'] + ): raise ValueError('Only direct subclasses of Document may set ' '"allow_inheritance" to False') diff --git a/mongoengine/common.py b/mongoengine/common.py index 3e63e98e..bde7e78c 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -34,7 +34,10 @@ def _import_class(cls_name): queryset_classes = ('OperationError',) deref_classes = ('DeReference',) - if cls_name in doc_classes: + if cls_name == 'BaseDocument': + from mongoengine.base import document as module + import_classes = ['BaseDocument'] + elif cls_name in doc_classes: from mongoengine import document as module import_classes = doc_classes elif cls_name in field_classes: diff --git a/mongoengine/connection.py b/mongoengine/connection.py index cb1a731f..7eae810f 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,11 +1,14 @@ from pymongo import MongoClient, ReadPreference, uri_parser +import six + from mongoengine.python_support import IS_PYMONGO_3 -__all__ = ['ConnectionError', 'connect', 'register_connection', +__all__ = ['MongoEngineConnectionError', 'connect', 'register_connection', 'DEFAULT_CONNECTION_NAME'] DEFAULT_CONNECTION_NAME = 'default' + if IS_PYMONGO_3: READ_PREFERENCE = ReadPreference.PRIMARY else: @@ -13,7 +16,10 @@ else: READ_PREFERENCE = False -class ConnectionError(Exception): +class MongoEngineConnectionError(Exception): + """Error raised when the database connection can't be established or + when a connection with a requested alias can't be retrieved. + """ pass @@ -24,7 +30,9 @@ _dbs = {} def register_connection(alias, name=None, host=None, port=None, read_preference=READ_PREFERENCE, - username=None, password=None, authentication_source=None, + username=None, password=None, + authentication_source=None, + authentication_mechanism=None, **kwargs): """Add a connection. @@ -38,11 +46,17 @@ def register_connection(alias, name=None, host=None, port=None, :param username: username to authenticate with :param password: password to authenticate with :param authentication_source: database to authenticate against - :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver + :param authentication_mechanism: database authentication mechanisms. + By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, + MONGODB-CR (MongoDB Challenge Response protocol) for older servers. + :param is_mock: explicitly use mongomock for this connection + (can also be done by using `mongomock://` as db host prefix) + :param kwargs: ad-hoc parameters to be passed into the pymongo driver, + for example maxpoolsize, tz_aware, etc. See the documentation + for pymongo's `MongoClient` for a full list. + .. 
versionchanged:: 0.10.6 - added mongomock support """ - global _connection_settings - conn_settings = { 'name': name or 'test', 'host': host or 'localhost', @@ -50,23 +64,48 @@ def register_connection(alias, name=None, host=None, port=None, 'read_preference': read_preference, 'username': username, 'password': password, - 'authentication_source': authentication_source + 'authentication_source': authentication_source, + 'authentication_mechanism': authentication_mechanism } - # Handle uri style connections - if "://" in conn_settings['host']: - uri_dict = uri_parser.parse_uri(conn_settings['host']) - conn_settings.update({ - 'name': uri_dict.get('database') or name, - 'username': uri_dict.get('username'), - 'password': uri_dict.get('password'), - 'read_preference': read_preference, - }) - uri_options = uri_dict['options'] - if 'replicaset' in uri_options: - conn_settings['replicaSet'] = True - if 'authsource' in uri_options: - conn_settings['authentication_source'] = uri_options['authsource'] + conn_host = conn_settings['host'] + + # Host can be a list or a string, so if string, force to a list. + if isinstance(conn_host, six.string_types): + conn_host = [conn_host] + + resolved_hosts = [] + for entity in conn_host: + + # Handle Mongomock + if entity.startswith('mongomock://'): + conn_settings['is_mock'] = True + # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` + resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1)) + + # Handle URI style connections, only updating connection params which + # were explicitly specified in the URI. + elif '://' in entity: + uri_dict = uri_parser.parse_uri(entity) + resolved_hosts.append(entity) + + if uri_dict.get('database'): + conn_settings['name'] = uri_dict.get('database') + + for param in ('read_preference', 'username', 'password'): + if uri_dict.get(param): + conn_settings[param] = uri_dict[param] + + uri_options = uri_dict['options'] + if 'replicaset' in uri_options: + conn_settings['replicaSet'] = uri_options['replicaset'] + if 'authsource' in uri_options: + conn_settings['authentication_source'] = uri_options['authsource'] + if 'authmechanism' in uri_options: + conn_settings['authentication_mechanism'] = uri_options['authmechanism'] + else: + resolved_hosts.append(entity) + conn_settings['host'] = resolved_hosts # Deprecated parameters that should not be passed on kwargs.pop('slaves', None) @@ -77,67 +116,108 @@ def register_connection(alias, name=None, host=None, port=None, def disconnect(alias=DEFAULT_CONNECTION_NAME): - global _connections - global _dbs - + """Close the connection with a given alias.""" if alias in _connections: - get_connection(alias=alias).disconnect() + get_connection(alias=alias).close() del _connections[alias] if alias in _dbs: del _dbs[alias] def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): - global _connections + """Return a connection with a given alias.""" + # Connect to the database if not already connected if reconnect: disconnect(alias) - if alias not in _connections: - if alias not in _connection_settings: + # If the requested alias already exists in the _connections list, return + # it immediately. + if alias in _connections: + return _connections[alias] + + # Validate that the requested alias exists in the _connection_settings. + # Raise MongoEngineConnectionError if it doesn't. 
+    if alias not in _connection_settings:
+        if alias == DEFAULT_CONNECTION_NAME:
+            msg = 'You have not defined a default connection'
+        else:
             msg = 'Connection with alias "%s" has not been defined' % alias
-            if alias == DEFAULT_CONNECTION_NAME:
-                msg = 'You have not defined a default connection'
-            raise ConnectionError(msg)
-        conn_settings = _connection_settings[alias].copy()
+        raise MongoEngineConnectionError(msg)
 
-        conn_settings.pop('name', None)
-        conn_settings.pop('username', None)
-        conn_settings.pop('password', None)
-        conn_settings.pop('authentication_source', None)
+    def _clean_settings(settings_dict):
+        irrelevant_fields = set([
+            'name', 'username', 'password', 'authentication_source',
+            'authentication_mechanism'
+        ])
+        return {
+            k: v for k, v in settings_dict.items()
+            if k not in irrelevant_fields
+        }
+    # Retrieve a copy of the connection settings associated with the requested
+    # alias and remove the database name and authentication info (we don't
+    # care about them at this point).
+    conn_settings = _clean_settings(_connection_settings[alias].copy())
+
+    # Determine if we should use PyMongo's or mongomock's MongoClient.
+    is_mock = conn_settings.pop('is_mock', False)
+    if is_mock:
+        try:
+            import mongomock
+        except ImportError:
+            raise RuntimeError('You need mongomock installed to mock '
+                               'MongoEngine.')
+        connection_class = mongomock.MongoClient
+    else:
         connection_class = MongoClient
-        if 'replicaSet' in conn_settings:
+
+        # For replica set connections with PyMongo 2.x, use
+        # MongoReplicaSetClient.
+        # TODO remove this once we stop supporting PyMongo 2.x.
+        if 'replicaSet' in conn_settings and not IS_PYMONGO_3:
+            connection_class = MongoReplicaSetClient
             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
+
+            # hosts_or_uri has to be a string, so if 'host' was provided
+            # as a list, join its parts and separate them by ','
+            if isinstance(conn_settings['hosts_or_uri'], list):
+                conn_settings['hosts_or_uri'] = ','.join(
+                    conn_settings['hosts_or_uri'])
+
             # Discard port since it can't be used on MongoReplicaSetClient
             conn_settings.pop('port', None)
-            # Discard replicaSet if not base string
-            if not isinstance(conn_settings['replicaSet'], basestring):
-                conn_settings.pop('replicaSet', None)
-            if not IS_PYMONGO_3:
-                connection_class = MongoReplicaSetClient
 
+    # Iterate over all of the connection settings and if a connection with
+    # the same parameters is already established, use it instead of creating
+    # a new one.
+    existing_connection = None
+    connection_settings_iterator = (
+        (db_alias, settings.copy())
+        for db_alias, settings in _connection_settings.items()
+    )
+    for db_alias, connection_settings in connection_settings_iterator:
+        connection_settings = _clean_settings(connection_settings)
+        if conn_settings == connection_settings and _connections.get(db_alias):
+            existing_connection = _connections[db_alias]
+            break
+
+    # If an existing connection was found, assign it to the new alias
+    if existing_connection:
+        _connections[alias] = existing_connection
+    else:
+        # Otherwise, create the new connection for this alias. Raise
+        # MongoEngineConnectionError if it can't be established.
try: - connection = None - # check for shared connections - connection_settings_iterator = ( - (db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems()) - for db_alias, connection_settings in connection_settings_iterator: - connection_settings.pop('name', None) - connection_settings.pop('username', None) - connection_settings.pop('password', None) - if conn_settings == connection_settings and _connections.get(db_alias, None): - connection = _connections[db_alias] - break + _connections[alias] = connection_class(**conn_settings) + except Exception as e: + raise MongoEngineConnectionError( + 'Cannot connect to database %s :\n%s' % (alias, e)) - _connections[alias] = connection if connection else connection_class(**conn_settings) - except Exception, e: - raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) return _connections[alias] def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): - global _dbs if reconnect: disconnect(alias) @@ -145,11 +225,13 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): conn = get_connection(alias) conn_settings = _connection_settings[alias] db = conn[conn_settings['name']] + auth_kwargs = {'source': conn_settings['authentication_source']} + if conn_settings['authentication_mechanism'] is not None: + auth_kwargs['mechanism'] = conn_settings['authentication_mechanism'] # Authenticate if necessary - if conn_settings['username'] and conn_settings['password']: - db.authenticate(conn_settings['username'], - conn_settings['password'], - source=conn_settings['authentication_source']) + if conn_settings['username'] and (conn_settings['password'] or + conn_settings['authentication_mechanism'] == 'MONGODB-X509'): + db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs) _dbs[alias] = db return _dbs[alias] @@ -161,12 +243,14 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): running on the default port on localhost. If authentication is needed, provide username and password arguments as well. - Multiple databases are supported by using aliases. Provide a separate + Multiple databases are supported by using aliases. Provide a separate `alias` to connect to a different instance of :program:`mongod`. + See the docstring for `register_connection` for more details about all + supported kwargs. + .. versionchanged:: 0.6 - added multiple database support. """ - global _connections if alias not in _connections: register_connection(alias, db, **kwargs) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index cc860066..c477575e 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -2,12 +2,12 @@ from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db -__all__ = ("switch_db", "switch_collection", "no_dereference", - "no_sub_classes", "query_counter") +__all__ = ('switch_db', 'switch_collection', 'no_dereference', + 'no_sub_classes', 'query_counter') class switch_db(object): - """ switch_db alias context manager. + """switch_db alias context manager. 
Example :: @@ -18,15 +18,14 @@ class switch_db(object): class Group(Document): name = StringField() - Group(name="test").save() # Saves in the default db + Group(name='test').save() # Saves in the default db with switch_db(Group, 'testdb-1') as Group: - Group(name="hello testdb!").save() # Saves in testdb-1 - + Group(name='hello testdb!').save() # Saves in testdb-1 """ def __init__(self, cls, db_alias): - """ Construct the switch_db context manager + """Construct the switch_db context manager :param cls: the class to change the registered db :param db_alias: the name of the specific database to use @@ -34,37 +33,36 @@ class switch_db(object): self.cls = cls self.collection = cls._get_collection() self.db_alias = db_alias - self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) + self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME) def __enter__(self): - """ change the db_alias and clear the cached collection """ - self.cls._meta["db_alias"] = self.db_alias + """Change the db_alias and clear the cached collection.""" + self.cls._meta['db_alias'] = self.db_alias self.cls._collection = None return self.cls def __exit__(self, t, value, traceback): - """ Reset the db_alias and collection """ - self.cls._meta["db_alias"] = self.ori_db_alias + """Reset the db_alias and collection.""" + self.cls._meta['db_alias'] = self.ori_db_alias self.cls._collection = self.collection class switch_collection(object): - """ switch_collection alias context manager. + """switch_collection alias context manager. Example :: class Group(Document): name = StringField() - Group(name="test").save() # Saves in the default db + Group(name='test').save() # Saves in the default db with switch_collection(Group, 'group1') as Group: - Group(name="hello testdb!").save() # Saves in group1 collection - + Group(name='hello testdb!').save() # Saves in group1 collection """ def __init__(self, cls, collection_name): - """ Construct the switch_collection context manager + """Construct the switch_collection context manager. :param cls: the class to change the registered db :param collection_name: the name of the collection to use @@ -75,7 +73,7 @@ class switch_collection(object): self.collection_name = collection_name def __enter__(self): - """ change the _get_collection_name and clear the cached collection """ + """Change the _get_collection_name and clear the cached collection.""" @classmethod def _get_collection_name(cls): @@ -86,24 +84,23 @@ class switch_collection(object): return self.cls def __exit__(self, t, value, traceback): - """ Reset the collection """ + """Reset the collection.""" self.cls._collection = self.ori_collection self.cls._get_collection_name = self.ori_get_collection_name class no_dereference(object): - """ no_dereference context manager. + """no_dereference context manager. Turns off all dereferencing in Documents for the duration of the context manager:: with no_dereference(Group) as Group: Group.objects.find() - """ def __init__(self, cls): - """ Construct the no_dereference context manager. + """Construct the no_dereference context manager. 
:param cls: the class to turn dereferencing off on """ @@ -119,103 +116,102 @@ class no_dereference(object): ComplexBaseField))] def __enter__(self): - """ change the objects default and _auto_dereference values""" + """Change the objects default and _auto_dereference values.""" for field in self.deref_fields: self.cls._fields[field]._auto_dereference = False return self.cls def __exit__(self, t, value, traceback): - """ Reset the default and _auto_dereference values""" + """Reset the default and _auto_dereference values.""" for field in self.deref_fields: self.cls._fields[field]._auto_dereference = True return self.cls class no_sub_classes(object): - """ no_sub_classes context manager. + """no_sub_classes context manager. Only returns instances of this class and no sub (inherited) classes:: with no_sub_classes(Group) as Group: Group.objects.find() - """ def __init__(self, cls): - """ Construct the no_sub_classes context manager. + """Construct the no_sub_classes context manager. :param cls: the class to turn querying sub classes on """ self.cls = cls def __enter__(self): - """ change the objects default and _auto_dereference values""" + """Change the objects default and _auto_dereference values.""" self.cls._all_subclasses = self.cls._subclasses self.cls._subclasses = (self.cls,) return self.cls def __exit__(self, t, value, traceback): - """ Reset the default and _auto_dereference values""" + """Reset the default and _auto_dereference values.""" self.cls._subclasses = self.cls._all_subclasses delattr(self.cls, '_all_subclasses') return self.cls class query_counter(object): - """ Query_counter context manager to get the number of queries. """ + """Query_counter context manager to get the number of queries.""" def __init__(self): - """ Construct the query_counter. """ + """Construct the query_counter.""" self.counter = 0 self.db = get_db() def __enter__(self): - """ On every with block we need to drop the profile collection. """ + """On every with block we need to drop the profile collection.""" self.db.set_profiling_level(0) self.db.system.profile.drop() self.db.set_profiling_level(2) return self def __exit__(self, t, value, traceback): - """ Reset the profiling level. """ + """Reset the profiling level.""" self.db.set_profiling_level(0) def __eq__(self, value): - """ == Compare querycounter. """ + """== Compare querycounter.""" counter = self._get_count() return value == counter def __ne__(self, value): - """ != Compare querycounter. """ + """!= Compare querycounter.""" return not self.__eq__(value) def __lt__(self, value): - """ < Compare querycounter. """ + """< Compare querycounter.""" return self._get_count() < value def __le__(self, value): - """ <= Compare querycounter. """ + """<= Compare querycounter.""" return self._get_count() <= value def __gt__(self, value): - """ > Compare querycounter. """ + """> Compare querycounter.""" return self._get_count() > value def __ge__(self, value): - """ >= Compare querycounter. """ + """>= Compare querycounter.""" return self._get_count() >= value def __int__(self): - """ int representation. """ + """int representation.""" return self._get_count() def __repr__(self): - """ repr query_counter as the number of queries. """ + """repr query_counter as the number of queries.""" return u"%s" % self._get_count() def _get_count(self): - """ Get the number of queries. 
""" - ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} + """Get the number of queries.""" + ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}} count = self.db.system.profile.find(ignore_query).count() - self.counter self.counter += 1 return count diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 7fcc2ad2..59204d4d 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -1,13 +1,12 @@ from bson import DBRef, SON +import six -from base import ( - BaseDict, BaseList, EmbeddedDocumentList, - TopLevelDocumentMetaclass, get_document -) -from fields import (ReferenceField, ListField, DictField, MapField) -from connection import get_db -from queryset import QuerySet -from document import Document, EmbeddedDocument +from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, + TopLevelDocumentMetaclass, get_document) +from mongoengine.connection import get_db +from mongoengine.document import Document, EmbeddedDocument +from mongoengine.fields import DictField, ListField, MapField, ReferenceField +from mongoengine.queryset import QuerySet class DeReference(object): @@ -24,7 +23,7 @@ class DeReference(object): :class:`~mongoengine.base.ComplexBaseField` :param get: A boolean determining if being called by __get__ """ - if items is None or isinstance(items, basestring): + if items is None or isinstance(items, six.string_types): return items # cheapest way to convert a queryset to a list @@ -67,11 +66,11 @@ class DeReference(object): items = _get_items(items) else: - items = dict([ - (k, field.to_python(v)) - if not isinstance(v, (DBRef, Document)) else (k, v) - for k, v in items.iteritems()] - ) + items = { + k: (v if isinstance(v, (DBRef, Document)) + else field.to_python(v)) + for k, v in items.iteritems() + } self.reference_map = self._find_references(items) self.object_map = self._fetch_objects(doc_type=doc_type) @@ -89,14 +88,14 @@ class DeReference(object): return reference_map # Determine the iterator to use - if not hasattr(items, 'items'): - iterator = enumerate(items) + if isinstance(items, dict): + iterator = items.values() else: - iterator = items.iteritems() + iterator = items # Recursively find dbreferences depth += 1 - for k, item in iterator: + for item in iterator: if isinstance(item, (Document, EmbeddedDocument)): for field_name, field in item._fields.iteritems(): v = item._data.get(field_name, None) @@ -150,7 +149,7 @@ class DeReference(object): references = get_db()[collection].find({'_id': {'$in': refs}}) for ref in references: if '_cls' in ref: - doc = get_document(ref["_cls"])._from_son(ref) + doc = get_document(ref['_cls'])._from_son(ref) elif doc_type is None: doc = get_document( ''.join(x.capitalize() @@ -217,7 +216,7 @@ class DeReference(object): if k in self.object_map and not is_list: data[k] = self.object_map[k] elif isinstance(v, (Document, EmbeddedDocument)): - for field_name, field in v._fields.iteritems(): + for field_name in v._fields: v = data[k]._data.get(field_name, None) if isinstance(v, DBRef): data[k]._data[field_name] = self.object_map.get( @@ -226,7 +225,7 @@ class DeReference(object): data[k]._data[field_name] = self.object_map.get( (v['_ref'].collection, v['_ref'].id), v) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = "{0}.{1}.{2}".format(name, k, field_name) + item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name) data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) elif 
isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: item_name = '%s.%s' % (name, k) if name else name diff --git a/mongoengine/document.py b/mongoengine/document.py index c5498750..b79e5e97 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -1,27 +1,23 @@ -import warnings -import pymongo import re +import warnings -from pymongo.read_preferences import ReadPreference from bson.dbref import DBRef +import pymongo +from pymongo.read_preferences import ReadPreference +import six + from mongoengine import signals +from mongoengine.base import (BaseDict, BaseDocument, BaseList, + DocumentMetaclass, EmbeddedDocumentList, + TopLevelDocumentMetaclass, get_document) from mongoengine.common import _import_class -from mongoengine.base import ( - DocumentMetaclass, - TopLevelDocumentMetaclass, - BaseDocument, - BaseDict, - BaseList, - EmbeddedDocumentList, - ALLOW_INHERITANCE, - get_document -) -from mongoengine.errors import InvalidQueryError, InvalidDocumentError +from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db +from mongoengine.context_managers import switch_collection, switch_db +from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, + SaveConditionError) from mongoengine.python_support import IS_PYMONGO_3 -from mongoengine.queryset import (OperationError, NotUniqueError, +from mongoengine.queryset import (NotUniqueError, OperationError, QuerySet, transform) -from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME -from mongoengine.context_managers import switch_db, switch_collection __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', 'DynamicEmbeddedDocument', 'OperationError', @@ -29,12 +25,10 @@ __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', def includes_cls(fields): - """ Helper function used for ensuring and comparing indexes - """ - + """Helper function used for ensuring and comparing indexes.""" first_field = None if len(fields): - if isinstance(fields[0], basestring): + if isinstance(fields[0], six.string_types): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] @@ -55,9 +49,8 @@ class EmbeddedDocument(BaseDocument): to create a specialised version of the embedded document that will be stored in the same collection. To facilitate this behaviour a `_cls` field is added to documents (hidden though the MongoEngine interface). - To disable this behaviour and remove the dependence on the presence of - `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` - dictionary. + To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the + :attr:`meta` dictionary. """ __slots__ = ('_instance', ) @@ -80,6 +73,15 @@ class EmbeddedDocument(BaseDocument): def __ne__(self, other): return not self.__eq__(other) + def to_mongo(self, *args, **kwargs): + data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) + + # remove _id from the SON if it's in it and it's None + if '_id' in data and data['_id'] is None: + del data['_id'] + + return data + def save(self, *args, **kwargs): self._instance.save(*args, **kwargs) @@ -104,9 +106,8 @@ class Document(BaseDocument): create a specialised version of the document that will be stored in the same collection. To facilitate this behaviour a `_cls` field is added to documents (hidden though the MongoEngine interface). - To disable this behaviour and remove the dependence on the presence of - `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` - dictionary. 
+ To enable this behaviour, set :attr:`allow_inheritance` to ``True`` in the
+ :attr:`meta` dictionary.
A :class:`~mongoengine.Document` may use a **Capped Collection** by
specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
@@ -147,26 +148,22 @@ class Document(BaseDocument):
__slots__ = ('__objects',)
- def pk():
- """Primary key alias
- """
+ @property
+ def pk(self):
+ """Get the primary key."""
+ if 'id_field' not in self._meta:
+ return None
+ return getattr(self, self._meta['id_field'])
- def fget(self):
- if 'id_field' not in self._meta:
- return None
- return getattr(self, self._meta['id_field'])
-
- def fset(self, value):
- return setattr(self, self._meta['id_field'], value)
-
- return property(fget, fset)
-
- pk = pk()
+ @pk.setter
+ def pk(self, value):
+ """Set the primary key."""
+ return setattr(self, self._meta['id_field'], value)
@classmethod
def _get_db(cls):
"""Some Model using other db_alias"""
- return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))
+ return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME))
@classmethod
def _get_collection(cls):
@@ -209,31 +206,46 @@ class Document(BaseDocument):
cls.ensure_indexes()
return cls._collection
- def modify(self, query={}, **update):
+ def to_mongo(self, *args, **kwargs):
+ data = super(Document, self).to_mongo(*args, **kwargs)
+
+ # If '_id' is None, try and set it from self._data. If that
+ # doesn't exist either, remove '_id' from the SON completely.
+ if data['_id'] is None:
+ if self._data.get('id') is None:
+ del data['_id']
+ else:
+ data['_id'] = self._data['id']
+
+ return data
+
+ def modify(self, query=None, **update):
"""Perform an atomic update of the document in the database and
reload the document object using updated version.
Returns True if the document has been updated or False if the document
in the database doesn't match the query.
- .. note:: All unsaved changes that has been made to the document are
+ .. note:: All unsaved changes that have been made to the document are
rejected if the method returns True.
:param query: the update will be performed only if the document in the
database matches the query
:param update: Django-style update keyword arguments
"""
+ if query is None:
+ query = {}
if self.pk is None:
- raise InvalidDocumentError("The document does not have a primary key.")
+ raise InvalidDocumentError('The document does not have a primary key.')
- id_field = self._meta["id_field"]
+ id_field = self._meta['id_field']
query = query.copy() if isinstance(query, dict) else query.to_query(self)
if id_field not in query:
query[id_field] = self.pk
elif query[id_field] != self.pk:
- raise InvalidQueryError("Invalid document modify query: it must modify only this document.")
+ raise InvalidQueryError('Invalid document modify query: it must modify only this document.')
updated = self._qs(**query).modify(new=True, **update)
if updated is None:
@@ -249,7 +261,7 @@ class Document(BaseDocument):
def save(self, force_insert=False, validate=True, clean=True,
write_concern=None, cascade=None, cascade_kwargs=None,
- _refs=None, save_condition=None, **kwargs):
+ _refs=None, save_condition=None, signal_kwargs=None, **kwargs):
"""Save the :class:`~mongoengine.Document` to the database. If the
document already exists, it will be updated, otherwise it will be
created.
@@ -275,6 +287,8 @@ class Document(BaseDocument):
:param save_condition: only perform save if matching record in db
satisfies condition(s) (e.g. version number).
Raises :class:`OperationError` if the conditions are not satisfied + :parm signal_kwargs: (optional) kwargs dictionary to be passed to + the signal calls. .. versionchanged:: 0.5 In existing documents it only saves changed fields using @@ -294,114 +308,159 @@ class Document(BaseDocument): if the condition is satisfied in the current db record. .. versionchanged:: 0.10 :class:`OperationError` exception raised if save_condition fails. + .. versionchanged:: 0.10.1 + :class: save_condition failure now raises a `SaveConditionError` + .. versionchanged:: 0.10.7 + Add signal_kwargs argument """ - signals.pre_save.send(self.__class__, document=self) + if self._meta.get('abstract'): + raise InvalidDocumentError('Cannot save an abstract document.') + + signal_kwargs = signal_kwargs or {} + signals.pre_save.send(self.__class__, document=self, **signal_kwargs) if validate: self.validate(clean=clean) if write_concern is None: - write_concern = {"w": 1} + write_concern = {'w': 1} doc = self.to_mongo() created = ('_id' not in doc or self._created or force_insert) signals.pre_save_post_validation.send(self.__class__, document=self, - created=created) + created=created, **signal_kwargs) + + if self._meta.get('auto_create_index', True): + self.ensure_indexes() try: - collection = self._get_collection() - if self._meta.get('auto_create_index', True): - self.ensure_indexes() + # Save a new document or update an existing one if created: - if force_insert: - object_id = collection.insert(doc, **write_concern) - else: - object_id = collection.save(doc, **write_concern) - # In PyMongo 3.0, the save() call calls internally the _update() call - # but they forget to return the _id value passed back, therefore getting it back here - # Correct behaviour in 2.X and in 3.0.1+ versions - if not object_id and pymongo.version_tuple == (3, 0): - pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk) - object_id = self._qs.filter(pk=pk_as_mongo_obj).first() and \ - self._qs.filter(pk=pk_as_mongo_obj).first().pk + object_id = self._save_create(doc, force_insert, write_concern) else: - object_id = doc['_id'] - updates, removals = self._delta() - # Need to add shard key to query, or you get an error - if save_condition is not None: - select_dict = transform.query(self.__class__, - **save_condition) - else: - select_dict = {} - select_dict['_id'] = object_id - shard_key = self.__class__._meta.get('shard_key', tuple()) - for k in shard_key: - actual_key = self._db_field_map.get(k, k) - select_dict[actual_key] = doc[actual_key] - - def is_new_object(last_error): - if last_error is not None: - updated = last_error.get("updatedExisting") - if updated is not None: - return not updated - return created - - update_query = {} - - if updates: - update_query["$set"] = updates - if removals: - update_query["$unset"] = removals - if updates or removals: - upsert = save_condition is None - last_error = collection.update(select_dict, update_query, - upsert=upsert, **write_concern) - if not upsert and last_error['nModified'] == 0: - raise OperationError('Race condition preventing' - ' document update detected') - created = is_new_object(last_error) + object_id, created = self._save_update(doc, save_condition, + write_concern) if cascade is None: - cascade = self._meta.get( - 'cascade', False) or cascade_kwargs is not None + cascade = (self._meta.get('cascade', False) or + cascade_kwargs is not None) if cascade: kwargs = { - "force_insert": force_insert, - "validate": validate, - "write_concern": write_concern, - 
"cascade": cascade + 'force_insert': force_insert, + 'validate': validate, + 'write_concern': write_concern, + 'cascade': cascade } if cascade_kwargs: # Allow granular control over cascades kwargs.update(cascade_kwargs) kwargs['_refs'] = _refs self.cascade_save(**kwargs) - except pymongo.errors.DuplicateKeyError, err: + + except pymongo.errors.DuplicateKeyError as err: message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % unicode(err)) - except pymongo.errors.OperationFailure, err: + raise NotUniqueError(message % six.text_type(err)) + except pymongo.errors.OperationFailure as err: message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', unicode(err)): + if re.match('^E1100[01] duplicate key', six.text_type(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % unicode(err)) - raise OperationError(message % unicode(err)) + raise NotUniqueError(message % six.text_type(err)) + raise OperationError(message % six.text_type(err)) + + # Make sure we store the PK on this document now that it's saved id_field = self._meta['id_field'] if created or id_field not in self._meta.get('shard_key', []): self[id_field] = self._fields[id_field].to_python(object_id) - signals.post_save.send(self.__class__, document=self, created=created) + signals.post_save.send(self.__class__, document=self, + created=created, **signal_kwargs) + self._clear_changed_fields() self._created = False + return self - def cascade_save(self, *args, **kwargs): - """Recursively saves any references / - generic references on an objects""" - _refs = kwargs.get('_refs', []) or [] + def _save_create(self, doc, force_insert, write_concern): + """Save a new document. + + Helper method, should only be used inside save(). + """ + collection = self._get_collection() + + if force_insert: + return collection.insert(doc, **write_concern) + + object_id = collection.save(doc, **write_concern) + + # In PyMongo 3.0, the save() call calls internally the _update() call + # but they forget to return the _id value passed back, therefore getting it back here + # Correct behaviour in 2.X and in 3.0.1+ versions + if not object_id and pymongo.version_tuple == (3, 0): + pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk) + object_id = ( + self._qs.filter(pk=pk_as_mongo_obj).first() and + self._qs.filter(pk=pk_as_mongo_obj).first().pk + ) # TODO doesn't this make 2 queries? + + return object_id + + def _save_update(self, doc, save_condition, write_concern): + """Update an existing document. + + Helper method, should only be used inside save(). 
+ """ + collection = self._get_collection() + object_id = doc['_id'] + created = False + + select_dict = {} + if save_condition is not None: + select_dict = transform.query(self.__class__, **save_condition) + + select_dict['_id'] = object_id + + # Need to add shard key to query, or you get an error + shard_key = self._meta.get('shard_key', tuple()) + for k in shard_key: + path = self._lookup_field(k.split('.')) + actual_key = [p.db_field for p in path] + val = doc + for ak in actual_key: + val = val[ak] + select_dict['.'.join(actual_key)] = val + + updates, removals = self._delta() + update_query = {} + if updates: + update_query['$set'] = updates + if removals: + update_query['$unset'] = removals + if updates or removals: + upsert = save_condition is None + last_error = collection.update(select_dict, update_query, + upsert=upsert, **write_concern) + if not upsert and last_error['n'] == 0: + raise SaveConditionError('Race condition preventing' + ' document update detected') + if last_error is not None: + updated_existing = last_error.get('updatedExisting') + if updated_existing is False: + created = True + # !!! This is bad, means we accidentally created a new, + # potentially corrupted document. See + # https://github.com/MongoEngine/mongoengine/issues/564 + + return object_id, created + + def cascade_save(self, **kwargs): + """Recursively save any references and generic references on the + document. + """ + _refs = kwargs.get('_refs') or [] ReferenceField = _import_class('ReferenceField') GenericReferenceField = _import_class('GenericReferenceField') @@ -427,21 +486,27 @@ class Document(BaseDocument): @property def _qs(self): - """ - Returns the queryset to use for updating / reloading / deletions - """ + """Return the queryset to use for updating / reloading / deletions.""" if not hasattr(self, '__objects'): self.__objects = QuerySet(self, self._get_collection()) return self.__objects @property def _object_key(self): - """Dict to identify object in collection + """Get the query dict that can be used to fetch this object from + the database. Most of the time it's a simple PK lookup, but in + case of a sharded collection with a compound shard key, it can + contain a more complex query. """ select_dict = {'pk': self.pk} shard_key = self.__class__._meta.get('shard_key', tuple()) for k in shard_key: - select_dict[k] = getattr(self, k) + path = self._lookup_field(k.split('.')) + actual_key = [p.db_field for p in path] + val = self + for ak in actual_key: + val = getattr(val, ak) + select_dict['__'.join(actual_key)] = val return select_dict def update(self, **kwargs): @@ -451,11 +516,11 @@ class Document(BaseDocument): Raises :class:`OperationError` if called on an object that has not yet been saved. """ - if not self.pk: + if self.pk is None: if kwargs.get('upsert', False): query = self.to_mongo() - if "_cls" in query: - del query["_cls"] + if '_cls' in query: + del query['_cls'] return self._qs.filter(**query).update_one(**kwargs) else: raise OperationError( @@ -464,32 +529,38 @@ class Document(BaseDocument): # Need to add shard key to query, or you get an error return self._qs.filter(**self._object_key).update_one(**kwargs) - def delete(self, **write_concern): + def delete(self, signal_kwargs=None, **write_concern): """Delete the :class:`~mongoengine.Document` from the database. This will only take effect if the document has been previously saved. + :parm signal_kwargs: (optional) kwargs dictionary to be passed to + the signal calls. 
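The reworked ``_save_update()`` above turns a failed ``save_condition`` into a ``SaveConditionError`` (since 0.10.1). A minimal optimistic-locking sketch, assuming a registered default connection; the ``Page`` document is invented for illustration::

    from mongoengine import Document, IntField, StringField, connect
    from mongoengine.errors import SaveConditionError

    connect('example-db')

    class Page(Document):
        uri = StringField()
        version = IntField(default=1)

    page = Page(uri='/home').save()
    page.version += 1
    try:
        # Persist only if the stored document still has version == 1.
        page.save(save_condition={'version': 1})
    except SaveConditionError:
        page.reload()  # another writer got there first; pick up their changes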
:param write_concern: Extra keyword arguments are passed down which will be used as options for the resultant ``getLastError`` command. For example, ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. - """ - signals.pre_delete.send(self.__class__, document=self) - # Delete FileFields separately + .. versionchanged:: 0.10.7 + Add signal_kwargs argument + """ + signal_kwargs = signal_kwargs or {} + signals.pre_delete.send(self.__class__, document=self, **signal_kwargs) + + # Delete FileFields separately FileField = _import_class('FileField') for name, field in self._fields.iteritems(): - if isinstance(field, FileField): + if isinstance(field, FileField): getattr(self, name).delete() try: self._qs.filter( **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True) - except pymongo.errors.OperationFailure, err: + except pymongo.errors.OperationFailure as err: message = u'Could not delete document (%s)' % err.message raise OperationError(message) - signals.post_delete.send(self.__class__, document=self) + signals.post_delete.send(self.__class__, document=self, **signal_kwargs) def switch_db(self, db_alias, keep_created=True): """ @@ -574,11 +645,12 @@ class Document(BaseDocument): if fields and isinstance(fields[0], int): max_depth = fields[0] fields = fields[1:] - elif "max_depth" in kwargs: - max_depth = kwargs["max_depth"] + elif 'max_depth' in kwargs: + max_depth = kwargs['max_depth'] + + if self.pk is None: + raise self.DoesNotExist('Document does not exist') - if not self.pk: - raise self.DoesNotExist("Document does not exist") obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( **self._object_key).only(*fields).limit( 1).select_related(max_depth=max_depth) @@ -586,17 +658,22 @@ class Document(BaseDocument): if obj: obj = obj[0] else: - raise self.DoesNotExist("Document does not exist") + raise self.DoesNotExist('Document does not exist') - for field in self._fields_ordered: + for field in obj._data: if not fields or field in fields: try: setattr(self, field, self._reload(field, obj[field])) - except KeyError: - # If field is removed from the database while the object - # is in memory, a reload would cause a KeyError - # i.e. obj.update(unset__field=1) followed by obj.reload() - delattr(self, field) + except (KeyError, AttributeError): + try: + # If field is a special field, e.g. items is stored as _reserved_items, + # an KeyError is thrown. So try to retrieve the field from _data + setattr(self, field, self._reload(field, obj._data.get(field))) + except KeyError: + # If field is removed from the database while the object + # is in memory, a reload would cause a KeyError + # i.e. obj.update(unset__field=1) followed by obj.reload() + delattr(self, field) self._changed_fields = obj._changed_fields self._created = False @@ -623,8 +700,8 @@ class Document(BaseDocument): def to_dbref(self): """Returns an instance of :class:`~bson.dbref.DBRef` useful in `__raw__` queries.""" - if not self.pk: - msg = "Only saved documents can have a valid dbref" + if self.pk is None: + msg = 'Only saved documents can have a valid dbref' raise OperationError(msg) return DBRef(self.__class__._get_collection_name(), self.pk) @@ -650,10 +727,20 @@ class Document(BaseDocument): def drop_collection(cls): """Drops the entire collection associated with this :class:`~mongoengine.Document` type from the database. 
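Both ``save()`` and ``delete()`` now forward ``signal_kwargs`` to every signal receiver. A sketch of a receiver consuming that extra context; the ``Page`` document and the ``actor`` key are illustrative only::

    from mongoengine import signals

    def audit_delete(sender, document, **kwargs):
        # 'actor' arrives through the signal_kwargs passed to delete()
        print('%s deleted by %s' % (document.pk, kwargs.get('actor')))

    signals.post_delete.connect(audit_delete, sender=Page)

    Page.objects.first().delete(signal_kwargs={'actor': 'admin'})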
+
+ Raises :class:`OperationError` if the document has no collection set
+ (e.g. if it is `abstract`)
+
+ .. versionchanged:: 0.10.7
+ :class:`OperationError` exception raised if no collection available
"""
+ col_name = cls._get_collection_name()
+ if not col_name:
+ raise OperationError('Document %s has no collection defined '
+ '(is it abstract?)' % cls)
cls._collection = None
db = cls._get_db()
- db.drop_collection(cls._get_collection_name())
+ db.drop_collection(col_name)
@classmethod
def create_index(cls, keys, background=False, **kwargs):
@@ -669,7 +756,7 @@
fields = index_spec.pop('fields')
drop_dups = kwargs.get('drop_dups', False)
if IS_PYMONGO_3 and drop_dups:
- msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
+ msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
elif not IS_PYMONGO_3:
index_spec['drop_dups'] = drop_dups
@@ -695,7 +782,7 @@
will be removed if PyMongo3+ is used
"""
if IS_PYMONGO_3 and drop_dups:
- msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
+ msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
elif not IS_PYMONGO_3:
kwargs.update({'drop_dups': drop_dups})
@@ -715,7 +802,7 @@
index_opts = cls._meta.get('index_opts') or {}
index_cls = cls._meta.get('index_cls', True)
if IS_PYMONGO_3 and drop_dups:
- msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
+ msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
warnings.warn(msg, DeprecationWarning)
collection = cls._get_collection()
@@ -753,8 +840,7 @@
# If _cls is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _cls
- if (index_cls and not cls_indexed and
- cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
+ if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'):
# we shouldn't pass 'cls' to the collection.ensureIndex options
# because of https://jira.mongodb.org/browse/SERVER-769
@@ -773,7 +859,6 @@
""" Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes.
"""
-
if cls._meta.get('abstract'):
return []
@@ -824,16 +909,15 @@
# finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
if [(u'_id', 1)] not in indexes:
indexes.append([(u'_id', 1)])
- if (cls._meta.get('index_cls', True) and
- cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
+ if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'):
indexes.append([(u'_cls', 1)])
return indexes
@classmethod
def compare_indexes(cls):
- """ Compares the indexes defined in MongoEngine with the ones existing
- in the database. Returns any missing/extra indexes.
+ """ Compares the indexes defined in MongoEngine with the ones
+ existing in the database. Returns any missing/extra indexes.
"""
required = cls.list_indexes()
@@ -877,8 +961,9 @@ class DynamicDocument(Document):
_dynamic = True
def __delattr__(self, *args, **kwargs):
- """Deletes the attribute by setting to None and allowing _delta to unset
- it"""
+ """Delete the attribute by setting to None and allowing _delta
+ to unset it.
+ """ field_name = args[0] if field_name in self._dynamic_fields: setattr(self, field_name, None) @@ -900,8 +985,9 @@ class DynamicEmbeddedDocument(EmbeddedDocument): _dynamic = True def __delattr__(self, *args, **kwargs): - """Deletes the attribute by setting to None and allowing _delta to unset - it""" + """Delete the attribute by setting to None and allowing _delta + to unset it. + """ field_name = args[0] if field_name in self._fields: default = self._fields[field_name].default @@ -942,11 +1028,11 @@ class MapReduceDocument(object): if not isinstance(self.key, id_field_type): try: self.key = id_field_type(self.key) - except: - raise Exception("Could not cast key as %s" % + except Exception: + raise Exception('Could not cast key as %s' % id_field_type.__name__) - if not hasattr(self, "_key_object"): + if not hasattr(self, '_key_object'): self._key_object = self._document.objects.with_id(self.key) return self._key_object return self._key_object diff --git a/mongoengine/errors.py b/mongoengine/errors.py index b7fb7632..131596d1 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -1,12 +1,11 @@ from collections import defaultdict -from mongoengine.python_support import txt_type - +import six __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', 'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', 'OperationError', 'NotUniqueError', 'FieldDoesNotExist', - 'ValidationError') + 'ValidationError', 'SaveConditionError') class NotRegistered(Exception): @@ -41,14 +40,18 @@ class NotUniqueError(OperationError): pass +class SaveConditionError(OperationError): + pass + + class FieldDoesNotExist(Exception): """Raised when trying to set a field not declared in a :class:`~mongoengine.Document` or an :class:`~mongoengine.EmbeddedDocument`. To avoid this behavior on data loading, - you should the :attr:`strict` to ``False`` - in the :attr:`meta` dictionnary. + you should set the :attr:`strict` to ``False`` + in the :attr:`meta` dictionary. 
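The corrected ``FieldDoesNotExist`` docstring above points at the ``strict`` meta flag. A small declaration sketch showing how a document can opt out of the check when its collection still holds legacy keys; the ``User`` document is made up for illustration::

    from mongoengine import Document, StringField

    class User(Document):
        name = StringField()
        # Don't raise FieldDoesNotExist for keys that exist in the
        # collection but are no longer declared on the document.
        meta = {'strict': False}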
""" @@ -67,13 +70,13 @@ class ValidationError(AssertionError): field_name = None _message = None - def __init__(self, message="", **kwargs): + def __init__(self, message='', **kwargs): self.errors = kwargs.get('errors', {}) self.field_name = kwargs.get('field_name') self.message = message def __str__(self): - return txt_type(self.message) + return six.text_type(self.message) def __repr__(self): return '%s(%s,)' % (self.__class__.__name__, self.message) @@ -107,17 +110,20 @@ class ValidationError(AssertionError): errors_dict = {} if not source: return errors_dict + if isinstance(source, dict): for field_name, error in source.iteritems(): errors_dict[field_name] = build_dict(error) elif isinstance(source, ValidationError) and source.errors: return build_dict(source.errors) else: - return unicode(source) + return six.text_type(source) + return errors_dict if not self.errors: return {} + return build_dict(self.errors) def _format_errors(self): @@ -130,10 +136,10 @@ class ValidationError(AssertionError): value = ' '.join( [generate_key(v, k) for k, v in value.iteritems()]) - results = "%s.%s" % (prefix, value) if prefix else value + results = '%s.%s' % (prefix, value) if prefix else value return results error_dict = defaultdict(list) for k, v in self.to_dict().iteritems(): error_dict[generate_key(v)].append(k) - return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()]) + return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()]) diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 695f5caa..11425095 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -3,11 +3,15 @@ import decimal import itertools import re import time -import urllib2 import uuid import warnings from operator import itemgetter +from bson import Binary, DBRef, ObjectId, SON +import gridfs +import pymongo +import six + try: import dateutil except ImportError: @@ -15,18 +19,18 @@ except ImportError: else: import dateutil.parser -import pymongo -import gridfs -from bson import Binary, DBRef, SON, ObjectId +try: + from bson.int64 import Int64 +except ImportError: + Int64 = long -from mongoengine.errors import ValidationError -from mongoengine.python_support import (PY3, bin_type, txt_type, - str_types, StringIO) -from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField, - get_document, BaseDocument) -from queryset import DO_NOTHING, QuerySet -from document import Document, EmbeddedDocument -from connection import get_db, DEFAULT_CONNECTION_NAME +from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, + GeoJsonBaseField, ObjectIdField, get_document) +from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db +from mongoengine.document import Document, EmbeddedDocument +from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError +from mongoengine.python_support import StringIO +from mongoengine.queryset import DO_NOTHING, QuerySet try: from PIL import Image, ImageOps @@ -34,7 +38,7 @@ except ImportError: Image = None ImageOps = None -__all__ = [ +__all__ = ( 'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', @@ -45,14 +49,14 @@ __all__ = [ 'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField', 'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField', - 'MultiPolygonField', 
'GeoJsonBaseField'] + 'MultiPolygonField', 'GeoJsonBaseField' +) RECURSIVE_REFERENCE_CONSTANT = 'self' class StringField(BaseField): - """A unicode string field. - """ + """A unicode string field.""" def __init__(self, regex=None, max_length=None, min_length=None, **kwargs): self.regex = re.compile(regex) if regex else None @@ -61,16 +65,16 @@ class StringField(BaseField): super(StringField, self).__init__(**kwargs) def to_python(self, value): - if isinstance(value, unicode): + if isinstance(value, six.text_type): return value try: value = value.decode('utf-8') - except: + except Exception: pass return value def validate(self, value): - if not isinstance(value, basestring): + if not isinstance(value, six.string_types): self.error('StringField only accepts string values') if self.max_length is not None and len(value) > self.max_length: @@ -86,7 +90,7 @@ class StringField(BaseField): return None def prepare_query_value(self, op, value): - if not isinstance(op, basestring): + if not isinstance(op, six.string_types): return value if op.lstrip('i') in ('startswith', 'endswith', 'contains', 'exact'): @@ -135,28 +139,17 @@ class URLField(StringField): # Check first if the scheme is valid scheme = value.split('://')[0].lower() if scheme not in self.schemes: - self.error('Invalid scheme {} in URL: {}'.format(scheme, value)) + self.error(u'Invalid scheme {} in URL: {}'.format(scheme, value)) return # Then check full URL if not self.url_regex.match(value): - self.error('Invalid URL: {}'.format(value)) + self.error(u'Invalid URL: {}'.format(value)) return - if self.verify_exists: - warnings.warn( - "The URLField verify_exists argument has intractable security " - "and performance issues. Accordingly, it has been deprecated.", - DeprecationWarning) - try: - request = urllib2.Request(value) - urllib2.urlopen(request) - except Exception, e: - self.error('This URL appears to be a broken link: %s' % e) - class EmailField(StringField): - """A field that validates input as an E-Mail-Address. + """A field that validates input as an email address. .. versionadded:: 0.4 """ @@ -172,13 +165,12 @@ class EmailField(StringField): def validate(self, value): if not EmailField.EMAIL_REGEX.match(value): - self.error('Invalid Mail-address: %s' % value) + self.error('Invalid email address: %s' % value) super(EmailField, self).validate(value) class IntField(BaseField): - """An 32-bit integer field. - """ + """32-bit integer field.""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value @@ -194,7 +186,7 @@ class IntField(BaseField): def validate(self, value): try: value = int(value) - except: + except Exception: self.error('%s could not be converted to int' % value) if self.min_value is not None and value < self.min_value: @@ -211,8 +203,7 @@ class IntField(BaseField): class LongField(BaseField): - """An 64-bit integer field. - """ + """64-bit integer field.""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value @@ -225,10 +216,13 @@ class LongField(BaseField): pass return value + def to_mongo(self, value): + return Int64(value) + def validate(self, value): try: value = long(value) - except: + except Exception: self.error('%s could not be converted to long' % value) if self.min_value is not None and value < self.min_value: @@ -245,8 +239,7 @@ class LongField(BaseField): class FloatField(BaseField): - """An floating point number field. 
- """ + """Floating point number field.""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value @@ -260,10 +253,14 @@ class FloatField(BaseField): return value def validate(self, value): - if isinstance(value, int): - value = float(value) + if isinstance(value, six.integer_types): + try: + value = float(value) + except OverflowError: + self.error('The value is too large to be converted to float') + if not isinstance(value, float): - self.error('FloatField only accepts float values') + self.error('FloatField only accepts float and integer values') if self.min_value is not None and value < self.min_value: self.error('Float value is too small') @@ -279,7 +276,7 @@ class FloatField(BaseField): class DecimalField(BaseField): - """A fixed-point decimal number field. + """Fixed-point decimal number field. .. versionchanged:: 0.8 .. versionadded:: 0.3 @@ -320,25 +317,25 @@ class DecimalField(BaseField): # Convert to string for python 2.6 before casting to Decimal try: - value = decimal.Decimal("%s" % value) + value = decimal.Decimal('%s' % value) except decimal.InvalidOperation: return value - return value.quantize(decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding) + return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding) - def to_mongo(self, value, use_db_field=True): + def to_mongo(self, value): if value is None: return value if self.force_string: - return unicode(value) + return six.text_type(self.to_python(value)) return float(self.to_python(value)) def validate(self, value): if not isinstance(value, decimal.Decimal): - if not isinstance(value, basestring): - value = unicode(value) + if not isinstance(value, six.string_types): + value = six.text_type(value) try: value = decimal.Decimal(value) - except Exception, exc: + except Exception as exc: self.error('Could not convert value to decimal: %s' % exc) if self.min_value is not None and value < self.min_value: @@ -352,7 +349,7 @@ class DecimalField(BaseField): class BooleanField(BaseField): - """A boolean field type. + """Boolean field type. .. versionadded:: 0.1.2 """ @@ -370,7 +367,7 @@ class BooleanField(BaseField): class DateTimeField(BaseField): - """A datetime field. + """Datetime field. Uses the python-dateutil library if available alternatively use time.strptime to parse the dates. 
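Among the field tweaks above, ``LongField`` now serialises to a BSON ``Int64`` and ``DecimalField(force_string=True)`` round-trips through ``to_python()`` before storing. A short declaration sketch; the ``Product`` document is invented::

    from mongoengine import DecimalField, Document, LongField

    class Product(Document):
        view_count = LongField()                 # stored as a 64-bit Int64
        price = DecimalField(precision=2,        # quantized to 2 decimal places
                             force_string=True)  # stored as a string, not a float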
Note: python-dateutil's parser is fully featured and when @@ -398,7 +395,7 @@ class DateTimeField(BaseField): if callable(value): return value() - if not isinstance(value, basestring): + if not isinstance(value, six.string_types): return None # Attempt to parse a datetime: @@ -508,7 +505,7 @@ class ComplexDateTimeField(StringField): original_value = value try: return self._convert_from_string(value) - except: + except Exception: return original_value def to_mongo(self, value): @@ -525,16 +522,19 @@ class EmbeddedDocumentField(BaseField): """ def __init__(self, document_type, **kwargs): - if not isinstance(document_type, basestring): - if not issubclass(document_type, EmbeddedDocument): - self.error('Invalid embedded document class provided to an ' - 'EmbeddedDocumentField') + if ( + not isinstance(document_type, six.string_types) and + not issubclass(document_type, EmbeddedDocument) + ): + self.error('Invalid embedded document class provided to an ' + 'EmbeddedDocumentField') + self.document_type_obj = document_type super(EmbeddedDocumentField, self).__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, basestring): + if isinstance(self.document_type_obj, six.string_types): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -546,11 +546,10 @@ class EmbeddedDocumentField(BaseField): return self.document_type._from_son(value, _auto_dereference=self._auto_dereference) return value - def to_mongo(self, value, use_db_field=True, fields=[]): + def to_mongo(self, value, use_db_field=True, fields=None): if not isinstance(value, self.document_type): return value - return self.document_type.to_mongo(value, use_db_field, - fields=fields) + return self.document_type.to_mongo(value, use_db_field, fields) def validate(self, value, clean=True): """Make sure that the document instance is an instance of the @@ -566,8 +565,12 @@ class EmbeddedDocumentField(BaseField): return self.document_type._fields.get(member_name) def prepare_query_value(self, op, value): - if not isinstance(value, self.document_type): - value = self.document_type._from_son(value) + if value is not None and not isinstance(value, self.document_type): + try: + value = self.document_type._from_son(value) + except ValueError: + raise InvalidQueryError("Querying the embedded document '%s' failed, due to an invalid query value" % + (self.document_type._class_name,)) super(EmbeddedDocumentField, self).prepare_query_value(op, value) return self.to_mongo(value) @@ -600,11 +603,11 @@ class GenericEmbeddedDocumentField(BaseField): value.validate(clean=clean) - def to_mongo(self, document, use_db_field=True): + def to_mongo(self, document, use_db_field=True, fields=None): if document is None: return None - data = document.to_mongo(use_db_field) + data = document.to_mongo(use_db_field, fields) if '_cls' not in data: data['_cls'] = document._class_name return data @@ -616,19 +619,19 @@ class DynamicField(BaseField): Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" - def to_mongo(self, value): + def to_mongo(self, value, use_db_field=True, fields=None): """Convert a Python type to a MongoDB compatible type. 
""" - if isinstance(value, basestring): + if isinstance(value, six.string_types): return value if hasattr(value, 'to_mongo'): cls = value.__class__ - val = value.to_mongo() + val = value.to_mongo(use_db_field, fields) # If we its a document thats not inherited add _cls if isinstance(value, Document): - val = {"_ref": value.to_dbref(), "_cls": cls.__name__} + val = {'_ref': value.to_dbref(), '_cls': cls.__name__} if isinstance(value, EmbeddedDocument): val['_cls'] = cls.__name__ return val @@ -639,11 +642,11 @@ class DynamicField(BaseField): is_list = False if not hasattr(value, 'items'): is_list = True - value = dict([(k, v) for k, v in enumerate(value)]) + value = {k: v for k, v in enumerate(value)} data = {} for k, v in value.iteritems(): - data[k] = self.to_mongo(v) + data[k] = self.to_mongo(v, use_db_field, fields) value = data if is_list: # Convert back to a list @@ -663,12 +666,12 @@ class DynamicField(BaseField): return member_name def prepare_query_value(self, op, value): - if isinstance(value, basestring): + if isinstance(value, six.string_types): return StringField().prepare_query_value(op, value) return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) def validate(self, value, clean=True): - if hasattr(value, "validate"): + if hasattr(value, 'validate'): value.validate(clean=clean) @@ -688,21 +691,27 @@ class ListField(ComplexBaseField): super(ListField, self).__init__(**kwargs) def validate(self, value): - """Make sure that a list of valid fields is being used. - """ + """Make sure that a list of valid fields is being used.""" if (not isinstance(value, (list, tuple, QuerySet)) or - isinstance(value, basestring)): + isinstance(value, six.string_types)): self.error('Only lists and tuples may be used in a list field') super(ListField, self).validate(value) def prepare_query_value(self, op, value): if self.field: - if op in ('set', 'unset') and ( - not isinstance(value, basestring) and - not isinstance(value, BaseDocument) and - hasattr(value, '__iter__')): + + # If the value is iterable and it's not a string nor a + # BaseDocument, call prepare_query_value for each of its items. + if ( + op in ('set', 'unset', None) and + hasattr(value, '__iter__') and + not isinstance(value, six.string_types) and + not isinstance(value, BaseDocument) + ): return [self.field.prepare_query_value(op, v) for v in value] + return self.field.prepare_query_value(op, value) + return super(ListField, self).prepare_query_value(op, value) @@ -715,7 +724,6 @@ class EmbeddedDocumentListField(ListField): :class:`~mongoengine.EmbeddedDocument`. .. versionadded:: 0.9 - """ def __init__(self, document_type, **kwargs): @@ -755,8 +763,8 @@ class SortedListField(ListField): self._order_reverse = kwargs.pop('reverse') super(SortedListField, self).__init__(field, **kwargs) - def to_mongo(self, value): - value = super(SortedListField, self).to_mongo(value) + def to_mongo(self, value, use_db_field=True, fields=None): + value = super(SortedListField, self).to_mongo(value, use_db_field, fields) if self._ordering is not None: return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse) @@ -764,17 +772,17 @@ class SortedListField(ListField): def key_not_string(d): - """ Helper function to recursively determine if any key in a dictionary is - not a string. + """Helper function to recursively determine if any key in a + dictionary is not a string. 
""" for k, v in d.items(): - if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)): + if not isinstance(k, six.string_types) or (isinstance(v, dict) and key_not_string(v)): return True def key_has_dot_or_dollar(d): - """ Helper function to recursively determine if any key in a dictionary - contains a dot or a dollar sign. + """Helper function to recursively determine if any key in a + dictionary contains a dot or a dollar sign. """ for k, v in d.items(): if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): @@ -794,6 +802,7 @@ class DictField(ComplexBaseField): def __init__(self, basecls=None, field=None, *args, **kwargs): self.field = field + self._auto_dereference = False self.basecls = basecls or BaseField if not issubclass(self.basecls, BaseField): self.error('DictField only accepts dict values') @@ -801,14 +810,13 @@ class DictField(ComplexBaseField): super(DictField, self).__init__(*args, **kwargs) def validate(self, value): - """Make sure that a list of valid fields is being used. - """ + """Make sure that a list of valid fields is being used.""" if not isinstance(value, dict): self.error('Only dictionaries may be used in a DictField') if key_not_string(value): - msg = ("Invalid dictionary key - documents must " - "have only string keys") + msg = ('Invalid dictionary key - documents must ' + 'have only string keys') self.error(msg) if key_has_dot_or_dollar(value): self.error('Invalid dictionary key name - keys may not contain "."' @@ -823,14 +831,15 @@ class DictField(ComplexBaseField): 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact'] - if op in match_operators and isinstance(value, basestring): + if op in match_operators and isinstance(value, six.string_types): return StringField().prepare_query_value(op, value) if hasattr(self.field, 'field'): if op in ('set', 'unset') and isinstance(value, dict): - return dict( - (k, self.field.prepare_query_value(op, v)) - for k, v in value.items()) + return { + k: self.field.prepare_query_value(op, v) + for k, v in value.items() + } return self.field.prepare_query_value(op, value) return super(DictField, self).prepare_query_value(op, value) @@ -862,12 +871,11 @@ class ReferenceField(BaseField): The options are: - * DO_NOTHING - don't do anything (default). - * NULLIFY - Updates the reference to null. - * CASCADE - Deletes the documents associated with the reference. - * DENY - Prevent the deletion of the reference object. - * PULL - Pull the reference from a :class:`~mongoengine.fields.ListField` - of references + * DO_NOTHING (0) - don't do anything (default). + * NULLIFY (1) - Updates the reference to null. + * CASCADE (2) - Deletes the documents associated with the reference. + * DENY (3) - Prevent the deletion of the reference object. + * PULL (4) - Pull the reference from a :class:`~mongoengine.fields.ListField` of references Alternative syntax for registering delete rules (useful when implementing bi-directional delete rules) @@ -878,11 +886,7 @@ class ReferenceField(BaseField): content = StringField() foo = ReferenceField('Foo') - Bar.register_delete_rule(Foo, 'bar', NULLIFY) - - .. note :: - `reverse_delete_rule` does not trigger pre / post delete signals to be - triggered. + Foo.register_delete_rule(Bar, 'foo', NULLIFY) .. versionchanged:: 0.5 added `reverse_delete_rule` """ @@ -895,11 +899,17 @@ class ReferenceField(BaseField): or as the :class:`~pymongo.objectid.ObjectId`.id . :param reverse_delete_rule: Determines what to do when the referring object is deleted + + .. 
note :: + A reference to an abstract document type is always stored as a + :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. """ - if not isinstance(document_type, basestring): - if not issubclass(document_type, (Document, basestring)): - self.error('Argument to ReferenceField constructor must be a ' - 'document class or a string') + if ( + not isinstance(document_type, six.string_types) and + not issubclass(document_type, Document) + ): + self.error('Argument to ReferenceField constructor must be a ' + 'document class or a string') self.dbref = dbref self.document_type_obj = document_type @@ -908,7 +918,7 @@ class ReferenceField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, basestring): + if isinstance(self.document_type_obj, six.string_types): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -916,8 +926,7 @@ class ReferenceField(BaseField): return self.document_type_obj def __get__(self, instance, owner): - """Descriptor to allow lazy dereferencing. - """ + """Descriptor to allow lazy dereferencing.""" if instance is None: # Document class being used rather than a document object return self @@ -927,9 +936,16 @@ class ReferenceField(BaseField): self._auto_dereference = instance._fields[self.name]._auto_dereference # Dereference DBRefs if self._auto_dereference and isinstance(value, DBRef): - value = self.document_type._get_db().dereference(value) - if value is not None: - instance._data[self.name] = self.document_type._from_son(value) + if hasattr(value, 'cls'): + # Dereference using the class type specified in the reference + cls = get_document(value.cls) + else: + cls = self.document_type + dereferenced = cls._get_db().dereference(value) + if dereferenced is None: + raise DoesNotExist('Trying to dereference unknown document %s' % value) + else: + instance._data[self.name] = cls._from_son(dereferenced) return super(ReferenceField, self).__get__(instance, owner) @@ -939,28 +955,35 @@ class ReferenceField(BaseField): return document.id return document - id_field_name = self.document_type._meta['id_field'] - id_field = self.document_type._fields[id_field_name] - if isinstance(document, Document): # We need the id from the saved object to create the DBRef id_ = document.pk if id_ is None: self.error('You can only reference documents once they have' ' been saved to the database') + + # Use the attributes from the document instance, so that they + # override the attributes of this field's document type + cls = document else: id_ = document + cls = self.document_type + + id_field_name = cls._meta['id_field'] + id_field = cls._fields[id_field_name] id_ = id_field.to_mongo(id_) - if self.dbref: - collection = self.document_type._get_collection_name() + if self.document_type._meta.get('abstract'): + collection = cls._get_collection_name() + return DBRef(collection, id_, cls=cls._class_name) + elif self.dbref: + collection = cls._get_collection_name() return DBRef(collection, id_) return id_ def to_python(self, value): - """Convert a MongoDB-compatible type to a Python type. 
- """ + """Convert a MongoDB-compatible type to a Python type.""" if (not self.dbref and not isinstance(value, (DBRef, Document, EmbeddedDocument))): collection = self.document_type._get_collection_name() @@ -976,12 +999,19 @@ class ReferenceField(BaseField): def validate(self, value): if not isinstance(value, (self.document_type, DBRef)): - self.error("A ReferenceField only accepts DBRef or documents") + self.error('A ReferenceField only accepts DBRef or documents') if isinstance(value, Document) and value.id is None: self.error('You can only reference documents once they have been ' 'saved to the database') + if self.document_type._meta.get('abstract') and \ + not isinstance(value, self.document_type): + self.error( + '%s is not an instance of abstract reference type %s' % ( + self.document_type._class_name) + ) + def lookup_member(self, member_name): return self.document_type._fields.get(member_name) @@ -989,18 +1019,23 @@ class ReferenceField(BaseField): class CachedReferenceField(BaseField): """ A referencefield with cache fields to purpose pseudo-joins - + .. versionadded:: 0.9 """ - def __init__(self, document_type, fields=[], auto_sync=True, **kwargs): + def __init__(self, document_type, fields=None, auto_sync=True, **kwargs): """Initialises the Cached Reference Field. :param fields: A list of fields to be cached in document :param auto_sync: if True documents are auto updated. """ - if not isinstance(document_type, basestring) and \ - not issubclass(document_type, (Document, basestring)): + if fields is None: + fields = [] + + if ( + not isinstance(document_type, six.string_types) and + not issubclass(document_type, Document) + ): self.error('Argument to CachedReferenceField constructor must be a' ' document class or a string') @@ -1016,18 +1051,20 @@ class CachedReferenceField(BaseField): sender=self.document_type) def on_document_pre_save(self, sender, document, created, **kwargs): - if not created: - update_kwargs = dict( - ('set__%s__%s' % (self.name, k), v) - for k, v in document._delta()[0].items() - if k in self.fields) + if created: + return None - if update_kwargs: - filter_kwargs = {} - filter_kwargs[self.name] = document + update_kwargs = { + 'set__%s__%s' % (self.name, key): val + for key, val in document._delta()[0].items() + if key in self.fields + } + if update_kwargs: + filter_kwargs = {} + filter_kwargs[self.name] = document - self.owner_document.objects( - **filter_kwargs).update(**update_kwargs) + self.owner_document.objects( + **filter_kwargs).update(**update_kwargs) def to_python(self, value): if isinstance(value, dict): @@ -1040,7 +1077,7 @@ class CachedReferenceField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, basestring): + if isinstance(self.document_type_obj, six.string_types): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1057,13 +1094,15 @@ class CachedReferenceField(BaseField): self._auto_dereference = instance._fields[self.name]._auto_dereference # Dereference DBRefs if self._auto_dereference and isinstance(value, DBRef): - value = self.document_type._get_db().dereference(value) - if value is not None: - instance._data[self.name] = self.document_type._from_son(value) + dereferenced = self.document_type._get_db().dereference(value) + if dereferenced is None: + raise DoesNotExist('Trying to dereference unknown document %s' % value) + else: + instance._data[self.name] = self.document_type._from_son(dereferenced) return 
super(CachedReferenceField, self).__get__(instance, owner) - def to_mongo(self, document): + def to_mongo(self, document, use_db_field=True, fields=None): id_field_name = self.document_type._meta['id_field'] id_field = self.document_type._fields[id_field_name] @@ -1078,10 +1117,15 @@ class CachedReferenceField(BaseField): # TODO: should raise here or will fail next statement value = SON(( - ("_id", id_field.to_mongo(id_)), + ('_id', id_field.to_mongo(id_)), )) - value.update(dict(document.to_mongo(fields=self.fields))) + if fields: + new_fields = [f for f in self.fields if f in fields] + else: + new_fields = self.fields + + value.update(dict(document.to_mongo(use_db_field, fields=new_fields))) return value def prepare_query_value(self, op, value): @@ -1099,7 +1143,7 @@ class CachedReferenceField(BaseField): def validate(self, value): if not isinstance(value, self.document_type): - self.error("A CachedReferenceField only accepts documents") + self.error('A CachedReferenceField only accepts documents') if isinstance(value, Document) and value.id is None: self.error('You can only reference documents once they have been ' @@ -1147,13 +1191,13 @@ class GenericReferenceField(BaseField): # Keep the choices as a list of allowed Document class names if choices: for choice in choices: - if isinstance(choice, basestring): + if isinstance(choice, six.string_types): self.choices.append(choice) elif isinstance(choice, type) and issubclass(choice, Document): self.choices.append(choice._class_name) else: self.error('Invalid choices provided: must be a list of' - 'Document subclasses and/or basestrings') + 'Document subclasses and/or six.string_typess') def _validate_choices(self, value): if isinstance(value, dict): @@ -1172,7 +1216,11 @@ class GenericReferenceField(BaseField): self._auto_dereference = instance._fields[self.name]._auto_dereference if self._auto_dereference and isinstance(value, (dict, SON)): - instance._data[self.name] = self.dereference(value) + dereferenced = self.dereference(value) + if dereferenced is None: + raise DoesNotExist('Trying to dereference unknown document %s' % value) + else: + instance._data[self.name] = dereferenced return super(GenericReferenceField, self).__get__(instance, owner) @@ -1197,11 +1245,11 @@ class GenericReferenceField(BaseField): doc = doc_cls._from_son(doc) return doc - def to_mongo(self, document, use_db_field=True): + def to_mongo(self, document): if document is None: return None - if isinstance(document, (dict, SON)): + if isinstance(document, (dict, SON, ObjectId, DBRef)): return document id_field_name = document.__class__._meta['id_field'] @@ -1232,8 +1280,7 @@ class GenericReferenceField(BaseField): class BinaryField(BaseField): - """A binary data field. 
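``CachedReferenceField``, reworked above, stores a partial copy of the referenced document next to the reference so simple filters avoid a second query. A usage sketch with made-up ``Publisher``/``Book`` documents::

    from mongoengine import CachedReferenceField, Document, StringField

    class Publisher(Document):
        name = StringField()
        country = StringField()

    class Book(Document):
        title = StringField()
        publisher = CachedReferenceField(Publisher, fields=['name'])

    # The stored value looks like {'_id': ObjectId(...), 'name': 'Acme'},
    # so this filter is answered from the cached copy:
    Book.objects(publisher__name='Acme')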
- """ + """A binary data field.""" def __init__(self, max_bytes=None, **kwargs): self.max_bytes = max_bytes @@ -1241,18 +1288,18 @@ class BinaryField(BaseField): def __set__(self, instance, value): """Handle bytearrays in python 3.1""" - if PY3 and isinstance(value, bytearray): - value = bin_type(value) + if six.PY3 and isinstance(value, bytearray): + value = six.binary_type(value) return super(BinaryField, self).__set__(instance, value) def to_mongo(self, value): return Binary(value) def validate(self, value): - if not isinstance(value, (bin_type, txt_type, Binary)): - self.error("BinaryField only accepts instances of " - "(%s, %s, Binary)" % ( - bin_type.__name__, txt_type.__name__)) + if not isinstance(value, (six.binary_type, six.text_type, Binary)): + self.error('BinaryField only accepts instances of ' + '(%s, %s, Binary)' % ( + six.binary_type.__name__, six.text_type.__name__)) if self.max_bytes is not None and len(value) > self.max_bytes: self.error('Binary value is too long') @@ -1336,16 +1383,18 @@ class GridFSProxy(object): get_db(self.db_alias), self.collection_name) return self._fs - def get(self, id=None): - if id: - self.grid_id = id + def get(self, grid_id=None): + if grid_id: + self.grid_id = grid_id + if self.grid_id is None: return None + try: if self.gridout is None: self.gridout = self.fs.get(self.grid_id) return self.gridout - except: + except Exception: # File has been deleted return None @@ -1383,8 +1432,8 @@ class GridFSProxy(object): else: try: return gridout.read(size) - except: - return "" + except Exception: + return '' def delete(self): # Delete file from GridFS, FileField still remains @@ -1416,9 +1465,8 @@ class FileField(BaseField): """ proxy_class = GridFSProxy - def __init__(self, - db_alias=DEFAULT_CONNECTION_NAME, - collection_name="fs", **kwargs): + def __init__(self, db_alias=DEFAULT_CONNECTION_NAME, collection_name='fs', + **kwargs): super(FileField, self).__init__(**kwargs) self.collection_name = collection_name self.db_alias = db_alias @@ -1440,15 +1488,17 @@ class FileField(BaseField): def __set__(self, instance, value): key = self.name - if ((hasattr(value, 'read') and not - isinstance(value, GridFSProxy)) or isinstance(value, str_types)): + if ( + (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or + isinstance(value, (six.binary_type, six.string_types)) + ): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it if grid_file: try: grid_file.delete() - except: + except Exception: pass # Create a new proxy object as we don't already have one @@ -1510,7 +1560,7 @@ class ImageGridFsProxy(GridFSProxy): try: img = Image.open(file_obj) img_format = img.format - except Exception, e: + except Exception as e: raise ValidationError('Invalid image: %s' % e) # Progressive JPEG @@ -1619,10 +1669,10 @@ class ImageGridFsProxy(GridFSProxy): return self.fs.get(out.thumbnail_id) def write(self, *args, **kwargs): - raise RuntimeError("Please use \"put\" method instead") + raise RuntimeError('Please use "put" method instead') def writelines(self, *args, **kwargs): - raise RuntimeError("Please use \"put\" method instead") + raise RuntimeError('Please use "put" method instead') class ImproperlyConfigured(Exception): @@ -1647,14 +1697,17 @@ class ImageField(FileField): def __init__(self, size=None, thumbnail_size=None, collection_name='images', **kwargs): if not Image: - raise ImproperlyConfigured("PIL library was not found") + raise ImproperlyConfigured('PIL library was not found') 
params_size = ('width', 'height', 'force') - extra_args = dict(size=size, thumbnail_size=thumbnail_size) + extra_args = { + 'size': size, + 'thumbnail_size': thumbnail_size + } for att_name, att in extra_args.items(): value = None if isinstance(att, (tuple, list)): - if PY3: + if six.PY3: value = dict(itertools.zip_longest(params_size, att, fillvalue=None)) else: @@ -1682,17 +1735,17 @@ class SequenceField(BaseField): :param collection_name: Name of the counter collection (default 'mongoengine.counters') :param sequence_name: Name of the sequence in the collection (default 'ClassName.counter') :param value_decorator: Any callable to use as a counter (default int) - + Use any callable as `value_decorator` to transform calculated counter into any value suitable for your needs, e.g. string or hexadecimal representation of the default integer counter value. - + .. note:: - - In case the counter is defined in the abstract document, it will be - common to all inherited documents and the default sequence name will + + In case the counter is defined in the abstract document, it will be + common to all inherited documents and the default sequence name will be the class name of the abstract document. - + .. versionadded:: 0.5 .. versionchanged:: 0.8 added `value_decorator` """ @@ -1715,10 +1768,10 @@ class SequenceField(BaseField): Generate and Increment the counter """ sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = '%s.%s' % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - counter = collection.find_and_modify(query={"_id": sequence_id}, - update={"$inc": {"next": 1}}, + counter = collection.find_and_modify(query={'_id': sequence_id}, + update={'$inc': {'next': 1}}, new=True, upsert=True) return self.value_decorator(counter['next']) @@ -1741,9 +1794,9 @@ class SequenceField(BaseField): as it is only fixed on set. 
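To illustrate the SequenceField docstring above: the counter document lives in the 'mongoengine.counters' collection and is bumped atomically via $inc in generate(), and value_decorator reshapes the raw integer. A sketch with an invented Ticket model:

from mongoengine import Document, StringField, SequenceField, connect

connect('example_db')  # assumed local mongod


class Ticket(Document):
    # Store the counter as a zero-padded string rather than a plain int.
    number = SequenceField(value_decorator=lambda n: '%06d' % n)
    subject = StringField()


t1 = Ticket(subject='first').save()
t2 = Ticket(subject='second').save()
print(t1.number)   # '000001' on a fresh counter
print(t2.number)   # '000002'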
""" sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = '%s.%s' % (sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - data = collection.find_one({"_id": sequence_id}) + data = collection.find_one({'_id': sequence_id}) if data: return self.value_decorator(data['next'] + 1) @@ -1813,17 +1866,17 @@ class UUIDField(BaseField): if not self._binary: original_value = value try: - if not isinstance(value, basestring): - value = unicode(value) + if not isinstance(value, six.string_types): + value = six.text_type(value) return uuid.UUID(value) - except: + except Exception: return original_value return value def to_mongo(self, value): if not self._binary: - return unicode(value) - elif isinstance(value, basestring): + return six.text_type(value) + elif isinstance(value, six.string_types): return uuid.UUID(value) return value @@ -1834,11 +1887,11 @@ class UUIDField(BaseField): def validate(self, value): if not isinstance(value, uuid.UUID): - if not isinstance(value, basestring): + if not isinstance(value, six.string_types): value = str(value) try: uuid.UUID(value) - except Exception, exc: + except Exception as exc: self.error('Could not convert to UUID: %s' % exc) @@ -1856,19 +1909,18 @@ class GeoPointField(BaseField): _geo_index = pymongo.GEO2D def validate(self, value): - """Make sure that a geo-value is of type (x, y) - """ + """Make sure that a geo-value is of type (x, y)""" if not isinstance(value, (list, tuple)): self.error('GeoPointField can only accept tuples or lists ' 'of (x, y)') if not len(value) == 2: - self.error("Value (%s) must be a two-dimensional point" % + self.error('Value (%s) must be a two-dimensional point' % repr(value)) elif (not isinstance(value[0], (float, int)) or not isinstance(value[1], (float, int))): self.error( - "Both values (%s) in point must be float or int" % repr(value)) + 'Both values (%s) in point must be float or int' % repr(value)) class PointField(GeoJsonBaseField): @@ -1878,8 +1930,8 @@ class PointField(GeoJsonBaseField): .. code-block:: js - { "type" : "Point" , - "coordinates" : [x, y]} + {'type' : 'Point' , + 'coordinates' : [x, y]} You can either pass a dict with the full information or a list to set the value. @@ -1888,7 +1940,7 @@ class PointField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = "Point" + _type = 'Point' class LineStringField(GeoJsonBaseField): @@ -1898,8 +1950,8 @@ class LineStringField(GeoJsonBaseField): .. code-block:: js - { "type" : "LineString" , - "coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]} + {'type' : 'LineString' , + 'coordinates' : [[x1, y1], [x1, y1] ... [xn, yn]]} You can either pass a dict with the full information or a list of points. @@ -1907,7 +1959,7 @@ class LineStringField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = "LineString" + _type = 'LineString' class PolygonField(GeoJsonBaseField): @@ -1917,9 +1969,9 @@ class PolygonField(GeoJsonBaseField): .. code-block:: js - { "type" : "Polygon" , - "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]], - [[x1, y1], [x1, y1] ... [xn, yn]]} + {'type' : 'Polygon' , + 'coordinates' : [[[x1, y1], [x1, y1] ... [xn, yn]], + [[x1, y1], [x1, y1] ... [xn, yn]]} You can either pass a dict with the full information or a list of LineStrings. The first LineString being the outside and the rest being @@ -1929,7 +1981,7 @@ class PolygonField(GeoJsonBaseField): .. 
versionadded:: 0.8 """ - _type = "Polygon" + _type = 'Polygon' class MultiPointField(GeoJsonBaseField): @@ -1939,8 +1991,8 @@ class MultiPointField(GeoJsonBaseField): .. code-block:: js - { "type" : "MultiPoint" , - "coordinates" : [[x1, y1], [x2, y2]]} + {'type' : 'MultiPoint' , + 'coordinates' : [[x1, y1], [x2, y2]]} You can either pass a dict with the full information or a list to set the value. @@ -1949,7 +2001,7 @@ class MultiPointField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = "MultiPoint" + _type = 'MultiPoint' class MultiLineStringField(GeoJsonBaseField): @@ -1959,9 +2011,9 @@ class MultiLineStringField(GeoJsonBaseField): .. code-block:: js - { "type" : "MultiLineString" , - "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]], - [[x1, y1], [x1, y1] ... [xn, yn]]]} + {'type' : 'MultiLineString' , + 'coordinates' : [[[x1, y1], [x1, y1] ... [xn, yn]], + [[x1, y1], [x1, y1] ... [xn, yn]]]} You can either pass a dict with the full information or a list of points. @@ -1969,7 +2021,7 @@ class MultiLineStringField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = "MultiLineString" + _type = 'MultiLineString' class MultiPolygonField(GeoJsonBaseField): @@ -1979,14 +2031,14 @@ class MultiPolygonField(GeoJsonBaseField): .. code-block:: js - { "type" : "MultiPolygon" , - "coordinates" : [[ - [[x1, y1], [x1, y1] ... [xn, yn]], - [[x1, y1], [x1, y1] ... [xn, yn]] - ], [ - [[x1, y1], [x1, y1] ... [xn, yn]], - [[x1, y1], [x1, y1] ... [xn, yn]] - ] + {'type' : 'MultiPolygon' , + 'coordinates' : [[ + [[x1, y1], [x1, y1] ... [xn, yn]], + [[x1, y1], [x1, y1] ... [xn, yn]] + ], [ + [[x1, y1], [x1, y1] ... [xn, yn]], + [[x1, y1], [x1, y1] ... [xn, yn]] + ] } You can either pass a dict with the full information or a list @@ -1996,4 +2048,4 @@ class MultiPolygonField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = "MultiPolygon" + _type = 'MultiPolygon' diff --git a/mongoengine/python_support.py b/mongoengine/python_support.py index 5bb9038d..e51e1bc9 100644 --- a/mongoengine/python_support.py +++ b/mongoengine/python_support.py @@ -1,7 +1,9 @@ -"""Helper functions and types to aid with Python 2.5 - 3 support.""" - -import sys +""" +Helper functions, constants, and types to aid with Python v2.7 - v3.x and +PyMongo v2.7 - v3.x support. +""" import pymongo +import six if pymongo.version_tuple[0] < 3: @@ -9,29 +11,15 @@ if pymongo.version_tuple[0] < 3: else: IS_PYMONGO_3 = True -PY3 = sys.version_info[0] == 3 -if PY3: - import codecs - from io import BytesIO as StringIO +# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. +StringIO = six.BytesIO - # return s converted to binary. 
b('test') should be equivalent to b'test' - def b(s): - return codecs.latin_1_encode(s)[0] - - bin_type = bytes - txt_type = str -else: +# Additionally for Py2, try to use the faster cStringIO, if available +if not six.PY3: try: - from cStringIO import StringIO + import cStringIO except ImportError: - from StringIO import StringIO - - # Conversion to binary only necessary in Python 3 - def b(s): - return s - - bin_type = str - txt_type = unicode - -str_types = (bin_type, txt_type) + pass + else: + StringIO = cStringIO.StringIO diff --git a/mongoengine/queryset/__init__.py b/mongoengine/queryset/__init__.py index 026a7acd..5219c39e 100644 --- a/mongoengine/queryset/__init__.py +++ b/mongoengine/queryset/__init__.py @@ -1,11 +1,17 @@ -from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned, - InvalidQueryError, OperationError, - NotUniqueError) +from mongoengine.errors import * from mongoengine.queryset.field_list import * from mongoengine.queryset.manager import * from mongoengine.queryset.queryset import * from mongoengine.queryset.transform import * from mongoengine.queryset.visitor import * -__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + - transform.__all__ + visitor.__all__) +# Expose just the public subset of all imported objects and constants. +__all__ = ( + 'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager', + 'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL', + + # Errors that might be related to a queryset, mostly here for backward + # compatibility + 'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned', + 'NotUniqueError', 'OperationError', +) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 0e183889..7e485686 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -7,20 +7,20 @@ import pprint import re import warnings -from bson import SON +from bson import SON, json_util from bson.code import Code -from bson import json_util import pymongo import pymongo.errors from pymongo.common import validate_read_preference +import six from mongoengine import signals +from mongoengine.base import get_document +from mongoengine.common import _import_class from mongoengine.connection import get_db from mongoengine.context_managers import switch_db -from mongoengine.common import _import_class -from mongoengine.base.common import get_document -from mongoengine.errors import (OperationError, NotUniqueError, - InvalidQueryError, LookUpError) +from mongoengine.errors import (InvalidQueryError, LookUpError, + NotUniqueError, OperationError) from mongoengine.python_support import IS_PYMONGO_3 from mongoengine.queryset import transform from mongoengine.queryset.field_list import QueryFieldList @@ -74,17 +74,19 @@ class BaseQuerySet(object): # subclasses of the class being used if document._meta.get('allow_inheritance') is True: if len(self._document._subclasses) == 1: - self._initial_query = {"_cls": self._document._subclasses[0]} + self._initial_query = {'_cls': self._document._subclasses[0]} else: self._initial_query = { - "_cls": {"$in": self._document._subclasses}} + '_cls': {'$in': self._document._subclasses}} self._loaded_fields = QueryFieldList(always_include=['_cls']) self._cursor_obj = None self._limit = None self._skip = None self._hint = -1 # Using -1 as None is a valid value for hint + self._batch_size = None self.only_fields = [] self._max_time_ms = None + self._comment = None def __call__(self, q_obj=None, class_check=True, read_preference=None, 
**query): @@ -105,8 +107,8 @@ class BaseQuerySet(object): if q_obj: # make sure proper query object is passed if not isinstance(q_obj, QNode): - msg = ("Not a query object: %s. " - "Did you intend to use key=value?" % q_obj) + msg = ('Not a query object: %s. ' + 'Did you intend to use key=value?' % q_obj) raise InvalidQueryError(msg) query &= q_obj @@ -123,9 +125,40 @@ class BaseQuerySet(object): return queryset - def __getitem__(self, key): - """Support skip and limit using getitem and slicing syntax. + def __getstate__(self): """ + Need for pickling queryset + + See https://github.com/MongoEngine/mongoengine/issues/442 + """ + + obj_dict = self.__dict__.copy() + + # don't picke collection, instead pickle collection params + obj_dict.pop('_collection_obj') + + # don't pickle cursor + obj_dict['_cursor_obj'] = None + + return obj_dict + + def __setstate__(self, obj_dict): + """ + Need for pickling queryset + + See https://github.com/MongoEngine/mongoengine/issues/442 + """ + + obj_dict['_collection_obj'] = obj_dict['_document']._get_collection() + + # update attributes + self.__dict__.update(obj_dict) + + # forse load cursor + # self._cursor + + def __getitem__(self, key): + """Support skip and limit using getitem and slicing syntax.""" queryset = self.clone() # Slice provided @@ -135,7 +168,7 @@ class BaseQuerySet(object): queryset._skip, queryset._limit = key.start, key.stop if key.start and key.stop: queryset._limit = key.stop - key.start - except IndexError, err: + except IndexError as err: # PyMongo raises an error if key.start == key.stop, catch it, # bin it, kill it. start = key.start or 0 @@ -168,19 +201,16 @@ class BaseQuerySet(object): raise NotImplementedError def _has_data(self): - """ Retrieves whether cursor has any data. """ - + """Return True if cursor has any data.""" queryset = self.order_by() return False if queryset.first() is None else True def __nonzero__(self): - """ Avoid to open all records in an if stmt in Py2. """ - + """Avoid to open all records in an if stmt in Py2.""" return self._has_data() def __bool__(self): - """ Avoid to open all records in an if stmt in Py3. """ - + """Avoid to open all records in an if stmt in Py3.""" return self._has_data() # Core functions @@ -208,7 +238,7 @@ class BaseQuerySet(object): queryset = self.clone() if queryset._search_text: raise OperationError( - "It is not possible to use search_text two times.") + 'It is not possible to use search_text two times.') query_kwargs = SON({'$search': text}) if language: @@ -237,7 +267,7 @@ class BaseQuerySet(object): try: result = queryset.next() except StopIteration: - msg = ("%s matching query does not exist." + msg = ('%s matching query does not exist.' % queryset._document._class_name) raise queryset._document.DoesNotExist(msg) try: @@ -245,6 +275,8 @@ class BaseQuerySet(object): except StopIteration: return result + # If we were able to retrieve the 2nd doc, rewind the cursor and + # raise the MultipleObjectsReturned exception. queryset.rewind() message = u'%d items returned, instead of 1' % queryset.count() raise queryset._document.MultipleObjectsReturned(message) @@ -257,8 +289,7 @@ class BaseQuerySet(object): return self._document(**kwargs).save() def first(self): - """Retrieve the first object matching the query. 
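The new __getstate__/__setstate__ pair above makes querysets picklable (issue #442) by dropping the collection handle and the open cursor, then re-acquiring the collection on unpickling. A sketch with an assumed model and database:

import pickle

from mongoengine import Document, StringField, connect

connect('example_db')  # assumed local mongod


class Person(Document):
    name = StringField()


qs = Person.objects(name__startswith='A')
payload = pickle.dumps(qs)        # cursor and collection are not pickled
restored = pickle.loads(payload)  # collection handle is re-created here
print(restored.count())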
- """ + """Retrieve the first object matching the query.""" queryset = self.clone() try: result = queryset[0] @@ -266,7 +297,8 @@ class BaseQuerySet(object): result = None return result - def insert(self, doc_or_docs, load_bulk=True, write_concern=None): + def insert(self, doc_or_docs, load_bulk=True, + write_concern=None, signal_kwargs=None): """bulk insert documents :param doc_or_docs: a document or list of documents to be inserted @@ -279,11 +311,15 @@ class BaseQuerySet(object): ``insert(..., {w: 2, fsync: True})`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. + :parm signal_kwargs: (optional) kwargs dictionary to be passed to + the signal calls. By default returns document instances, set ``load_bulk`` to False to return just ``ObjectIds`` .. versionadded:: 0.5 + .. versionchanged:: 0.10.7 + Add signal_kwargs argument """ Document = _import_class('Document') @@ -296,35 +332,37 @@ class BaseQuerySet(object): return_one = True docs = [docs] - raw = [] for doc in docs: if not isinstance(doc, self._document): msg = ("Some documents inserted aren't instances of %s" % str(self._document)) raise OperationError(msg) if doc.pk and not doc._created: - msg = "Some documents have ObjectIds use doc.update() instead" + msg = 'Some documents have ObjectIds use doc.update() instead' raise OperationError(msg) - raw.append(doc.to_mongo()) - signals.pre_bulk_insert.send(self._document, documents=docs) + signal_kwargs = signal_kwargs or {} + signals.pre_bulk_insert.send(self._document, + documents=docs, **signal_kwargs) + + raw = [doc.to_mongo() for doc in docs] try: ids = self._collection.insert(raw, **write_concern) - except pymongo.errors.DuplicateKeyError, err: + except pymongo.errors.DuplicateKeyError as err: message = 'Could not save document (%s)' - raise NotUniqueError(message % unicode(err)) - except pymongo.errors.OperationFailure, err: + raise NotUniqueError(message % six.text_type(err)) + except pymongo.errors.OperationFailure as err: message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', unicode(err)): + if re.match('^E1100[01] duplicate key', six.text_type(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % unicode(err)) - raise OperationError(message % unicode(err)) + raise NotUniqueError(message % six.text_type(err)) + raise OperationError(message % six.text_type(err)) if not load_bulk: signals.post_bulk_insert.send( - self._document, documents=docs, loaded=False) + self._document, documents=docs, loaded=False, **signal_kwargs) return return_one and ids[0] or ids documents = self.in_bulk(ids) @@ -332,7 +370,7 @@ class BaseQuerySet(object): for obj_id in ids: results.append(documents.get(obj_id)) signals.post_bulk_insert.send( - self._document, documents=results, loaded=True) + self._document, documents=results, loaded=True, **signal_kwargs) return return_one and results[0] or results def count(self, with_limit_and_skip=False): @@ -346,7 +384,8 @@ class BaseQuerySet(object): return 0 return self._cursor.count(with_limit_and_skip=with_limit_and_skip) - def delete(self, write_concern=None, _from_doc_delete=False, cascade_refs=None): + def delete(self, write_concern=None, _from_doc_delete=False, + cascade_refs=None): """Delete the documents matched by the query. 
:param write_concern: Extra keyword arguments are passed down which @@ -369,8 +408,9 @@ class BaseQuerySet(object): # Handle deletes where skips or limits have been applied or # there is an untriggered delete signal has_delete_signal = signals.signals_available and ( - signals.pre_delete.has_receivers_for(self._document) or - signals.post_delete.has_receivers_for(self._document)) + signals.pre_delete.has_receivers_for(doc) or + signals.post_delete.has_receivers_for(doc) + ) call_document_delete = (queryset._skip or queryset._limit or has_delete_signal) and not _from_doc_delete @@ -383,35 +423,44 @@ class BaseQuerySet(object): return cnt delete_rules = doc._meta.get('delete_rules') or {} + delete_rules = list(delete_rules.items()) + # Check for DENY rules before actually deleting/nullifying any other # references - for rule_entry in delete_rules: + for rule_entry, rule in delete_rules: document_cls, field_name = rule_entry if document_cls._meta.get('abstract'): continue - rule = doc._meta['delete_rules'][rule_entry] - if rule == DENY and document_cls.objects( - **{field_name + '__in': self}).count() > 0: - msg = ("Could not delete document (%s.%s refers to it)" - % (document_cls.__name__, field_name)) - raise OperationError(msg) - for rule_entry in delete_rules: + if rule == DENY: + refs = document_cls.objects(**{field_name + '__in': self}) + if refs.limit(1).count() > 0: + raise OperationError( + 'Could not delete document (%s.%s refers to it)' + % (document_cls.__name__, field_name) + ) + + # Check all the other rules + for rule_entry, rule in delete_rules: document_cls, field_name = rule_entry if document_cls._meta.get('abstract'): continue - rule = doc._meta['delete_rules'][rule_entry] + if rule == CASCADE: cascade_refs = set() if cascade_refs is None else cascade_refs - for ref in queryset: - cascade_refs.add(ref.id) - ref_q = document_cls.objects(**{field_name + '__in': self, 'id__nin': cascade_refs}) - ref_q_count = ref_q.count() - if ref_q_count > 0: - ref_q.delete(write_concern=write_concern, cascade_refs=cascade_refs) + # Handle recursive reference + if doc._collection == document_cls._collection: + for ref in queryset: + cascade_refs.add(ref.id) + refs = document_cls.objects(**{field_name + '__in': self, + 'pk__nin': cascade_refs}) + if refs.count() > 0: + refs.delete(write_concern=write_concern, + cascade_refs=cascade_refs) elif rule == NULLIFY: document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, **{'unset__%s' % field_name: 1}) + write_concern=write_concern, + **{'unset__%s' % field_name: 1}) elif rule == PULL: document_cls.objects(**{field_name + '__in': self}).update( write_concern=write_concern, @@ -419,13 +468,13 @@ class BaseQuerySet(object): result = queryset._collection.remove(queryset._query, **write_concern) if result: - return result.get("n") + return result.get('n') def update(self, upsert=False, multi=True, write_concern=None, full_result=False, **update): """Perform an atomic update on the fields matched by the query. - :param upsert: Any existing document with that "_id" is overwritten. + :param upsert: insert if document doesn't exist (default ``False``) :param multi: Update multiple documents. :param write_concern: Extra keyword arguments are passed down which will be used as options for the resultant @@ -440,7 +489,7 @@ class BaseQuerySet(object): .. 
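The delete() rewrite above walks the registered delete rules: DENY is checked before anything is removed, then CASCADE, NULLIFY and PULL are applied. The rules themselves come from reverse_delete_rule on reference fields, as in this invented setup:

from mongoengine import (CASCADE, DENY, Document, ReferenceField,
                         StringField, connect)

connect('example_db')  # assumed local mongod


class Author(Document):
    name = StringField()


class Book(Document):
    # Deleting an Author also deletes their books...
    author = ReferenceField(Author, reverse_delete_rule=CASCADE)


class Invoice(Document):
    # ...unless an Invoice still refers to one of those books (DENY).
    book = ReferenceField(Book, reverse_delete_rule=DENY)


Author.objects(name='retired author').delete()  # applies the rules above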
versionadded:: 0.2 """ if not update and not upsert: - raise OperationError("No update parameters, would remove data") + raise OperationError('No update parameters, would remove data') if write_concern is None: write_concern = {} @@ -453,9 +502,9 @@ class BaseQuerySet(object): # then ensure we add _cls to the update operation if upsert and '_cls' in query: if '$set' in update: - update["$set"]["_cls"] = queryset._document._class_name + update['$set']['_cls'] = queryset._document._class_name else: - update["$set"] = {"_cls": queryset._document._class_name} + update['$set'] = {'_cls': queryset._document._class_name} try: result = queryset._collection.update(query, update, multi=multi, upsert=upsert, **write_concern) @@ -463,18 +512,45 @@ class BaseQuerySet(object): return result elif result: return result['n'] - except pymongo.errors.DuplicateKeyError, err: - raise NotUniqueError(u'Update failed (%s)' % unicode(err)) - except pymongo.errors.OperationFailure, err: - if unicode(err) == u'multi not coded yet': + except pymongo.errors.DuplicateKeyError as err: + raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) + except pymongo.errors.OperationFailure as err: + if six.text_type(err) == u'multi not coded yet': message = u'update() method requires MongoDB 1.1.3+' raise OperationError(message) - raise OperationError(u'Update failed (%s)' % unicode(err)) + raise OperationError(u'Update failed (%s)' % six.text_type(err)) + + def upsert_one(self, write_concern=None, **update): + """Overwrite or add the first document matched by the query. + + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param update: Django-style update keyword arguments + + :returns the new or overwritten document + + .. versionadded:: 0.10.2 + """ + + atomic_update = self.update(multi=False, upsert=True, + write_concern=write_concern, + full_result=True, **update) + + if atomic_update['updatedExisting']: + document = self.get() + else: + document = self._document.objects.with_id(atomic_update['upserted']) + return document def update_one(self, upsert=False, write_concern=None, **update): - """Perform an atomic update on first field matched by the query. + """Perform an atomic update on the fields of the first document + matched by the query. - :param upsert: Any existing document with that "_id" is overwritten. + :param upsert: insert if document doesn't exist (default ``False``) :param write_concern: Extra keyword arguments are passed down which will be used as options for the resultant ``getLastError`` command. For example, @@ -513,11 +589,11 @@ class BaseQuerySet(object): """ if remove and new: - raise OperationError("Conflicting parameters: remove and new") + raise OperationError('Conflicting parameters: remove and new') if not update and not upsert and not remove: raise OperationError( - "No update parameters, must either update or remove") + 'No update parameters, must either update or remove') queryset = self.clone() query = queryset._query @@ -528,7 +604,7 @@ class BaseQuerySet(object): try: if IS_PYMONGO_3: if full_response: - msg = "With PyMongo 3+, it is not possible anymore to get the full response." + msg = 'With PyMongo 3+, it is not possible anymore to get the full response.' 
warnings.warn(msg, DeprecationWarning) if remove: result = queryset._collection.find_one_and_delete( @@ -546,14 +622,14 @@ class BaseQuerySet(object): result = queryset._collection.find_and_modify( query, update, upsert=upsert, sort=sort, remove=remove, new=new, full_response=full_response, **self._cursor_args) - except pymongo.errors.DuplicateKeyError, err: - raise NotUniqueError(u"Update failed (%s)" % err) - except pymongo.errors.OperationFailure, err: - raise OperationError(u"Update failed (%s)" % err) + except pymongo.errors.DuplicateKeyError as err: + raise NotUniqueError(u'Update failed (%s)' % err) + except pymongo.errors.OperationFailure as err: + raise OperationError(u'Update failed (%s)' % err) if full_response: - if result["value"] is not None: - result["value"] = self._document._from_son(result["value"], only_fields=self.only_fields) + if result['value'] is not None: + result['value'] = self._document._from_son(result['value'], only_fields=self.only_fields) else: if result is not None: result = self._document._from_son(result, only_fields=self.only_fields) @@ -571,7 +647,7 @@ class BaseQuerySet(object): """ queryset = self.clone() if not queryset._query_obj.empty: - msg = "Cannot use a filter whilst using `with_id`" + msg = 'Cannot use a filter whilst using `with_id`' raise InvalidQueryError(msg) return queryset.filter(pk=object_id).first() @@ -615,7 +691,7 @@ class BaseQuerySet(object): Only return instances of this document and not any inherited documents """ if self._document._meta.get('allow_inheritance') is True: - self._initial_query = {"_cls": self._document._class_name} + self._initial_query = {'_cls': self._document._class_name} return self @@ -631,39 +707,36 @@ class BaseQuerySet(object): with switch_db(self._document, alias) as cls: collection = cls._get_collection() - return self.clone_into(self.__class__(self._document, collection)) + return self._clone_into(self.__class__(self._document, collection)) def clone(self): - """Creates a copy of the current - :class:`~mongoengine.queryset.QuerySet` + """Create a copy of the current queryset.""" + return self._clone_into(self.__class__(self._document, self._collection_obj)) - .. versionadded:: 0.5 + def _clone_into(self, new_qs): + """Copy all of the relevant properties of this queryset to + a new queryset (which has to be an instance of + :class:`~mongoengine.queryset.base.BaseQuerySet`). 
""" - return self.clone_into(self.__class__(self._document, self._collection_obj)) - - def clone_into(self, cls): - """Creates a copy of the current - :class:`~mongoengine.queryset.base.BaseQuerySet` into another child class - """ - if not isinstance(cls, BaseQuerySet): + if not isinstance(new_qs, BaseQuerySet): raise OperationError( - '%s is not a subclass of BaseQuerySet' % cls.__name__) + '%s is not a subclass of BaseQuerySet' % new_qs.__name__) copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', '_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_timeout', '_class_check', '_slave_okay', '_read_preference', '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', '_limit', '_skip', '_hint', '_auto_dereference', - '_search_text', 'only_fields', '_max_time_ms') + '_search_text', 'only_fields', '_max_time_ms', '_comment') for prop in copy_props: val = getattr(self, prop) - setattr(cls, prop, copy.copy(val)) + setattr(new_qs, prop, copy.copy(val)) if self._cursor_obj: - cls._cursor_obj = self._cursor_obj.clone() + new_qs._cursor_obj = self._cursor_obj.clone() - return cls + return new_qs def select_related(self, max_depth=1): """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or @@ -685,7 +758,11 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._limit = n if n != 0 else 1 - # Return self to allow chaining + + # If a cursor object has already been created, apply the limit to it. + if queryset._cursor_obj: + queryset._cursor_obj.limit(queryset._limit) + return queryset def skip(self, n): @@ -696,6 +773,11 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._skip = n + + # If a cursor object has already been created, apply the skip to it. + if queryset._cursor_obj: + queryset._cursor_obj.skip(queryset._skip) + return queryset def hint(self, index=None): @@ -713,6 +795,29 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._hint = index + + # If a cursor object has already been created, apply the hint to it. + if queryset._cursor_obj: + queryset._cursor_obj.hint(queryset._hint) + + return queryset + + def batch_size(self, size): + """Limit the number of documents returned in a single batch (each + batch requires a round trip to the server). + + See http://api.mongodb.com/python/current/api/pymongo/cursor.html#pymongo.cursor.Cursor.batch_size + for details. + + :param size: desired size of each batch. + """ + queryset = self.clone() + queryset._batch_size = size + + # If a cursor object has already been created, apply the batch size to it. + if queryset._cursor_obj: + queryset._cursor_obj.batch_size(queryset._batch_size) + return queryset def distinct(self, field): @@ -728,49 +833,56 @@ class BaseQuerySet(object): .. versionchanged:: 0.6 - Improved db_field refrence handling """ queryset = self.clone() + try: field = self._fields_to_dbfields([field]).pop() - finally: - distinct = self._dereference(queryset._cursor.distinct(field), 1, - name=field, instance=self._document) + except LookUpError: + pass - doc_field = self._document._fields.get(field.split('.', 1)[0]) - instance = False - # We may need to cast to the correct type eg. 
ListField(EmbeddedDocumentField) - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') - ListField = _import_class('ListField') - GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField') - if isinstance(doc_field, ListField): - doc_field = getattr(doc_field, "field", doc_field) - if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): - instance = getattr(doc_field, "document_type", False) - # handle distinct on subdocuments - if '.' in field: - for field_part in field.split('.')[1:]: - # if looping on embedded document, get the document type instance - if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): - doc_field = instance - # now get the subdocument - doc_field = getattr(doc_field, field_part, doc_field) - # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) - if isinstance(doc_field, ListField): - doc_field = getattr(doc_field, "field", doc_field) - if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): - instance = getattr(doc_field, "document_type", False) - if instance and isinstance(doc_field, (EmbeddedDocumentField, - GenericEmbeddedDocumentField)): - distinct = [instance(**doc) for doc in distinct] - return distinct + distinct = self._dereference(queryset._cursor.distinct(field), 1, + name=field, instance=self._document) + + doc_field = self._document._fields.get(field.split('.', 1)[0]) + instance = None + + # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) + EmbeddedDocumentField = _import_class('EmbeddedDocumentField') + ListField = _import_class('ListField') + GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField') + if isinstance(doc_field, ListField): + doc_field = getattr(doc_field, 'field', doc_field) + if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): + instance = getattr(doc_field, 'document_type', None) + + # handle distinct on subdocuments + if '.' in field: + for field_part in field.split('.')[1:]: + # if looping on embedded document, get the document type instance + if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): + doc_field = instance + # now get the subdocument + doc_field = getattr(doc_field, field_part, doc_field) + # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) + if isinstance(doc_field, ListField): + doc_field = getattr(doc_field, 'field', doc_field) + if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): + instance = getattr(doc_field, 'document_type', None) + + if instance and isinstance(doc_field, (EmbeddedDocumentField, + GenericEmbeddedDocumentField)): + distinct = [instance(**doc) for doc in distinct] + + return distinct def only(self, *fields): """Load only a subset of this document's fields. :: - post = BlogPost.objects(...).only("title", "author.name") + post = BlogPost.objects(...).only('title', 'author.name') .. note :: `only()` is chainable and will perform a union :: So with the following it will fetch both: `title` and `author.name`:: - post = BlogPost.objects.only("title").only("author.name") + post = BlogPost.objects.only('title').only('author.name') :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any field filters. @@ -780,19 +892,19 @@ class BaseQuerySet(object): .. versionadded:: 0.3 .. 
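The distinct() handling above re-instantiates embedded documents when the distinct field is (or sits under) an EmbeddedDocumentField; a sketch with invented models:

from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         StringField, connect)

connect('example_db')  # assumed local mongod


class Author(EmbeddedDocument):
    name = StringField()
    country = StringField()


class Book(Document):
    title = StringField()
    author = EmbeddedDocumentField(Author)


Book.objects.distinct('author')          # list of Author instances
Book.objects.distinct('author.country')  # plain list of strings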
versionchanged:: 0.5 - Added subfield support """ - fields = dict([(f, QueryFieldList.ONLY) for f in fields]) + fields = {f: QueryFieldList.ONLY for f in fields} self.only_fields = fields.keys() return self.fields(True, **fields) def exclude(self, *fields): """Opposite to .only(), exclude some document's fields. :: - post = BlogPost.objects(...).exclude("comments") + post = BlogPost.objects(...).exclude('comments') .. note :: `exclude()` is chainable and will perform a union :: So with the following it will exclude both: `title` and `author.name`:: - post = BlogPost.objects.exclude("title").exclude("author.name") + post = BlogPost.objects.exclude('title').exclude('author.name') :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any field filters. @@ -801,28 +913,34 @@ class BaseQuerySet(object): .. versionadded:: 0.5 """ - fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) + fields = {f: QueryFieldList.EXCLUDE for f in fields} return self.fields(**fields) def fields(self, _only_called=False, **kwargs): - """Manipulate how you load this document's fields. Used by `.only()` - and `.exclude()` to manipulate which fields to retrieve. Fields also - allows for a greater level of control for example: + """Manipulate how you load this document's fields. Used by `.only()` + and `.exclude()` to manipulate which fields to retrieve. If called + directly, use a set of kwargs similar to the MongoDB projection + document. For example: - Retrieving a Subrange of Array Elements: + Include only a subset of fields: - You can use the $slice operator to retrieve a subrange of elements in - an array. For example to get the first 5 comments:: + posts = BlogPost.objects(...).fields(author=1, title=1) - post = BlogPost.objects(...).fields(slice__comments=5) + Exclude a specific field: - :param kwargs: A dictionary identifying what to include + posts = BlogPost.objects(...).fields(comments=0) + + To retrieve a subrange of array elements: + + posts = BlogPost.objects(...).fields(slice__comments=5) + + :param kwargs: A set keywors arguments identifying what to include. .. versionadded:: 0.5 """ # Check for an operator and transform to mongo-style if there is - operators = ["slice"] + operators = ['slice'] cleaned_fields = [] for key, value in kwargs.items(): parts = key.split('__') @@ -832,7 +950,20 @@ class BaseQuerySet(object): key = '.'.join(parts) cleaned_fields.append((key, value)) - fields = sorted(cleaned_fields, key=operator.itemgetter(1)) + # Sort fields by their values, explicitly excluded fields first, then + # explicitly included, and then more complicated operators such as + # $slice. + def _sort_key(field_tuple): + key, value = field_tuple + if isinstance(value, (int)): + return value # 0 for exclusion, 1 for inclusion + else: + return 2 # so that complex values appear last + + fields = sorted(cleaned_fields, key=_sort_key) + + # Clone the queryset, group all fields by their value, convert + # each of them to db_fields, and set the queryset's _loaded_fields queryset = self.clone() for value, group in itertools.groupby(fields, lambda x: x[1]): fields = [field for field, value in group] @@ -846,7 +977,7 @@ class BaseQuerySet(object): """Include all fields. Reset all previously calls of .only() or .exclude(). :: - post = BlogPost.objects.exclude("comments").all_fields() + post = BlogPost.objects.exclude('comments').all_fields() .. 
versionadded:: 0.5 """ @@ -858,15 +989,41 @@ class BaseQuerySet(object): def order_by(self, *keys): """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The order may be specified by prepending each of the keys by a + or a -. - Ascending order is assumed. + Ascending order is assumed. If no keys are passed, existing ordering + is cleared instead. :param keys: fields to order the query results by; keys may be prefixed with **+** or **-** to determine the ordering direction """ queryset = self.clone() - queryset._ordering = queryset._get_order_by(keys) + + old_ordering = queryset._ordering + new_ordering = queryset._get_order_by(keys) + + if queryset._cursor_obj: + + # If a cursor object has already been created, apply the sort to it + if new_ordering: + queryset._cursor_obj.sort(new_ordering) + + # If we're trying to clear a previous explicit ordering, we need + # to clear the cursor entirely (because PyMongo doesn't allow + # clearing an existing sort on a cursor). + elif old_ordering: + queryset._cursor_obj = None + + queryset._ordering = new_ordering + return queryset + def comment(self, text): + """Add a comment to the query. + + See https://docs.mongodb.com/manual/reference/method/cursor.comment/#cursor.comment + for details. + """ + return self._chainable_method('comment', text) + def explain(self, format=False): """Return an explain plan record for the :class:`~mongoengine.queryset.QuerySet`\ 's cursor. @@ -874,8 +1031,15 @@ class BaseQuerySet(object): :param format: format the plan before returning it """ plan = self._cursor.explain() + + # TODO remove this option completely - it's useless. If somebody + # wants to pretty-print the output, they easily can. if format: + msg = ('"format" param of BaseQuerySet.explain has been ' + 'deprecated and will be removed in future versions.') + warnings.warn(msg, DeprecationWarning) plan = pprint.pformat(plan) + return plan # DEPRECATED. Has no more impact on PyMongo 3+ @@ -888,7 +1052,7 @@ class BaseQuerySet(object): .. deprecated:: Ignored with PyMongo 3+ """ if IS_PYMONGO_3: - msg = "snapshot is deprecated as it has no impact when using PyMongo 3+." + msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' warnings.warn(msg, DeprecationWarning) queryset = self.clone() queryset._snapshot = enabled @@ -914,7 +1078,7 @@ class BaseQuerySet(object): .. deprecated:: Ignored with PyMongo 3+ """ if IS_PYMONGO_3: - msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+." + msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' 
warnings.warn(msg, DeprecationWarning) queryset = self.clone() queryset._slave_okay = enabled @@ -929,6 +1093,7 @@ class BaseQuerySet(object): validate_read_preference('read_preference', read_preference) queryset = self.clone() queryset._read_preference = read_preference + queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference return queryset def scalar(self, *fields): @@ -975,7 +1140,7 @@ class BaseQuerySet(object): :param ms: the number of milliseconds before killing the query on the server """ - return self._chainable_method("max_time_ms", ms) + return self._chainable_method('max_time_ms', ms) # JSON Helpers @@ -1058,19 +1223,19 @@ class BaseQuerySet(object): MapReduceDocument = _import_class('MapReduceDocument') - if not hasattr(self._collection, "map_reduce"): - raise NotImplementedError("Requires MongoDB >= 1.7.1") + if not hasattr(self._collection, 'map_reduce'): + raise NotImplementedError('Requires MongoDB >= 1.7.1') map_f_scope = {} if isinstance(map_f, Code): map_f_scope = map_f.scope - map_f = unicode(map_f) + map_f = six.text_type(map_f) map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) reduce_f_scope = {} if isinstance(reduce_f, Code): reduce_f_scope = reduce_f.scope - reduce_f = unicode(reduce_f) + reduce_f = six.text_type(reduce_f) reduce_f_code = queryset._sub_js_fields(reduce_f) reduce_f = Code(reduce_f_code, reduce_f_scope) @@ -1080,7 +1245,7 @@ class BaseQuerySet(object): finalize_f_scope = {} if isinstance(finalize_f, Code): finalize_f_scope = finalize_f.scope - finalize_f = unicode(finalize_f) + finalize_f = six.text_type(finalize_f) finalize_f_code = queryset._sub_js_fields(finalize_f) finalize_f = Code(finalize_f_code, finalize_f_scope) mr_args['finalize'] = finalize_f @@ -1096,7 +1261,7 @@ class BaseQuerySet(object): else: map_reduce_function = 'map_reduce' - if isinstance(output, basestring): + if isinstance(output, six.string_types): mr_args['out'] = output elif isinstance(output, dict): @@ -1109,7 +1274,7 @@ class BaseQuerySet(object): break else: - raise OperationError("actionData not specified for output") + raise OperationError('actionData not specified for output') db_alias = output.get('db_alias') remaing_args = ['db', 'sharded', 'nonAtomic'] @@ -1201,66 +1366,29 @@ class BaseQuerySet(object): def sum(self, field): """Sum over the values of the specified field. - :param field: the field to sum over; use dot-notation to refer to + :param field: the field to sum over; use dot notation to refer to embedded document fields - - .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work - with sharding. """ - map_func = """ - function() { - var path = '{{~%(field)s}}'.split('.'), - field = this; - - for (p in path) { - if (typeof field != 'undefined') - field = field[path[p]]; - else - break; - } - - if (field && field.constructor == Array) { - field.forEach(function(item) { - emit(1, item||0); - }); - } else if (typeof field != 'undefined') { - emit(1, field||0); - } - } - """ % dict(field=field) - - reduce_func = Code(""" - function(key, values) { - var sum = 0; - for (var i in values) { - sum += values[i]; - } - return sum; - } - """) - - for result in self.map_reduce(map_func, reduce_func, output='inline'): - return result.value - else: - return 0 - - def aggregate_sum(self, field): - """Sum over the values of the specified field. 
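read_preference() above now throws away any cached cursor so the new preference actually takes effect (on PyMongo 3 it is applied via Collection.with_options when the cursor is rebuilt). A sketch, assuming a replica-set-backed connection and an invented Report model:

from pymongo.read_preferences import ReadPreference

from mongoengine import Document, StringField, connect

connect('example_db')  # assumed replica-set-backed connection


class Report(Document):
    name = StringField()


qs = Report.objects.read_preference(ReadPreference.SECONDARY_PREFERRED)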
- - :param field: the field to sum over; use dot-notation to refer to - embedded document fields - - This method is more performant than the regular `sum`, because it uses - the aggregation framework instead of map-reduce. - """ - result = self._document._get_collection().aggregate([ + db_field = self._fields_to_dbfields([field]).pop() + pipeline = [ {'$match': self._query}, - {'$group': {'_id': 'sum', 'total': {'$sum': '$' + field}}} - ]) + {'$group': {'_id': 'sum', 'total': {'$sum': '$' + db_field}}} + ] + + # if we're performing a sum over a list field, we sum up all the + # elements in the list, hence we need to $unwind the arrays first + ListField = _import_class('ListField') + field_parts = field.split('.') + field_instances = self._document._lookup_field(field_parts) + if isinstance(field_instances[-1], ListField): + pipeline.insert(1, {'$unwind': '$' + field}) + + result = self._document._get_collection().aggregate(pipeline) if IS_PYMONGO_3: - result = list(result) + result = tuple(result) else: result = result.get('result') + if result: return result[0]['total'] return 0 @@ -1268,73 +1396,27 @@ class BaseQuerySet(object): def average(self, field): """Average over the values of the specified field. - :param field: the field to average over; use dot-notation to refer to + :param field: the field to average over; use dot notation to refer to embedded document fields - - .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work - with sharding. """ - map_func = """ - function() { - var path = '{{~%(field)s}}'.split('.'), - field = this; - - for (p in path) { - if (typeof field != 'undefined') - field = field[path[p]]; - else - break; - } - - if (field && field.constructor == Array) { - field.forEach(function(item) { - emit(1, {t: item||0, c: 1}); - }); - } else if (typeof field != 'undefined') { - emit(1, {t: field||0, c: 1}); - } - } - """ % dict(field=field) - - reduce_func = Code(""" - function(key, values) { - var out = {t: 0, c: 0}; - for (var i in values) { - var value = values[i]; - out.t += value.t; - out.c += value.c; - } - return out; - } - """) - - finalize_func = Code(""" - function(key, value) { - return value.t / value.c; - } - """) - - for result in self.map_reduce(map_func, reduce_func, - finalize_f=finalize_func, output='inline'): - return result.value - else: - return 0 - - def aggregate_average(self, field): - """Average over the values of the specified field. - - :param field: the field to average over; use dot-notation to refer to - embedded document fields - - This method is more performant than the regular `average`, because it - uses the aggregation framework instead of map-reduce. 
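sum() (and average(), just below) now run on the aggregation framework and insert a $unwind stage when the target is a ListField, so every list element is included. A sketch with an invented Order model:

from mongoengine import Document, IntField, ListField, StringField, connect

connect('example_db')  # assumed local mongod


class Order(Document):
    customer = StringField()
    total = IntField()
    item_prices = ListField(IntField())


Order.objects.sum('total')          # $match + $group with $sum
Order.objects.average('total')
Order.objects.sum('item_prices')    # ListField: $unwind runs before $sum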
- """ - result = self._document._get_collection().aggregate([ + db_field = self._fields_to_dbfields([field]).pop() + pipeline = [ {'$match': self._query}, - {'$group': {'_id': 'avg', 'total': {'$avg': '$' + field}}} - ]) + {'$group': {'_id': 'avg', 'total': {'$avg': '$' + db_field}}} + ] + + # if we're performing an average over a list field, we average out + # all the elements in the list, hence we need to $unwind the arrays + # first + ListField = _import_class('ListField') + field_parts = field.split('.') + field_instances = self._document._lookup_field(field_parts) + if isinstance(field_instances[-1], ListField): + pipeline.insert(1, {'$unwind': '$' + field}) + + result = self._document._get_collection().aggregate(pipeline) if IS_PYMONGO_3: - result = list(result) + result = tuple(result) else: result = result.get('result') if result: @@ -1351,7 +1433,7 @@ class BaseQuerySet(object): Can only do direct simple mappings and cannot map across :class:`~mongoengine.fields.ReferenceField` or :class:`~mongoengine.fields.GenericReferenceField` for more complex - counting a manual map reduce call would is required. + counting a manual map reduce call is required. If the field is a :class:`~mongoengine.fields.ListField`, the items within each list will be counted individually. @@ -1377,10 +1459,13 @@ class BaseQuerySet(object): raise StopIteration raw_doc = self._cursor.next() + if self._as_pymongo: return self._get_as_pymongo(raw_doc) - doc = self._document._from_son(raw_doc, - _auto_dereference=self._auto_dereference, only_fields=self.only_fields) + + doc = self._document._from_son( + raw_doc, _auto_dereference=self._auto_dereference, + only_fields=self.only_fields) if self._scalar: return self._get_scalar(doc) @@ -1390,7 +1475,6 @@ class BaseQuerySet(object): def rewind(self): """Rewind the cursor to its unevaluated state. - .. versionadded:: 0.3 """ self._iter = False @@ -1422,10 +1506,10 @@ class BaseQuerySet(object): # snapshot is not handled at all by PyMongo 3+ # TODO: evaluate similar possibilities using modifiers if self._snapshot: - msg = "The snapshot option is not anymore available with PyMongo 3+" + msg = 'The snapshot option is not anymore available with PyMongo 3+' warnings.warn(msg, DeprecationWarning) cursor_args = { - 'no_cursor_timeout': self._timeout + 'no_cursor_timeout': not self._timeout } if self._loaded_fields: cursor_args[fields_name] = self._loaded_fields.as_dict() @@ -1434,38 +1518,60 @@ class BaseQuerySet(object): if fields_name not in cursor_args: cursor_args[fields_name] = {} - cursor_args[fields_name]['_text_score'] = {'$meta': "textScore"} + cursor_args[fields_name]['_text_score'] = {'$meta': 'textScore'} return cursor_args @property def _cursor(self): - if self._cursor_obj is None: + """Return a PyMongo cursor object corresponding to this queryset.""" + # If _cursor_obj already exists, return it immediately. + if self._cursor_obj is not None: + return self._cursor_obj + + # Create a new PyMongo cursor. + # XXX In PyMongo 3+, we define the read preference on a collection + # level, not a cursor level. Thus, we need to get a cloned collection + # object using `with_options` first. 
+ if IS_PYMONGO_3 and self._read_preference is not None: + self._cursor_obj = self._collection\ + .with_options(read_preference=self._read_preference)\ + .find(self._query, **self._cursor_args) + else: self._cursor_obj = self._collection.find(self._query, **self._cursor_args) - # Apply where clauses to cursor - if self._where_clause: - where_clause = self._sub_js_fields(self._where_clause) - self._cursor_obj.where(where_clause) + # Apply "where" clauses to cursor + if self._where_clause: + where_clause = self._sub_js_fields(self._where_clause) + self._cursor_obj.where(where_clause) - if self._ordering: - # Apply query ordering - self._cursor_obj.sort(self._ordering) - elif self._ordering is None and self._document._meta['ordering']: - # Otherwise, apply the ordering from the document model, unless - # it's been explicitly cleared via order_by with no arguments - order = self._get_order_by(self._document._meta['ordering']) - self._cursor_obj.sort(order) + # Apply ordering to the cursor. + # XXX self._ordering can be equal to: + # * None if we didn't explicitly call order_by on this queryset. + # * A list of PyMongo-style sorting tuples. + # * An empty list if we explicitly called order_by() without any + # arguments. This indicates that we want to clear the default + # ordering. + if self._ordering: + # explicit ordering + self._cursor_obj.sort(self._ordering) + elif self._ordering is None and self._document._meta['ordering']: + # default ordering + order = self._get_order_by(self._document._meta['ordering']) + self._cursor_obj.sort(order) - if self._limit is not None: - self._cursor_obj.limit(self._limit) + if self._limit is not None: + self._cursor_obj.limit(self._limit) - if self._skip is not None: - self._cursor_obj.skip(self._skip) + if self._skip is not None: + self._cursor_obj.skip(self._skip) - if self._hint != -1: - self._cursor_obj.hint(self._hint) + if self._hint != -1: + self._cursor_obj.hint(self._hint) + + if self._batch_size is not None: + self._cursor_obj.batch_size(self._batch_size) return self._cursor_obj @@ -1478,8 +1584,8 @@ class BaseQuerySet(object): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) if self._class_check and self._initial_query: - if "_cls" in self._mongo_query: - self._mongo_query = {"$and": [self._initial_query, self._mongo_query]} + if '_cls' in self._mongo_query: + self._mongo_query = {'$and': [self._initial_query, self._mongo_query]} else: self._mongo_query.update(self._initial_query) return self._mongo_query @@ -1491,8 +1597,7 @@ class BaseQuerySet(object): return self.__dereference def no_dereference(self): - """Turn off any dereferencing for the results of this queryset. 
- """ + """Turn off any dereferencing for the results of this queryset.""" queryset = self.clone() queryset._auto_dereference = False return queryset @@ -1521,7 +1626,7 @@ class BaseQuerySet(object): emit(null, 1); } } - """ % dict(field=field) + """ % {'field': field} reduce_func = """ function(key, values) { var total = 0; @@ -1543,8 +1648,8 @@ class BaseQuerySet(object): if normalize: count = sum(frequencies.values()) - frequencies = dict([(k, float(v) / count) - for k, v in frequencies.items()]) + frequencies = {k: float(v) / count + for k, v in frequencies.items()} return frequencies @@ -1596,10 +1701,10 @@ class BaseQuerySet(object): } """ total, data, types = self.exec_js(freq_func, field) - values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) + values = {types.get(k): int(v) for k, v in data.iteritems()} if normalize: - values = dict([(k, float(v) / total) for k, v in values.items()]) + values = {k: float(v) / total for k, v in values.items()} frequencies = {} for k, v in values.iteritems(): @@ -1621,14 +1726,14 @@ class BaseQuerySet(object): for x in document._subclasses][1:] for field in fields: try: - field = ".".join(f.db_field for f in + field = '.'.join(f.db_field for f in document._lookup_field(field.split('.'))) ret.append(field) - except LookUpError, err: + except LookUpError as err: found = False for subdoc in subclasses: try: - subfield = ".".join(f.db_field for f in + subfield = '.'.join(f.db_field for f in subdoc._lookup_field(field.split('.'))) ret.append(subfield) found = True @@ -1641,7 +1746,12 @@ class BaseQuerySet(object): return ret def _get_order_by(self, keys): - """Creates a list of order by fields + """Given a list of MongoEngine-style sort keys, return a list + of sorting tuples that can be applied to a PyMongo cursor. For + example: + + >>> qs._get_order_by(['-last_name', 'first_name']) + [('last_name', -1), ('first_name', 1)] """ key_list = [] for key in keys: @@ -1649,23 +1759,25 @@ class BaseQuerySet(object): continue if key == '$text_score': - key_list.append(('_text_score', {'$meta': "textScore"})) + key_list.append(('_text_score', {'$meta': 'textScore'})) continue direction = pymongo.ASCENDING if key[0] == '-': direction = pymongo.DESCENDING + if key[0] in ('-', '+'): key = key[1:] + key = key.replace('__', '.') try: key = self._document._translate_field_name(key) - except: + except Exception: + # TODO this exception should be more specific pass + key_list.append((key, direction)) - if self._cursor_obj and key_list: - self._cursor_obj.sort(key_list) return key_list def _get_scalar(self, doc): @@ -1721,7 +1833,7 @@ class BaseQuerySet(object): # If we need to coerce types, we need to determine the # type of this field and use the corresponding # .to_python(...) 
- from mongoengine.fields import EmbeddedDocumentField + EmbeddedDocumentField = _import_class('EmbeddedDocumentField') obj = self._document for chunk in path.split('.'): @@ -1755,7 +1867,7 @@ class BaseQuerySet(object): field_name = match.group(1).split('.') fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript - return ".".join([f.db_field for f in fields]) + return '.'.join([f.db_field for f in fields]) code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, @@ -1763,24 +1875,35 @@ class BaseQuerySet(object): return code def _chainable_method(self, method_name, val): + """Call a particular method on the PyMongo cursor call a particular chainable method + with the provided value. + """ queryset = self.clone() - method = getattr(queryset._cursor, method_name) - method(val) - setattr(queryset, "_" + method_name, val) + + # Get an existing cursor object or create a new one + cursor = queryset._cursor + + # Find the requested method on the cursor and call it with the + # provided value + getattr(cursor, method_name)(val) + + # Cache the value on the queryset._{method_name} + setattr(queryset, '_' + method_name, val) + return queryset # Deprecated def ensure_index(self, **kwargs): """Deprecated use :func:`Document.ensure_index`""" - msg = ("Doc.objects()._ensure_index() is deprecated. " - "Use Doc.ensure_index() instead.") + msg = ('Doc.objects()._ensure_index() is deprecated. ' + 'Use Doc.ensure_index() instead.') warnings.warn(msg, DeprecationWarning) self._document.__class__.ensure_index(**kwargs) return self def _ensure_indexes(self): """Deprecated use :func:`~Document.ensure_indexes`""" - msg = ("Doc.objects()._ensure_indexes() is deprecated. " - "Use Doc.ensure_indexes() instead.") + msg = ('Doc.objects()._ensure_indexes() is deprecated. ' + 'Use Doc.ensure_indexes() instead.') warnings.warn(msg, DeprecationWarning) self._document.__class__.ensure_indexes() diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index c10ad552..0524c3bb 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -67,7 +67,7 @@ class QueryFieldList(object): return bool(self.fields) def as_dict(self): - field_list = dict((field, self.value) for field in self.fields) + field_list = {field: self.value for field in self.fields} if self.slice: field_list.update(self.slice) if self._id is not None: diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index 47c2143d..199205e9 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -29,7 +29,7 @@ class QuerySetManager(object): Document.objects is accessed. 
""" if instance is not None: - # Document class being used rather than a document object + # Document object being used rather than a document class return self # owner is the document that contains the QuerySetManager diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 6e5f7220..b5d2765b 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,6 +1,6 @@ from mongoengine.errors import OperationError -from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY, - CASCADE, DENY, PULL) +from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, + NULLIFY, PULL) __all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') @@ -27,9 +27,10 @@ class QuerySet(BaseQuerySet): in batches of ``ITER_CHUNK_SIZE``. If ``self._has_more`` the cursor hasn't been exhausted so cache then - batch. Otherwise iterate the result_cache. + batch. Otherwise iterate the result_cache. """ self._iter = True + if self._has_more: return self._iter_results() @@ -38,44 +39,60 @@ class QuerySet(BaseQuerySet): def __len__(self): """Since __len__ is called quite frequently (for example, as part of - list(qs) we populate the result cache and cache the length. + list(qs)), we populate the result cache and cache the length. """ if self._len is not None: return self._len + + # Populate the result cache with *all* of the docs in the cursor if self._has_more: - # populate the cache list(self._iter_results()) + # Cache the length of the complete result cache and return it self._len = len(self._result_cache) return self._len def __repr__(self): - """Provides the string representation of the QuerySet - """ + """Provide a string representation of the QuerySet""" if self._iter: return '.. queryset mid-iteration ..' self._populate_cache() data = self._result_cache[:REPR_OUTPUT_SIZE + 1] if len(data) > REPR_OUTPUT_SIZE: - data[-1] = "...(remaining elements truncated)..." + data[-1] = '...(remaining elements truncated)...' return repr(data) def _iter_results(self): """A generator for iterating over the result cache. - Also populates the cache if there are more possible results to yield. - Raises StopIteration when there are no more results""" + Also populates the cache if there are more possible results to + yield. Raises StopIteration when there are no more results. + """ if self._result_cache is None: self._result_cache = [] + pos = 0 while True: - upper = len(self._result_cache) - while pos < upper: + + # For all positions lower than the length of the current result + # cache, serve the docs straight from the cache w/o hitting the + # database. + # XXX it's VERY important to compute the len within the `while` + # condition because the result cache might expand mid-iteration + # (e.g. if we call len(qs) inside a loop that iterates over the + # queryset). Fortunately len(list) is O(1) in Python, so this + # doesn't cause performance issues. + while pos < len(self._result_cache): yield self._result_cache[pos] pos += 1 + + # Raise StopIteration if we already established there were no more + # docs in the db cursor. if not self._has_more: raise StopIteration + + # Otherwise, populate more of the cache and repeat. 
if len(self._result_cache) <= pos: self._populate_cache() @@ -86,12 +103,22 @@ class QuerySet(BaseQuerySet): """ if self._result_cache is None: self._result_cache = [] - if self._has_more: - try: - for i in xrange(ITER_CHUNK_SIZE): - self._result_cache.append(self.next()) - except StopIteration: - self._has_more = False + + # Skip populating the cache if we already established there are no + # more docs to pull from the database. + if not self._has_more: + return + + # Pull in ITER_CHUNK_SIZE docs from the database and store them in + # the result cache. + try: + for _ in xrange(ITER_CHUNK_SIZE): + self._result_cache.append(self.next()) + except StopIteration: + # Getting this exception means there are no more docs in the + # db cursor. Set _has_more to False so that we can use that + # information in other places. + self._has_more = False def count(self, with_limit_and_skip=False): """Count the selected elements in the query. @@ -109,13 +136,15 @@ class QuerySet(BaseQuerySet): return self._len def no_cache(self): - """Convert to a non_caching queryset + """Convert to a non-caching queryset .. versionadded:: 0.8.3 Convert to non caching queryset """ if self._result_cache is not None: - raise OperationError("QuerySet already cached") - return self.clone_into(QuerySetNoCache(self._document, self._collection)) + raise OperationError('QuerySet already cached') + + return self._clone_into(QuerySetNoCache(self._document, + self._collection)) class QuerySetNoCache(BaseQuerySet): @@ -126,7 +155,7 @@ class QuerySetNoCache(BaseQuerySet): .. versionadded:: 0.8.3 Convert to caching queryset """ - return self.clone_into(QuerySet(self._document, self._collection)) + return self._clone_into(QuerySet(self._document, self._collection)) def __repr__(self): """Provides the string representation of the QuerySet @@ -137,13 +166,14 @@ class QuerySetNoCache(BaseQuerySet): return '.. queryset mid-iteration ..' data = [] - for i in xrange(REPR_OUTPUT_SIZE + 1): + for _ in xrange(REPR_OUTPUT_SIZE + 1): try: data.append(self.next()) except StopIteration: break + if len(data) > REPR_OUTPUT_SIZE: - data[-1] = "...(remaining elements truncated)..." + data[-1] = '...(remaining elements truncated)...' self.rewind() return repr(data) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 03f3acf0..bb04ee37 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -1,11 +1,13 @@ from collections import defaultdict +from bson import ObjectId, SON +from bson.dbref import DBRef import pymongo -from bson import SON +import six -from mongoengine.base.fields import UPDATE_OPERATORS -from mongoengine.connection import get_connection +from mongoengine.base import UPDATE_OPERATORS from mongoengine.common import _import_class +from mongoengine.connection import get_connection from mongoengine.errors import InvalidQueryError from mongoengine.python_support import IS_PYMONGO_3 @@ -26,13 +28,13 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS) -def query(_doc_cls=None, **query): - """Transform a query from Django-style format to Mongo format. 
- """ +# TODO make this less complex +def query(_doc_cls=None, **kwargs): + """Transform a query from Django-style format to Mongo format.""" mongo_query = {} merge_query = defaultdict(list) - for key, value in sorted(query.items()): - if key == "__raw__": + for key, value in sorted(kwargs.items()): + if key == '__raw__': mongo_query.update(value) continue @@ -44,8 +46,8 @@ def query(_doc_cls=None, **query): if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: op = parts.pop() - # Allw to escape operator-like field name by __ - if len(parts) > 1 and parts[-1] == "": + # Allow to escape operator-like field name by __ + if len(parts) > 1 and parts[-1] == '': parts.pop() negate = False @@ -57,16 +59,17 @@ def query(_doc_cls=None, **query): # Switch field names to proper names [set in Field(name='foo')] try: fields = _doc_cls._lookup_field(parts) - except Exception, e: + except Exception as e: raise InvalidQueryError(e) parts = [] CachedReferenceField = _import_class('CachedReferenceField') + GenericReferenceField = _import_class('GenericReferenceField') cleaned_fields = [] for field in fields: append_field = True - if isinstance(field, basestring): + if isinstance(field, six.string_types): parts.append(field) append_field = False # is last and CachedReferenceField @@ -84,9 +87,9 @@ def query(_doc_cls=None, **query): singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] singular_ops += STRING_OPERATORS if op in singular_ops: - if isinstance(field, basestring): + if isinstance(field, six.string_types): if (op in STRING_OPERATORS and - isinstance(value, basestring)): + isinstance(value, six.string_types)): StringField = _import_class('StringField') value = StringField.prepare_query_value(op, value) else: @@ -98,20 +101,51 @@ def query(_doc_cls=None, **query): value = value['_id'] elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): - # 'in', 'nin' and 'all' require a list of values - value = [field.prepare_query_value(op, v) for v in value] + # Raise an error if the in/nin/all/near param is not iterable. We need a + # special check for BaseDocument, because - although it's iterable - using + # it as such in the context of this method is most definitely a mistake. + BaseDocument = _import_class('BaseDocument') + if isinstance(value, BaseDocument): + raise TypeError("When using the `in`, `nin`, `all`, or " + "`near`-operators you can\'t use a " + "`Document`, you must wrap your object " + "in a list (object -> [object]).") + elif not hasattr(value, '__iter__'): + raise TypeError("The `in`, `nin`, `all`, or " + "`near`-operators must be applied to an " + "iterable (e.g. a list).") + else: + value = [field.prepare_query_value(op, v) for v in value] + + # If we're querying a GenericReferenceField, we need to alter the + # key depending on the value: + # * If the value is a DBRef, the key should be "field_name._ref". + # * If the value is an ObjectId, the key should be "field_name._ref.$id". 
+ if isinstance(field, GenericReferenceField): + if isinstance(value, DBRef): + parts[-1] += '._ref' + elif isinstance(value, ObjectId): + parts[-1] += '._ref.$id' # if op and op not in COMPARISON_OPERATORS: if op: if op in GEO_OPERATORS: value = _geo_operator(field, op, value) - elif op in CUSTOM_OPERATORS: - if op in ('elem_match', 'match'): - value = field.prepare_query_value(op, value) - value = {"$elemMatch": value} + elif op in ('match', 'elemMatch'): + ListField = _import_class('ListField') + EmbeddedDocumentField = _import_class('EmbeddedDocumentField') + if ( + isinstance(value, dict) and + isinstance(field, ListField) and + isinstance(field.field, EmbeddedDocumentField) + ): + value = query(field.field.document_type, **value) else: - NotImplementedError("Custom method '%s' has not " - "been implemented" % op) + value = field.prepare_query_value(op, value) + value = {'$elemMatch': value} + elif op in CUSTOM_OPERATORS: + NotImplementedError('Custom method "%s" has not ' + 'been implemented' % op) elif op not in STRING_OPERATORS: value = {'$' + op: value} @@ -120,11 +154,13 @@ def query(_doc_cls=None, **query): for i, part in indices: parts.insert(i, part) + key = '.'.join(parts) + if op is None or key not in mongo_query: mongo_query[key] = value elif key in mongo_query: - if key in mongo_query and isinstance(mongo_query[key], dict): + if isinstance(mongo_query[key], dict): mongo_query[key].update(value) # $max/minDistance needs to come last - convert to SON value_dict = mongo_query[key] @@ -174,15 +210,16 @@ def query(_doc_cls=None, **query): def update(_doc_cls=None, **update): - """Transform an update spec from Django-style format to Mongo format. + """Transform an update spec from Django-style format to Mongo + format. """ mongo_update = {} for key, value in update.items(): - if key == "__raw__": + if key == '__raw__': mongo_update.update(value) continue parts = key.split('__') - # if there is no operator, default to "set" + # if there is no operator, default to 'set' if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: parts.insert(0, 'set') # Check for an operator and transform to mongo-style if there is @@ -196,22 +233,25 @@ def update(_doc_cls=None, **update): # Support decrement by flipping a positive value's sign # and using 'inc' op = 'inc' - if value > 0: - value = -value + value = -value elif op == 'add_to_set': op = 'addToSet' elif op == 'set_on_insert': - op = "setOnInsert" + op = 'setOnInsert' match = None if parts[-1] in COMPARISON_OPERATORS: match = parts.pop() + # Allow to escape operator-like field name by __ + if len(parts) > 1 and parts[-1] == '': + parts.pop() + if _doc_cls: # Switch field names to proper names [set in Field(name='foo')] try: fields = _doc_cls._lookup_field(parts) - except Exception, e: + except Exception as e: raise InvalidQueryError(e) parts = [] @@ -219,7 +259,7 @@ def update(_doc_cls=None, **update): appended_sub_field = False for field in fields: append_field = True - if isinstance(field, basestring): + if isinstance(field, six.string_types): # Convert the S operator to $ if field == 'S': field = '$' @@ -240,7 +280,7 @@ def update(_doc_cls=None, **update): else: field = cleaned_fields[-1] - GeoJsonBaseField = _import_class("GeoJsonBaseField") + GeoJsonBaseField = _import_class('GeoJsonBaseField') if isinstance(field, GeoJsonBaseField): value = field.to_mongo(value) @@ -254,7 +294,7 @@ def update(_doc_cls=None, **update): value = [field.prepare_query_value(op, v) for v in value] elif field.required or value is not None: value = 
field.prepare_query_value(op, value) - elif op == "unset": + elif op == 'unset': value = 1 if match: @@ -264,16 +304,16 @@ def update(_doc_cls=None, **update): key = '.'.join(parts) if not op: - raise InvalidQueryError("Updates must supply an operation " - "eg: set__FIELD=value") + raise InvalidQueryError('Updates must supply an operation ' + 'eg: set__FIELD=value') if 'pull' in op and '.' in key: # Dot operators don't work on pull operations # unless they point to a list field # Otherwise it uses nested dict syntax if op == 'pullAll': - raise InvalidQueryError("pullAll operations only support " - "a single field depth") + raise InvalidQueryError('pullAll operations only support ' + 'a single field depth') # Look for the last list field and use dot notation until there field_classes = [c.__class__ for c in cleaned_fields] @@ -284,7 +324,7 @@ def update(_doc_cls=None, **update): # Then process as normal last_listField = len( cleaned_fields) - field_classes.index(ListField) - key = ".".join(parts[:last_listField]) + key = '.'.join(parts[:last_listField]) parts = parts[last_listField:] parts.insert(0, key) @@ -292,7 +332,7 @@ def update(_doc_cls=None, **update): for key in parts: value = {key: value} elif op == 'addToSet' and isinstance(value, list): - value = {key: {"$each": value}} + value = {key: {'$each': value}} else: value = {key: value} key = '$' + op @@ -306,74 +346,82 @@ def update(_doc_cls=None, **update): def _geo_operator(field, op, value): - """Helper to return the query for a given geo query""" - if op == "max_distance": + """Helper to return the query for a given geo query.""" + if op == 'max_distance': value = {'$maxDistance': value} - elif op == "min_distance": + elif op == 'min_distance': value = {'$minDistance': value} elif field._geo_index == pymongo.GEO2D: - if op == "within_distance": + if op == 'within_distance': value = {'$within': {'$center': value}} - elif op == "within_spherical_distance": + elif op == 'within_spherical_distance': value = {'$within': {'$centerSphere': value}} - elif op == "within_polygon": + elif op == 'within_polygon': value = {'$within': {'$polygon': value}} - elif op == "near": + elif op == 'near': value = {'$near': value} - elif op == "near_sphere": + elif op == 'near_sphere': value = {'$nearSphere': value} elif op == 'within_box': value = {'$within': {'$box': value}} else: - raise NotImplementedError("Geo method '%s' has not " - "been implemented for a GeoPointField" % op) + raise NotImplementedError('Geo method "%s" has not been ' + 'implemented for a GeoPointField' % op) else: - if op == "geo_within": - value = {"$geoWithin": _infer_geometry(value)} - elif op == "geo_within_box": - value = {"$geoWithin": {"$box": value}} - elif op == "geo_within_polygon": - value = {"$geoWithin": {"$polygon": value}} - elif op == "geo_within_center": - value = {"$geoWithin": {"$center": value}} - elif op == "geo_within_sphere": - value = {"$geoWithin": {"$centerSphere": value}} - elif op == "geo_intersects": - value = {"$geoIntersects": _infer_geometry(value)} - elif op == "near": + if op == 'geo_within': + value = {'$geoWithin': _infer_geometry(value)} + elif op == 'geo_within_box': + value = {'$geoWithin': {'$box': value}} + elif op == 'geo_within_polygon': + value = {'$geoWithin': {'$polygon': value}} + elif op == 'geo_within_center': + value = {'$geoWithin': {'$center': value}} + elif op == 'geo_within_sphere': + value = {'$geoWithin': {'$centerSphere': value}} + elif op == 'geo_intersects': + value = {'$geoIntersects': _infer_geometry(value)} + elif op 
== 'near': value = {'$near': _infer_geometry(value)} else: - raise NotImplementedError("Geo method '%s' has not " - "been implemented for a %s " % (op, field._name)) + raise NotImplementedError( + 'Geo method "%s" has not been implemented for a %s ' + % (op, field._name) + ) return value def _infer_geometry(value): - """Helper method that tries to infer the $geometry shape for a given value""" + """Helper method that tries to infer the $geometry shape for a + given value. + """ if isinstance(value, dict): - if "$geometry" in value: + if '$geometry' in value: return value elif 'coordinates' in value and 'type' in value: - return {"$geometry": value} - raise InvalidQueryError("Invalid $geometry dictionary should have " - "type and coordinates keys") + return {'$geometry': value} + raise InvalidQueryError('Invalid $geometry dictionary should have ' + 'type and coordinates keys') elif isinstance(value, (list, set)): # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? + # TODO: should both TypeError and IndexError be alike interpreted? + try: value[0][0][0] - return {"$geometry": {"type": "Polygon", "coordinates": value}} - except: - pass - try: - value[0][0] - return {"$geometry": {"type": "LineString", "coordinates": value}} - except: - pass - try: - value[0] - return {"$geometry": {"type": "Point", "coordinates": value}} - except: + return {'$geometry': {'type': 'Polygon', 'coordinates': value}} + except (TypeError, IndexError): pass - raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary " - "or (nested) lists of coordinate(s)") + try: + value[0][0] + return {'$geometry': {'type': 'LineString', 'coordinates': value}} + except (TypeError, IndexError): + pass + + try: + value[0] + return {'$geometry': {'type': 'Point', 'coordinates': value}} + except (TypeError, IndexError): + pass + + raise InvalidQueryError('Invalid $geometry data. Can be either a ' + 'dictionary or (nested) lists of coordinate(s)') diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 84365f56..bcf93a13 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -69,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor): self.document = document def visit_combination(self, combination): - operator = "$and" + operator = '$and' if combination.operation == combination.OR: - operator = "$or" + operator = '$or' return {operator: combination.children} def visit_query(self, query): @@ -79,8 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): class QNode(object): - """Base class for nodes in query trees. - """ + """Base class for nodes in query trees.""" AND = 0 OR = 1 @@ -94,7 +93,8 @@ class QNode(object): raise NotImplementedError def _combine(self, other, operation): - """Combine this node with another node into a QCombination object. + """Combine this node with another node into a QCombination + object. """ if getattr(other, 'empty', True): return self @@ -116,8 +116,8 @@ class QNode(object): class QCombination(QNode): - """Represents the combination of several conditions by a given logical - operator. + """Represents the combination of several conditions by a given + logical operator. 
""" def __init__(self, operation, children): diff --git a/mongoengine/signals.py b/mongoengine/signals.py index 95532f03..a892dec0 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- - -__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', - 'post_save', 'pre_delete', 'post_delete'] +__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', + 'post_save', 'pre_delete', 'post_delete') signals_available = False try: @@ -29,11 +27,12 @@ except ImportError: 'because the blinker library is ' 'not installed.') - send = lambda *a, **kw: None + send = lambda *a, **kw: None # noqa connect = disconnect = has_receivers_for = receivers_for = \ temporarily_connected_to = _fail del _fail + # the namespace for code signals. If you are not mongoengine code, do # not put signals in here. Create your own namespace instead. _signals = Namespace() diff --git a/requirements.txt b/requirements.txt index 03935868..854ed26d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,5 @@ -pymongo>=2.7.1 nose +pymongo>=2.7.1 +six==1.10.0 +flake8 +flake8-import-order diff --git a/setup.cfg b/setup.cfg index e59f0fe2..eabe3271 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,11 @@ [nosetests] -rednose = 1 -verbosity = 2 -detailed-errors = 1 -cover-erase = 1 -cover-branches = 1 -cover-package = mongoengine -tests = tests +verbosity=2 +detailed-errors=1 +tests=tests +cover-package=mongoengine + +[flake8] +ignore=E501,F401,F403,F405,I201 +exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests +max-complexity=47 +application-import-names=mongoengine,tests diff --git a/setup.py b/setup.py index f2afa8ee..fa682d20 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ import os import sys -from setuptools import setup, find_packages +from setuptools import find_packages, setup # Hack to silence atexit traceback in newer python versions try: @@ -8,20 +8,25 @@ try: except ImportError: pass -DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \ -'Mapper for working with MongoDB.' -LONG_DESCRIPTION = None +DESCRIPTION = ( + 'MongoEngine is a Python Object-Document ' + 'Mapper for working with MongoDB.' +) + try: - LONG_DESCRIPTION = open('README.rst').read() -except: - pass + with open('README.rst') as fin: + LONG_DESCRIPTION = fin.read() +except Exception: + LONG_DESCRIPTION = None def get_version(version_tuple): - if not isinstance(version_tuple[-1], int): - return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] + """Return the version tuple as a string, e.g. for (0, 10, 7), + return '0.10.7'. 
+ """ return '.'.join(map(str, version_tuple)) + # Dirty hack to get version number from monogengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read @@ -37,48 +42,46 @@ CLASSIFIERS = [ 'Operating System :: OS Independent', 'Programming Language :: Python', "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ] -extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} +extra_opts = { + 'packages': find_packages(exclude=['tests', 'tests.*']), + 'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0'] +} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True - extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0'] - if "test" in sys.argv or "nosetests" in sys.argv: + if 'test' in sys.argv or 'nosetests' in sys.argv: extra_opts['packages'] = find_packages() - extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} + extra_opts['package_data'] = { + 'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']} else: - # coverage 4 does not support Python 3.2 anymore - extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil'] + extra_opts['tests_require'] += ['python-dateutil'] - if sys.version_info[0] == 2 and sys.version_info[1] == 6: - extra_opts['tests_require'].append('unittest2') - -setup(name='mongoengine', - version=VERSION, - author='Harry Marr', - author_email='harry.marr@{nospam}gmail.com', - maintainer="Ross Lawley", - maintainer_email="ross.lawley@{nospam}gmail.com", - url='http://mongoengine.org/', - download_url='https://github.com/MongoEngine/mongoengine/tarball/master', - license='MIT', - include_package_data=True, - description=DESCRIPTION, - long_description=LONG_DESCRIPTION, - platforms=['any'], - classifiers=CLASSIFIERS, - install_requires=['pymongo>=2.7.1'], - test_suite='nose.collector', - setup_requires=['nose', 'rednose'], # Allow proper nose usage with setuptols and tox - **extra_opts +setup( + name='mongoengine', + version=VERSION, + author='Harry Marr', + author_email='harry.marr@{nospam}gmail.com', + maintainer="Ross Lawley", + maintainer_email="ross.lawley@{nospam}gmail.com", + url='http://mongoengine.org/', + download_url='https://github.com/MongoEngine/mongoengine/tarball/master', + license='MIT', + include_package_data=True, + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, + platforms=['any'], + classifiers=CLASSIFIERS, + install_requires=['pymongo>=2.7.1', 'six'], + test_suite='nose.collector', + **extra_opts ) diff --git a/tests/__init__.py b/tests/__init__.py index b24df5d2..eab0ddc7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -2,4 +2,3 @@ from all_warnings import AllWarnings from document import * from queryset import * from fields import * -from migration import * diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 53ce638c..3aebe4ba 100644 --- a/tests/all_warnings/__init__.py +++ 
b/tests/all_warnings/__init__.py @@ -3,8 +3,6 @@ This test has been put into a module. This is because it tests warnings that only get triggered on first hit. This way we can ensure its imported into the top level and called first by the test suite. """ -import sys -sys.path[0:0] = [""] import unittest import warnings diff --git a/tests/document/__init__.py b/tests/document/__init__.py index 1acc9f4b..f71376ea 100644 --- a/tests/document/__init__.py +++ b/tests/document/__init__.py @@ -1,5 +1,3 @@ -import sys -sys.path[0:0] = [""] import unittest from class_methods import * diff --git a/tests/document/class_methods.py b/tests/document/class_methods.py index 5da474ac..dd3addb7 100644 --- a/tests/document/class_methods.py +++ b/tests/document/class_methods.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] import unittest from mongoengine import * diff --git a/tests/document/delta.py b/tests/document/delta.py index cd37f415..add4fe8d 100644 --- a/tests/document/delta.py +++ b/tests/document/delta.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] import unittest from bson import SON diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index e4289bd2..a478df42 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -1,6 +1,4 @@ import unittest -import sys -sys.path[0:0] = [""] from mongoengine import * from mongoengine.connection import get_db @@ -88,6 +86,18 @@ class DynamicTest(unittest.TestCase): p.update(unset__misc=1) p.reload() + def test_reload_dynamic_field(self): + self.Person.objects.delete() + p = self.Person.objects.create() + p.update(age=1) + + self.assertEqual(len(p._data), 3) + self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name']) + + p.reload() + self.assertEqual(len(p._data), 4) + self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) + def test_dynamic_document_queries(self): """Ensure we can query dynamic fields""" p = self.Person() @@ -131,11 +141,9 @@ class DynamicTest(unittest.TestCase): def test_three_level_complex_data_lookups(self): """Ensure you can query three level document dynamic fields""" - p = self.Person() - p.misc = {'hello': {'hello2': 'world'}} - p.save() - # from pprint import pprint as pp; import pdb; pdb.set_trace(); - print self.Person.objects(misc__hello__hello2='world') + p = self.Person.objects.create( + misc={'hello': {'hello2': 'world'}} + ) self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count()) def test_complex_embedded_document_validation(self): diff --git a/tests/document/indexes.py b/tests/document/indexes.py index ccc6cf44..af93e7db 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -2,7 +2,6 @@ import unittest import sys -sys.path[0:0] = [""] import pymongo @@ -32,10 +31,7 @@ class IndexesTest(unittest.TestCase): self.Person = Person def tearDown(self): - for collection in self.db.collection_names(): - if 'system.' 
in collection: - continue - self.db.drop_collection(collection) + self.connection.drop_database(self.db) def test_indexes_document(self): """Ensure that indexes are used when meta[indexes] is specified for @@ -560,8 +556,8 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() - for i in xrange(0, 10): - tags = [("tag %i" % n) for n in xrange(0, i % 2)] + for i in range(0, 10): + tags = [("tag %i" % n) for n in range(0, i % 2)] BlogPost(tags=tags).save() self.assertEqual(BlogPost.objects.count(), 10) @@ -822,33 +818,34 @@ class IndexesTest(unittest.TestCase): name = StringField(required=True) term = StringField(required=True) - class Report(Document): + class ReportEmbedded(Document): key = EmbeddedDocumentField(CompoundKey, primary_key=True) text = StringField() - Report.drop_collection() - my_key = CompoundKey(name="n", term="ok") - report = Report(text="OK", key=my_key).save() + report = ReportEmbedded(text="OK", key=my_key).save() self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, report.to_mongo()) - self.assertEqual(report, Report.objects.get(pk=my_key)) + self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key)) def test_compound_key_dictfield(self): - class Report(Document): + class ReportDictField(Document): key = DictField(primary_key=True) text = StringField() - Report.drop_collection() - my_key = {"name": "n", "term": "ok"} - report = Report(text="OK", key=my_key).save() + report = ReportDictField(text="OK", key=my_key).save() self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, report.to_mongo()) - self.assertEqual(report, Report.objects.get(pk=my_key)) + + # We can't directly call ReportDictField.objects.get(pk=my_key), + # because dicts are unordered, and if the order in MongoDB is + # different than the one in `my_key`, this test will fail. + self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key['name'])) + self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key['term'])) def test_string_indexes(self): @@ -863,6 +860,20 @@ class IndexesTest(unittest.TestCase): self.assertTrue([('provider_ids.foo', 1)] in info) self.assertTrue([('provider_ids.bar', 1)] in info) + def test_sparse_compound_indexes(self): + + class MyDoc(Document): + provider_ids = DictField() + meta = { + "indexes": [{'fields': ("provider_ids.foo", "provider_ids.bar"), + 'sparse': True}], + } + + info = MyDoc.objects._collection.index_information() + self.assertEqual([('provider_ids.foo', 1), ('provider_ids.bar', 1)], + info['provider_ids.foo_1_provider_ids.bar_1']['key']) + self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) + def test_text_indexes(self): class Book(Document): @@ -895,26 +906,38 @@ class IndexesTest(unittest.TestCase): Issue #812 """ + # Use a new connection and database since dropping the database could + # cause concurrent tests to fail. 
+ connection = connect(db='tempdatabase', + alias='test_indexes_after_database_drop') + class BlogPost(Document): title = StringField() slug = StringField(unique=True) - BlogPost.drop_collection() + meta = {'db_alias': 'test_indexes_after_database_drop'} - # Create Post #1 - post1 = BlogPost(title='test1', slug='test') - post1.save() + try: + BlogPost.drop_collection() - # Drop the Database - self.connection.drop_database(BlogPost._get_db().name) + # Create Post #1 + post1 = BlogPost(title='test1', slug='test') + post1.save() - # Re-create Post #1 - post1 = BlogPost(title='test1', slug='test') - post1.save() + # Drop the Database + connection.drop_database('tempdatabase') + + # Re-create Post #1 + post1 = BlogPost(title='test1', slug='test') + post1.save() + + # Create Post #2 + post2 = BlogPost(title='test2', slug='test') + self.assertRaises(NotUniqueError, post2.save) + finally: + # Drop the temporary database at the end + connection.drop_database('tempdatabase') - # Create Post #2 - post2 = BlogPost(title='test2', slug='test') - self.assertRaises(NotUniqueError, post2.save) def test_index_dont_send_cls_option(self): """ diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py index 7673a103..2897e1d1 100644 --- a/tests/document/inheritance.py +++ b/tests/document/inheritance.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] import unittest import warnings @@ -253,19 +251,17 @@ class InheritanceTest(unittest.TestCase): self.assertEqual(classes, [Human]) def test_allow_inheritance(self): - """Ensure that inheritance may be disabled on simple classes and that - _cls and _subclasses will not be used. + """Ensure that inheritance is disabled by default on simple + classes and that _cls will not be used. """ - class Animal(Document): name = StringField() - def create_dog_class(): + # can't inherit because Animal didn't explicitly allow inheritance + with self.assertRaises(ValueError): class Dog(Animal): pass - self.assertRaises(ValueError, create_dog_class) - # Check that _cls etc aren't present on simple documents dog = Animal(name='dog').save() self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) @@ -275,17 +271,15 @@ class InheritanceTest(unittest.TestCase): self.assertFalse('_cls' in obj) def test_cant_turn_off_inheritance_on_subclass(self): - """Ensure if inheritance is on in a subclass you cant turn it off + """Ensure if inheritance is on in a subclass you cant turn it off. """ - class Animal(Document): name = StringField() meta = {'allow_inheritance': True} - def create_mammal_class(): + with self.assertRaises(ValueError): class Mammal(Animal): meta = {'allow_inheritance': False} - self.assertRaises(ValueError, create_mammal_class) def test_allow_inheritance_abstract_document(self): """Ensure that abstract documents can set inheritance rules and that @@ -298,10 +292,9 @@ class InheritanceTest(unittest.TestCase): class Animal(FinalDocument): name = StringField() - def create_mammal_class(): + with self.assertRaises(ValueError): class Mammal(Animal): pass - self.assertRaises(ValueError, create_mammal_class) # Check that _cls isn't present in simple documents doc = Animal(name='dog') @@ -360,29 +353,26 @@ class InheritanceTest(unittest.TestCase): self.assertEqual(berlin.pk, berlin.auto_id_0) def test_abstract_document_creation_does_not_fail(self): - class City(Document): continent = StringField() meta = {'abstract': True, 'allow_inheritance': False} + bkk = City(continent='asia') self.assertEqual(None, bkk.pk) # TODO: expected error? 
Shouldn't we create a new error type? - self.assertRaises(KeyError, lambda: setattr(bkk, 'pk', 1)) + with self.assertRaises(KeyError): + setattr(bkk, 'pk', 1) def test_allow_inheritance_embedded_document(self): - """Ensure embedded documents respect inheritance - """ - + """Ensure embedded documents respect inheritance.""" class Comment(EmbeddedDocument): content = StringField() - def create_special_comment(): + with self.assertRaises(ValueError): class SpecialComment(Comment): pass - self.assertRaises(ValueError, create_special_comment) - doc = Comment(content='test') self.assertFalse('_cls' in doc.to_mongo()) @@ -411,7 +401,7 @@ class InheritanceTest(unittest.TestCase): try: class MyDocument(DateCreatedDocument, DateUpdatedDocument): pass - except: + except Exception: self.assertTrue(False, "Couldn't create MyDocument class") def test_abstract_documents(self): @@ -454,11 +444,11 @@ class InheritanceTest(unittest.TestCase): self.assertEqual(Guppy._get_collection_name(), 'fish') self.assertEqual(Human._get_collection_name(), 'human') - def create_bad_abstract(): + # ensure that a subclass of a non-abstract class can't be abstract + with self.assertRaises(ValueError): class EvilHuman(Human): evil = BooleanField(default=True) meta = {'abstract': True} - self.assertRaises(ValueError, create_bad_abstract) def test_abstract_embedded_documents(self): # 789: EmbeddedDocument shouldn't inherit abstract diff --git a/tests/document/instance.py b/tests/document/instance.py index ea7d5668..9b52c809 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -1,26 +1,24 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] - import bson import os import pickle import unittest import uuid +import weakref from datetime import datetime from bson import DBRef, ObjectId from tests import fixtures from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest, - PickleDyanmicEmbedded, PickleDynamicTest) + PickleDynamicEmbedded, PickleDynamicTest) from mongoengine import * +from mongoengine.base import get_document, _document_registry +from mongoengine.connection import get_db from mongoengine.errors import (NotRegistered, InvalidDocumentError, InvalidQueryError, NotUniqueError, - FieldDoesNotExist) + FieldDoesNotExist, SaveConditionError) from mongoengine.queryset import NULLIFY, Q -from mongoengine.connection import get_db -from mongoengine.base import get_document from mongoengine.context_managers import switch_db, query_counter from mongoengine import signals @@ -30,6 +28,8 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), __all__ = ("InstanceTest",) + + class InstanceTest(unittest.TestCase): def setUp(self): @@ -63,6 +63,14 @@ class InstanceTest(unittest.TestCase): list(self.Person._get_collection().find().sort("id")), sorted(docs, key=lambda doc: doc["_id"])) + def assertHasInstance(self, field, instance): + self.assertTrue(hasattr(field, "_instance")) + self.assertTrue(field._instance is not None) + if isinstance(field._instance, weakref.ProxyType): + self.assertTrue(field._instance.__eq__(instance)) + else: + self.assertEqual(field._instance, instance) + def test_capped_collection(self): """Ensure that capped collections work properly. 
""" @@ -91,21 +99,18 @@ class InstanceTest(unittest.TestCase): self.assertEqual(options['size'], 4096) # Check that the document cannot be redefined with different options - def recreate_log_document(): - class Log(Document): - date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 11, - } - # Create the collection by accessing Document.objects - Log.objects - self.assertRaises(InvalidCollectionError, recreate_log_document) + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_documents': 11, + } - Log.drop_collection() + # Accessing Document.objects creates the collection + with self.assertRaises(InvalidCollectionError): + Log.objects def test_capped_collection_default(self): - """Ensure that capped collections defaults work properly. - """ + """Ensure that capped collections defaults work properly.""" class Log(Document): date = DateTimeField(default=datetime.now) meta = { @@ -123,16 +128,14 @@ class InstanceTest(unittest.TestCase): self.assertEqual(options['size'], 10 * 2**20) # Check that the document with default value can be recreated - def recreate_log_document(): - class Log(Document): - date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - } - # Create the collection by accessing Document.objects - Log.objects - recreate_log_document() - Log.drop_collection() + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_documents': 10, + } + + # Create the collection by accessing Document.objects + Log.objects def test_capped_collection_no_max_size_problems(self): """Ensure that capped collections with odd max_size work properly. @@ -155,16 +158,14 @@ class InstanceTest(unittest.TestCase): self.assertTrue(options['size'] >= 10000) # Check that the document with odd max_size value can be recreated - def recreate_log_document(): - class Log(Document): - date = DateTimeField(default=datetime.now) - meta = { - 'max_size': 10000, - } - # Create the collection by accessing Document.objects - Log.objects - recreate_log_document() - Log.drop_collection() + class Log(Document): + date = DateTimeField(default=datetime.now) + meta = { + 'max_size': 10000, + } + + # Create the collection by accessing Document.objects + Log.objects def test_repr(self): """Ensure that unicode representation works @@ -275,7 +276,7 @@ class InstanceTest(unittest.TestCase): list_stats = [] - for i in xrange(10): + for i in range(10): s = Stats() s.save() list_stats.append(s) @@ -345,14 +346,14 @@ class InstanceTest(unittest.TestCase): self.assertEqual(User._fields['username'].db_field, '_id') self.assertEqual(User._meta['id_field'], 'username') - def create_invalid_user(): - User(name='test').save() # no primary key field - self.assertRaises(ValidationError, create_invalid_user) + # test no primary key field + self.assertRaises(ValidationError, User(name='test').save) - def define_invalid_user(): + # define a subclass with a different primary key field than the + # parent + with self.assertRaises(ValueError): class EmailUser(User): email = StringField(primary_key=True) - self.assertRaises(ValueError, define_invalid_user) class EmailUser(User): email = StringField() @@ -400,12 +401,10 @@ class InstanceTest(unittest.TestCase): # Mimic Place and NicePlace definitions being in a different file # and the NicePlace model not being imported in at query time. 
- from mongoengine.base import _document_registry del(_document_registry['Place.NicePlace']) - def query_without_importing_nice_place(): - print Place.objects.all() - self.assertRaises(NotRegistered, query_without_importing_nice_place) + with self.assertRaises(NotRegistered): + list(Place.objects.all()) def test_document_registry_regressions(self): @@ -436,6 +435,15 @@ class InstanceTest(unittest.TestCase): person.to_dbref() + def test_save_abstract_document(self): + """Saving an abstract document should fail.""" + class Doc(Document): + name = StringField() + meta = {'abstract': True} + + with self.assertRaises(InvalidDocumentError): + Doc(name='aaa').save() + def test_reload(self): """Ensure that attributes may be reloaded. """ @@ -473,6 +481,20 @@ class InstanceTest(unittest.TestCase): doc.reload() Animal.drop_collection() + def test_reload_sharded_nested(self): + class SuperPhylum(EmbeddedDocument): + name = StringField() + + class Animal(Document): + superphylum = EmbeddedDocumentField(SuperPhylum) + meta = {'shard_key': ('superphylum.name',)} + + Animal.drop_collection() + doc = Animal(superphylum=SuperPhylum(name='Deuterostomia')) + doc.save() + doc.reload() + Animal.drop_collection() + def test_reload_referencing(self): """Ensures reloading updates weakrefs correctly """ @@ -546,6 +568,28 @@ class InstanceTest(unittest.TestCase): except Exception: self.assertFalse("Threw wrong exception") + def test_reload_of_non_strict_with_special_field_name(self): + """Ensures reloading works for documents with meta strict == False + """ + class Post(Document): + meta = { + 'strict': False + } + title = StringField() + items = ListField() + + Post.drop_collection() + + Post._get_collection().insert({ + "title": "Items eclipse", + "items": ["more lorem", "even more ipsum"] + }) + + post = Post.objects.first() + post.reload() + self.assertEqual(post.title, "Items eclipse") + self.assertEqual(post.items, ["more lorem", "even more ipsum"]) + def test_dictionary_access(self): """Ensure that dictionary-style field access works properly. 
""" @@ -608,10 +652,12 @@ class InstanceTest(unittest.TestCase): embedded_field = EmbeddedDocumentField(Embedded) Doc.drop_collection() - Doc(embedded_field=Embedded(string="Hi")).save() + doc = Doc(embedded_field=Embedded(string="Hi")) + self.assertHasInstance(doc.embedded_field, doc) + doc.save() doc = Doc.objects.get() - self.assertEqual(doc, doc.embedded_field._instance) + self.assertHasInstance(doc.embedded_field, doc) def test_embedded_document_complex_instance(self): """Ensure that embedded documents in complex fields can reference @@ -623,10 +669,25 @@ class InstanceTest(unittest.TestCase): embedded_field = ListField(EmbeddedDocumentField(Embedded)) Doc.drop_collection() - Doc(embedded_field=[Embedded(string="Hi")]).save() + doc = Doc(embedded_field=[Embedded(string="Hi")]) + self.assertHasInstance(doc.embedded_field[0], doc) + doc.save() doc = Doc.objects.get() - self.assertEqual(doc, doc.embedded_field[0]._instance) + self.assertHasInstance(doc.embedded_field[0], doc) + + def test_embedded_document_complex_instance_no_use_db_field(self): + """Ensure that use_db_field is propagated to list of Emb Docs + """ + class Embedded(EmbeddedDocument): + string = StringField(db_field='s') + + class Doc(Document): + embedded_field = ListField(EmbeddedDocumentField(Embedded)) + + d = Doc(embedded_field=[Embedded(string="Hi")]).to_mongo( + use_db_field=False).to_dict() + self.assertEqual(d['embedded_field'], [{'string': 'Hi'}]) def test_instance_is_set_on_setattr(self): @@ -639,11 +700,28 @@ class InstanceTest(unittest.TestCase): Account.drop_collection() acc = Account() acc.email = Email(email='test@example.com') - self.assertTrue(hasattr(acc._data["email"], "_instance")) + self.assertHasInstance(acc._data["email"], acc) acc.save() acc1 = Account.objects.first() - self.assertTrue(hasattr(acc1._data["email"], "_instance")) + self.assertHasInstance(acc1._data["email"], acc1) + + def test_instance_is_set_on_setattr_on_embedded_document_list(self): + + class Email(EmbeddedDocument): + email = EmailField() + + class Account(Document): + emails = EmbeddedDocumentListField(Email) + + Account.drop_collection() + acc = Account() + acc.emails = [Email(email='test@example.com')] + self.assertHasInstance(acc._data["emails"][0], acc) + acc.save() + + acc1 = Account.objects.first() + self.assertHasInstance(acc1._data["emails"][0], acc1) def test_document_clean(self): class TestDocument(Document): @@ -664,7 +742,7 @@ class InstanceTest(unittest.TestCase): try: t.save() - except ValidationError, e: + except ValidationError as e: expect_msg = "Draft entries may not have a publication date." 
self.assertTrue(expect_msg in e.message) self.assertEqual(e.to_dict(), {'__all__': expect_msg}) @@ -703,7 +781,7 @@ class InstanceTest(unittest.TestCase): t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) try: t.save() - except ValidationError, e: + except ValidationError as e: expect_msg = "Value of z != x + y" self.assertTrue(expect_msg in e.message) self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) @@ -717,8 +795,10 @@ class InstanceTest(unittest.TestCase): def test_modify_empty(self): doc = self.Person(name="bob", age=10).save() - self.assertRaises( - InvalidDocumentError, lambda: self.Person().modify(set__age=10)) + + with self.assertRaises(InvalidDocumentError): + self.Person().modify(set__age=10) + self.assertDbEqual([dict(doc.to_mongo())]) def test_modify_invalid_query(self): @@ -726,9 +806,8 @@ class InstanceTest(unittest.TestCase): doc2 = self.Person(name="jim", age=20).save() docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] - self.assertRaises( - InvalidQueryError, - lambda: doc1.modify(dict(id=doc2.id), set__value=20)) + with self.assertRaises(InvalidQueryError): + doc1.modify({'id': doc2.id}, set__value=20) self.assertDbEqual(docs) @@ -737,7 +816,7 @@ class InstanceTest(unittest.TestCase): doc2 = self.Person(name="jim", age=20).save() docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] - assert not doc1.modify(dict(name=doc2.name), set__age=100) + assert not doc1.modify({'name': doc2.name}, set__age=100) self.assertDbEqual(docs) @@ -746,7 +825,7 @@ class InstanceTest(unittest.TestCase): doc2 = self.Person(id=ObjectId(), name="jim", age=20) docs = [dict(doc1.to_mongo())] - assert not doc2.modify(dict(name=doc2.name), set__age=100) + assert not doc2.modify({'name': doc2.name}, set__age=100) self.assertDbEqual(docs) @@ -1021,7 +1100,7 @@ class InstanceTest(unittest.TestCase): flip(w1) self.assertTrue(w1.toggle) self.assertEqual(w1.count, 1) - self.assertRaises(OperationError, + self.assertRaises(SaveConditionError, w1.save, save_condition={'save_id': UUID(42)}) w1.reload() self.assertFalse(w1.toggle) @@ -1050,7 +1129,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(w1.count, 2) flip(w2) flip(w2) - self.assertRaises(OperationError, + self.assertRaises(SaveConditionError, w2.save, save_condition={'save_id': old_id}) w2.reload() self.assertFalse(w2.toggle) @@ -1063,7 +1142,7 @@ class InstanceTest(unittest.TestCase): self.assertTrue(w1.toggle) self.assertEqual(w1.count, 3) flip(w1) - self.assertRaises(OperationError, + self.assertRaises(SaveConditionError, w1.save, save_condition={'count__gte': w1.count}) w1.reload() self.assertTrue(w1.toggle) @@ -1153,6 +1232,19 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.name, None) self.assertEqual(person.age, None) + def test_update_rename_operator(self): + """Test the $rename operator.""" + coll = self.Person._get_collection() + doc = self.Person(name='John').save() + raw_doc = coll.find_one({'_id': doc.pk}) + self.assertEqual(set(raw_doc.keys()), set(['_id', '_cls', 'name'])) + + doc.update(rename__name='first_name') + raw_doc = coll.find_one({'_id': doc.pk}) + self.assertEqual(set(raw_doc.keys()), + set(['_id', '_cls', 'first_name'])) + self.assertEqual(raw_doc['first_name'], 'John') + def test_inserts_if_you_set_the_pk(self): p1 = self.Person(name='p1', id=bson.ObjectId()).save() p2 = self.Person(name='p2') @@ -1212,12 +1304,11 @@ class InstanceTest(unittest.TestCase): def test_document_update(self): - def update_not_saved_raises(): + # try updating a non-saved document + with 
self.assertRaises(OperationError): person = self.Person(name='dcrosta') person.update(set__name='Dan Crosta') - self.assertRaises(OperationError, update_not_saved_raises) - author = self.Person(name='dcrosta') author.save() @@ -1227,19 +1318,17 @@ class InstanceTest(unittest.TestCase): p1 = self.Person.objects.first() self.assertEqual(p1.name, author.name) - def update_no_value_raises(): + # try sending an empty update + with self.assertRaises(OperationError): person = self.Person.objects.first() person.update() - self.assertRaises(OperationError, update_no_value_raises) - - def update_no_op_should_default_to_set(): - person = self.Person.objects.first() - person.update(name="Dan") - person.reload() - return person.name - - self.assertEqual("Dan", update_no_op_should_default_to_set()) + # update that doesn't explicitly specify an operator should default + # to 'set__' + person = self.Person.objects.first() + person.update(name="Dan") + person.reload() + self.assertEqual("Dan", person.name) def test_update_unique_field(self): class Doc(Document): @@ -1248,8 +1337,8 @@ class InstanceTest(unittest.TestCase): doc1 = Doc(name="first").save() doc2 = Doc(name="second").save() - self.assertRaises(NotUniqueError, lambda: - doc2.update(set__name=doc1.name)) + with self.assertRaises(NotUniqueError): + doc2.update(set__name=doc1.name) def test_embedded_update(self): """ @@ -1767,15 +1856,13 @@ class InstanceTest(unittest.TestCase): def test_duplicate_db_fields_raise_invalid_document_error(self): """Ensure a InvalidDocumentError is thrown if duplicate fields - declare the same db_field""" - - def throw_invalid_document_error(): + declare the same db_field. + """ + with self.assertRaises(InvalidDocumentError): class Foo(Document): name = StringField() name2 = StringField(db_field='name') - self.assertRaises(InvalidDocumentError, throw_invalid_document_error) - def test_invalid_son(self): """Raise an error if loading invalid data""" class Occurrence(EmbeddedDocument): @@ -1787,11 +1874,17 @@ class InstanceTest(unittest.TestCase): forms = ListField(StringField(), default=list) occurs = ListField(EmbeddedDocumentField(Occurrence), default=list) - def raise_invalid_document(): - Word._from_son({'stem': [1, 2, 3], 'forms': 1, 'count': 'one', - 'occurs': {"hello": None}}) + with self.assertRaises(InvalidDocumentError): + Word._from_son({ + 'stem': [1, 2, 3], + 'forms': 1, + 'count': 'one', + 'occurs': {"hello": None} + }) - self.assertRaises(InvalidDocumentError, raise_invalid_document) + # Tests for issue #1438: https://github.com/MongoEngine/mongoengine/issues/1438 + with self.assertRaises(ValueError): + Word._from_son('this is not a valid SON dict') def test_reverse_delete_rule_cascade_and_nullify(self): """Ensure that a referenced document is also deleted upon deletion. @@ -1825,6 +1918,62 @@ class InstanceTest(unittest.TestCase): author.delete() self.assertEqual(BlogPost.objects.count(), 0) + def test_reverse_delete_rule_with_custom_id_field(self): + """Ensure that a referenced document with custom primary key + is also deleted upon deletion. 
+ """ + class User(Document): + name = StringField(primary_key=True) + + class Book(Document): + author = ReferenceField(User, reverse_delete_rule=CASCADE) + reviewer = ReferenceField(User, reverse_delete_rule=NULLIFY) + + User.drop_collection() + Book.drop_collection() + + user = User(name='Mike').save() + reviewer = User(name='John').save() + book = Book(author=user, reviewer=reviewer).save() + + reviewer.delete() + self.assertEqual(Book.objects.count(), 1) + self.assertEqual(Book.objects.get().reviewer, None) + + user.delete() + self.assertEqual(Book.objects.count(), 0) + + def test_reverse_delete_rule_with_shared_id_among_collections(self): + """Ensure that cascade delete rule doesn't mix id among collections. + """ + class User(Document): + id = IntField(primary_key=True) + + class Book(Document): + id = IntField(primary_key=True) + author = ReferenceField(User, reverse_delete_rule=CASCADE) + + User.drop_collection() + Book.drop_collection() + + user_1 = User(id=1).save() + user_2 = User(id=2).save() + book_1 = Book(id=1, author=user_2).save() + book_2 = Book(id=2, author=user_1).save() + + user_2.delete() + # Deleting user_2 should also delete book_1 but not book_2 + self.assertEqual(Book.objects.count(), 1) + self.assertEqual(Book.objects.get(), book_2) + + user_3 = User(id=3).save() + book_3 = Book(id=3, author=user_3).save() + + user_3.delete() + # Deleting user_3 should also delete book_3 + self.assertEqual(Book.objects.count(), 1) + self.assertEqual(Book.objects.get(), book_2) + def test_reverse_delete_rule_with_document_inheritance(self): """Ensure that a referenced document is also deleted upon deletion of a child document. @@ -1966,8 +2115,7 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Bar.objects.get().foo, None) def test_invalid_reverse_delete_rule_raise_errors(self): - - def throw_invalid_document_error(): + with self.assertRaises(InvalidDocumentError): class Blog(Document): content = StringField() authors = MapField(ReferenceField( @@ -1977,21 +2125,15 @@ class InstanceTest(unittest.TestCase): self.Person, reverse_delete_rule=NULLIFY)) - self.assertRaises(InvalidDocumentError, throw_invalid_document_error) - - def throw_invalid_document_error_embedded(): + with self.assertRaises(InvalidDocumentError): class Parents(EmbeddedDocument): father = ReferenceField('Person', reverse_delete_rule=DENY) mother = ReferenceField('Person', reverse_delete_rule=DENY) - self.assertRaises( - InvalidDocumentError, throw_invalid_document_error_embedded) - def test_reverse_delete_rule_cascade_recurs(self): """Ensure that a chain of documents is also deleted upon cascaded deletion. """ - class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) @@ -2180,7 +2322,7 @@ class InstanceTest(unittest.TestCase): pickle_doc = PickleDynamicTest( name="test", number=1, string="One", lists=['1', '2']) - pickle_doc.embedded = PickleDyanmicEmbedded(foo="Bar") + pickle_doc.embedded = PickleDynamicEmbedded(foo="Bar") pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved pickle_doc.save() @@ -2207,15 +2349,14 @@ class InstanceTest(unittest.TestCase): pickle_doc.save() pickle_doc.delete() - def test_throw_invalid_document_error(self): - - # test handles people trying to upsert - def throw_invalid_document_error(): + def test_override_method_with_field(self): + """Test creating a field with a field name that would override + the "validate" method. 
+ """ + with self.assertRaises(InvalidDocumentError): class Blog(Document): validate = DictField() - self.assertRaises(InvalidDocumentError, throw_invalid_document_error) - def test_mutating_documents(self): class B(EmbeddedDocument): @@ -2678,10 +2819,34 @@ class InstanceTest(unittest.TestCase): log.log = "Saving" log.save() - def change_shard_key(): + # try to change the shard key + with self.assertRaises(OperationError): log.machine = "127.0.0.1" - self.assertRaises(OperationError, change_shard_key) + def test_shard_key_in_embedded_document(self): + class Foo(EmbeddedDocument): + foo = StringField() + + class Bar(Document): + meta = { + 'shard_key': ('foo.foo',) + } + foo = EmbeddedDocumentField(Foo) + bar = StringField() + + foo_doc = Foo(foo='hello') + bar_doc = Bar(foo=foo_doc, bar='world') + bar_doc.save() + + self.assertTrue(bar_doc.id is not None) + + bar_doc.bar = 'baz' + bar_doc.save() + + # try to change the shard key + with self.assertRaises(OperationError): + bar_doc.foo.foo = 'something' + bar_doc.save() def test_shard_key_primary(self): class LogEntry(Document): @@ -2703,11 +2868,10 @@ class InstanceTest(unittest.TestCase): log.log = "Saving" log.save() - def change_shard_key(): + # try to change the shard key + with self.assertRaises(OperationError): log.machine = "127.0.0.1" - self.assertRaises(OperationError, change_shard_key) - def test_kwargs_simple(self): class Embedded(EmbeddedDocument): @@ -2765,6 +2929,20 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 42) + def test_positional_creation_embedded(self): + """Ensure that embedded document may be created using positional arguments. + """ + job = self.Job("Test Job", 4) + self.assertEqual(job.name, "Test Job") + self.assertEqual(job.years, 4) + + def test_mixed_creation_embedded(self): + """Ensure that embedded document may be created using mixed arguments. + """ + job = self.Job("Test Job", years=4) + self.assertEqual(job.name, "Test Job") + self.assertEqual(job.years, 4) + def test_mixed_creation_dynamic(self): """Ensure that document may be created using mixed arguments. """ @@ -2778,11 +2956,9 @@ class InstanceTest(unittest.TestCase): def test_bad_mixed_creation(self): """Ensure that document gives correct error when duplicating arguments """ - def construct_bad_instance(): + with self.assertRaises(TypeError): return self.Person("Test User", 42, name="Bad User") - self.assertRaises(TypeError, construct_bad_instance) - def test_data_contains_id_field(self): """Ensure that asking for _data returns 'id' """ @@ -2941,6 +3117,17 @@ class InstanceTest(unittest.TestCase): p4 = Person.objects()[0] p4.save() self.assertEquals(p4.height, 189) + + # However the default will not be fixed in DB + self.assertEquals(Person.objects(height=189).count(), 0) + + # alter DB for the new default + coll = Person._get_collection() + for person in Person.objects.as_pymongo(): + if 'height' not in person: + person['height'] = 189 + coll.save(person) + self.assertEquals(Person.objects(height=189).count(), 1) def test_from_son(self): @@ -3014,5 +3201,20 @@ class InstanceTest(unittest.TestCase): self.assertEqual(b._instance, a) self.assertEqual(idx, 2) + def test_falsey_pk(self): + """Ensure that we can create and update a document with Falsey PK. 
+ """ + class Person(Document): + age = IntField(primary_key=True) + height = FloatField() + + person = Person() + person.age = 0 + person.height = 1.89 + person.save() + + person.update(set__height=2.0) + + if __name__ == '__main__': unittest.main() diff --git a/tests/document/json_serialisation.py b/tests/document/json_serialisation.py index f47b5de5..110f1e14 100644 --- a/tests/document/json_serialisation.py +++ b/tests/document/json_serialisation.py @@ -1,6 +1,3 @@ -import sys -sys.path[0:0] = [""] - import unittest import uuid diff --git a/tests/document/validation.py b/tests/document/validation.py index ba03366e..105bc8b0 100644 --- a/tests/document/validation.py +++ b/tests/document/validation.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] - import unittest from datetime import datetime @@ -60,7 +57,7 @@ class ValidatorErrorTest(unittest.TestCase): try: User().validate() - except ValidationError, e: + except ValidationError as e: self.assertTrue("User:None" in e.message) self.assertEqual(e.to_dict(), { 'username': 'Field is required', @@ -70,7 +67,7 @@ class ValidatorErrorTest(unittest.TestCase): user.name = None try: user.save() - except ValidationError, e: + except ValidationError as e: self.assertTrue("User:RossC0" in e.message) self.assertEqual(e.to_dict(), { 'name': 'Field is required'}) @@ -118,7 +115,7 @@ class ValidatorErrorTest(unittest.TestCase): try: Doc(id="bad").validate() - except ValidationError, e: + except ValidationError as e: self.assertTrue("SubDoc:None" in e.message) self.assertEqual(e.to_dict(), { "e": {'val': 'OK could not be converted to int'}}) @@ -136,7 +133,7 @@ class ValidatorErrorTest(unittest.TestCase): doc.e.val = "OK" try: doc.save() - except ValidationError, e: + except ValidationError as e: self.assertTrue("Doc:test" in e.message) self.assertEqual(e.to_dict(), { "e": {'val': 'OK could not be converted to int'}}) @@ -156,14 +153,14 @@ class ValidatorErrorTest(unittest.TestCase): s = SubDoc() - self.assertRaises(ValidationError, lambda: s.validate()) + self.assertRaises(ValidationError, s.validate) d1.e = s d2.e = s del d1 - self.assertRaises(ValidationError, lambda: d2.validate()) + self.assertRaises(ValidationError, d2.validate) def test_parent_reference_in_child_document(self): """ diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 9f9db25d..318c0c59 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -1,9 +1,4 @@ # -*- coding: utf-8 -*- -import sys -from nose.plugins.skip import SkipTest - -sys.path[0:0] = [""] - import datetime import unittest import uuid @@ -11,6 +6,9 @@ import math import itertools import re +from nose.plugins.skip import SkipTest +import six + try: import dateutil except ImportError: @@ -19,27 +17,22 @@ except ImportError: from decimal import Decimal from bson import Binary, DBRef, ObjectId +try: + from bson.int64 import Int64 +except ImportError: + Int64 = long from mongoengine import * from mongoengine.connection import get_db -from mongoengine.base import _document_registry -from mongoengine.base.datastructures import BaseDict, EmbeddedDocumentList -from mongoengine.errors import NotRegistered -from mongoengine.python_support import PY3, b, bin_type +from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList, + _document_registry) + +from tests.utils import MongoDBTestCase __all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase") -class FieldTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - - def 
tearDown(self): - self.db.drop_collection('fs.files') - self.db.drop_collection('fs.chunks') - self.db.drop_collection('mongoengine.counters') +class FieldTest(MongoDBTestCase): def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. @@ -225,9 +218,9 @@ class FieldTest(unittest.TestCase): self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) def test_not_required_handles_none_from_database(self): - """Ensure that every fields can handle null values from the database. + """Ensure that every field can handle null values from the + database. """ - class HandleNoneFields(Document): str_fld = StringField(required=True) int_fld = IntField(required=True) @@ -304,6 +297,24 @@ class FieldTest(unittest.TestCase): person.id = '497ce96f395f2f052a494fd4' person.validate() + def test_db_field_validation(self): + """Ensure that db_field doesn't accept invalid values.""" + + # dot in the name + with self.assertRaises(ValueError): + class User(Document): + name = StringField(db_field='user.name') + + # name starting with $ + with self.assertRaises(ValueError): + class User(Document): + name = StringField(db_field='$name') + + # name containing a null character + with self.assertRaises(ValueError): + class User(Document): + name = StringField(db_field='name\0') + def test_string_validation(self): """Ensure that invalid values cannot be assigned to string fields. """ @@ -330,11 +341,12 @@ class FieldTest(unittest.TestCase): person.validate() def test_url_validation(self): - """Ensure that URLFields validate urls properly. - """ + """Ensure that URLFields validate urls properly.""" class Link(Document): url = URLField() + Link.drop_collection() + link = Link() link.url = 'google' self.assertRaises(ValidationError, link.validate) @@ -342,6 +354,27 @@ class FieldTest(unittest.TestCase): link.url = 'http://www.google.com:8080' link.validate() + def test_unicode_url_validation(self): + """Ensure unicode URLs are validated properly.""" + class Link(Document): + url = URLField() + + Link.drop_collection() + + link = Link() + link.url = u'http://привет.com' + + # TODO fix URL validation - this *IS* a valid URL + # For now we just want to make sure that the error message is correct + try: + link.validate() + self.assertTrue(False) + except ValidationError as e: + self.assertEqual( + unicode(e), + u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" + ) + def test_url_scheme_validation(self): """Ensure that URLFields validate urls with specific schemes properly. 
""" @@ -399,20 +432,37 @@ class FieldTest(unittest.TestCase): class Person(Document): height = FloatField(min_value=0.1, max_value=3.5) + class BigPerson(Document): + height = FloatField() + person = Person() person.height = 1.89 person.validate() person.height = '2.0' self.assertRaises(ValidationError, person.validate) + person.height = 0.01 self.assertRaises(ValidationError, person.validate) + person.height = 4.0 self.assertRaises(ValidationError, person.validate) person_2 = Person(height='something invalid') self.assertRaises(ValidationError, person_2.validate) + big_person = BigPerson() + + for value, value_type in enumerate(six.integer_types): + big_person.height = value_type(value) + big_person.validate() + + big_person.height = 2 ** 500 + big_person.validate() + + big_person.height = 2 ** 100000 # Too big for a float value + self.assertRaises(ValidationError, big_person.validate) + def test_decimal_validation(self): """Ensure that invalid values cannot be assigned to decimal fields. """ @@ -459,27 +509,41 @@ class FieldTest(unittest.TestCase): def test_decimal_storage(self): class Person(Document): - btc = DecimalField(precision=4) + float_value = DecimalField(precision=4) + string_value = DecimalField(precision=4, force_string=True) Person.drop_collection() - Person(btc=10).save() - Person(btc=10.1).save() - Person(btc=10.11).save() - Person(btc="10.111").save() - Person(btc=Decimal("10.1111")).save() - Person(btc=Decimal("10.11111")).save() + values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")] + for store_at_creation in [True, False]: + for value in values_to_store: + # to_python is called explicitly if values were sent in the kwargs of __init__ + if store_at_creation: + Person(float_value=value, string_value=value).save() + else: + person = Person.objects.create() + person.float_value = value + person.string_value = value + person.save() # How its stored - expected = [{'btc': 10.0}, {'btc': 10.1}, {'btc': 10.11}, - {'btc': 10.111}, {'btc': 10.1111}, {'btc': 10.1111}] + expected = [ + {'float_value': 10.0, 'string_value': '10.0000'}, + {'float_value': 10.1, 'string_value': '10.1000'}, + {'float_value': 10.11, 'string_value': '10.1100'}, + {'float_value': 10.111, 'string_value': '10.1110'}, + {'float_value': 10.1111, 'string_value': '10.1111'}, + {'float_value': 10.1111, 'string_value': '10.1111'}] + expected.extend(expected) actual = list(Person.objects.exclude('id').as_pymongo()) self.assertEqual(expected, actual) # How it comes out locally expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'), Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')] - actual = list(Person.objects().scalar('btc')) - self.assertEqual(expected, actual) + expected.extend(expected) + for field_name in ['float_value', 'string_value']: + actual = list(Person.objects().scalar(field_name)) + self.assertEqual(expected, actual) def test_boolean_validation(self): """Ensure that invalid values cannot be assigned to boolean fields. 
@@ -615,8 +679,8 @@ class FieldTest(unittest.TestCase): # Post UTC - microseconds are rounded (down) nearest millisecond and # dropped - d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) - d2 = datetime.datetime(1970, 01, 01, 00, 00, 01) + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) + d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) log = LogEntry() log.date = d1 log.save() @@ -625,15 +689,15 @@ class FieldTest(unittest.TestCase): self.assertEqual(log.date, d2) # Post UTC - microseconds are rounded (down) nearest millisecond - d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) - d2 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9000) + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) + d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000) log.date = d1 log.save() log.reload() self.assertNotEqual(log.date, d1) self.assertEqual(log.date, d2) - if not PY3: + if not six.PY3: # Pre UTC dates microseconds below 1000 are dropped # This does not seem to be true in PY3 d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) @@ -653,7 +717,7 @@ class FieldTest(unittest.TestCase): LogEntry.drop_collection() - d1 = datetime.datetime(1970, 01, 01, 00, 00, 01) + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1) log = LogEntry() log.date = d1 log.validate() @@ -670,8 +734,8 @@ class FieldTest(unittest.TestCase): LogEntry.drop_collection() # create 60 log entries - for i in xrange(1950, 2010): - d = datetime.datetime(i, 01, 01, 00, 00, 01) + for i in range(1950, 2010): + d = datetime.datetime(i, 1, 1, 0, 0, 1) LogEntry(date=d).save() self.assertEqual(LogEntry.objects.count(), 60) @@ -718,7 +782,7 @@ class FieldTest(unittest.TestCase): # Post UTC - microseconds are rounded (down) nearest millisecond and # dropped - with default datetimefields - d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) log = LogEntry() log.date = d1 log.save() @@ -727,7 +791,7 @@ class FieldTest(unittest.TestCase): # Post UTC - microseconds are rounded (down) nearest millisecond - with # default datetimefields - d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) log.date = d1 log.save() log.reload() @@ -744,7 +808,7 @@ class FieldTest(unittest.TestCase): # Pre UTC microseconds above 1000 is wonky - with default datetimefields # log.date has an invalid microsecond value so I can't construct # a date to compare. 
- for i in xrange(1001, 3113, 33): + for i in range(1001, 3113, 33): d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) log.date = d1 log.save() @@ -754,7 +818,7 @@ class FieldTest(unittest.TestCase): self.assertEqual(log, log1) # Test string padding - microsecond = map(int, [math.pow(10, x) for x in xrange(6)]) + microsecond = map(int, [math.pow(10, x) for x in range(6)]) mm = dd = hh = ii = ss = [1, 10] for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): @@ -776,7 +840,7 @@ class FieldTest(unittest.TestCase): LogEntry.drop_collection() - d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) + d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) log = LogEntry() log.date = d1 log.save() @@ -787,8 +851,8 @@ class FieldTest(unittest.TestCase): LogEntry.drop_collection() # create 60 log entries - for i in xrange(1950, 2010): - d = datetime.datetime(i, 01, 01, 00, 00, 01, 999) + for i in range(1950, 2010): + d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) LogEntry(date=d).save() self.assertEqual(LogEntry.objects.count(), 60) @@ -1009,6 +1073,7 @@ class FieldTest(unittest.TestCase): self.assertEqual( BlogPost.objects.filter(info__100__test__exact='test').count(), 0) + # test queries by list post = BlogPost() post.info = ['1', '2'] post.save() @@ -1020,8 +1085,297 @@ class FieldTest(unittest.TestCase): post.info *= 2 post.save() self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4', '1', '2', '3', '4']).count(), 1) + BlogPost.drop_collection() + def test_list_field_manipulative_operators(self): + """Ensure that ListField works with standard list operators that manipulate the list. + """ + class BlogPost(Document): + ref = StringField() + info = ListField(StringField()) + + BlogPost.drop_collection() + + post = BlogPost() + post.ref = "1234" + post.info = ['0', '1', '2', '3', '4', '5'] + post.save() + + def reset_post(): + post.info = ['0', '1', '2', '3', '4', '5'] + post.save() + + # '__add__(listB)' + # listA+listB + # operator.add(listA, listB) + reset_post() + temp = ['a', 'b'] + post.info = post.info + temp + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + + # '__delitem__(index)' + # aka 'del list[index]' + # aka 'operator.delitem(list, index)' + reset_post() + del post.info[2] # del from middle ('2') + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + + # '__delitem__(slice(i, j))' + # aka 'del list[i:j]' + # aka 'operator.delitem(list, slice(i,j))' + reset_post() + del post.info[1:3] # removes '1', '2' + self.assertEqual(post.info, ['0', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '3', '4', '5']) + + # '__iadd__' + # aka 'list += list' + reset_post() + temp = ['a', 'b'] + post.info += temp + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + + # '__imul__' + # aka 'list *= number' + reset_post() + post.info *= 2 + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + + # '__mul__' + # aka 'listA*listB' + reset_post() + post.info = post.info * 2 + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + 
post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + + # '__rmul__' + # aka 'listB*listA' + reset_post() + post.info = 2 * post.info + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + + # '__setitem__(index, value)' + # aka 'list[index]=value' + # aka 'setitem(list, value)' + reset_post() + post.info[4] = 'a' + self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + + # '__setitem__(slice(i, j), listB)' + # aka 'listA[i:j] = listB' + # aka 'setitem(listA, slice(i, j), listB)' + reset_post() + post.info[1:3] = ['h', 'e', 'l', 'l', 'o'] + self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + + # 'append' + reset_post() + post.info.append('h') + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + + # 'extend' + reset_post() + post.info.extend(['h', 'e', 'l', 'l', 'o']) + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + # 'insert' + + # 'pop' + reset_post() + x = post.info.pop(2) + y = post.info.pop() + self.assertEqual(post.info, ['0', '1', '3', '4']) + self.assertEqual(x, '2') + self.assertEqual(y, '5') + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '3', '4']) + + # 'remove' + reset_post() + post.info.remove('2') + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + post.save() + post.reload() + self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + + # 'reverse' + reset_post() + post.info.reverse() + self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + post.save() + post.reload() + self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + + # 'sort': though this operator method does manipulate the list, it is tested in + # the 'test_list_field_lexicograpic_operators' function + BlogPost.drop_collection() + + def test_list_field_invalid_operators(self): + class BlogPost(Document): + ref = StringField() + info = ListField(StringField()) + post = BlogPost() + post.ref = "1234" + post.info = ['0', '1', '2', '3', '4', '5'] + # '__hash__' + # aka 'hash(list)' + # # assert TypeError + self.assertRaises(TypeError, lambda: hash(post.info)) + + def test_list_field_lexicographic_operators(self): + """Ensure that ListField works with standard list operators that do lexigraphic ordering. 
+ """ + class BlogPost(Document): + ref = StringField() + text_info = ListField(StringField()) + oid_info = ListField(ObjectIdField()) + bool_info = ListField(BooleanField()) + BlogPost.drop_collection() + + blogSmall = BlogPost(ref="small") + blogSmall.text_info = ["a", "a", "a"] + blogSmall.bool_info = [False, False] + blogSmall.save() + blogSmall.reload() + + blogLargeA = BlogPost(ref="big") + blogLargeA.text_info = ["a", "z", "j"] + blogLargeA.bool_info = [False, True] + blogLargeA.save() + blogLargeA.reload() + + blogLargeB = BlogPost(ref="big2") + blogLargeB.text_info = ["a", "z", "j"] + blogLargeB.oid_info = [ + "54495ad94c934721ede76f90", + "54495ad94c934721ede76d23", + "54495ad94c934721ede76d00" + ] + blogLargeB.bool_info = [False, True] + blogLargeB.save() + blogLargeB.reload() + # '__eq__' aka '==' + self.assertEqual(blogLargeA.text_info, blogLargeB.text_info) + self.assertEqual(blogLargeA.bool_info, blogLargeB.bool_info) + # '__ge__' aka '>=' + self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) + self.assertGreaterEqual(blogLargeA.text_info, blogLargeB.text_info) + self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) + self.assertGreaterEqual(blogLargeA.bool_info, blogLargeB.bool_info) + # '__gt__' aka '>' + self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) + self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) + # '__le__' aka '<=' + self.assertLessEqual(blogSmall.text_info, blogLargeB.text_info) + self.assertLessEqual(blogLargeA.text_info, blogLargeB.text_info) + self.assertLessEqual(blogSmall.bool_info, blogLargeB.bool_info) + self.assertLessEqual(blogLargeA.bool_info, blogLargeB.bool_info) + # '__lt__' aka '<' + self.assertLess(blogSmall.text_info, blogLargeB.text_info) + self.assertLess(blogSmall.bool_info, blogLargeB.bool_info) + # '__ne__' aka '!=' + self.assertNotEqual(blogSmall.text_info, blogLargeB.text_info) + self.assertNotEqual(blogSmall.bool_info, blogLargeB.bool_info) + # 'sort' + blogLargeB.bool_info = [True, False, True, False] + blogLargeB.text_info.sort() + blogLargeB.oid_info.sort() + blogLargeB.bool_info.sort() + sorted_target_list = [ + ObjectId("54495ad94c934721ede76d00"), + ObjectId("54495ad94c934721ede76d23"), + ObjectId("54495ad94c934721ede76f90") + ] + self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) + self.assertEqual(blogLargeB.oid_info, sorted_target_list) + self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + blogLargeB.save() + blogLargeB.reload() + self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) + self.assertEqual(blogLargeB.oid_info, sorted_target_list) + self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + + BlogPost.drop_collection() + + def test_list_assignment(self): + """Ensure that list field element assignment and slicing work + """ + class BlogPost(Document): + info = ListField() + + BlogPost.drop_collection() + + post = BlogPost() + post.info = ['e1', 'e2', 3, '4', 5] + post.save() + + post.info[0] = 1 + post.save() + post.reload() + self.assertEqual(post.info[0], 1) + + post.info[1:3] = ['n2', 'n3'] + post.save() + post.reload() + self.assertEqual(post.info, [1, 'n2', 'n3', '4', 5]) + + post.info[-1] = 'n5' + post.save() + post.reload() + self.assertEqual(post.info, [1, 'n2', 'n3', '4', 'n5']) + + post.info[-2] = 4 + post.save() + post.reload() + self.assertEqual(post.info, [1, 'n2', 'n3', 4, 'n5']) + + post.info[1:-1] = [2] + post.save() + post.reload() + self.assertEqual(post.info, [1, 2, 'n5']) + + post.info[:-1] = [1, 'n2', 
'n3', 4] + post.save() + post.reload() + self.assertEqual(post.info, [1, 'n2', 'n3', 4, 'n5']) + + post.info[-4:3] = [2, 3] + post.save() + post.reload() + self.assertEqual(post.info, [1, 2, 3, 4, 'n5']) + def test_list_field_passed_in_value(self): class Foo(Document): bars = ListField(ReferenceField("Bar")) @@ -1048,12 +1402,11 @@ class FieldTest(unittest.TestCase): e.mapping = [1] e.save() - def create_invalid_mapping(): + # try creating an invalid mapping + with self.assertRaises(ValidationError): e.mapping = ["abc"] e.save() - self.assertRaises(ValidationError, create_invalid_mapping) - Simple.drop_collection() def test_list_field_rejects_strings(self): @@ -1136,6 +1489,19 @@ class FieldTest(unittest.TestCase): simple = simple.reload() self.assertEqual(simple.widgets, [4]) + def test_list_field_with_negative_indices(self): + + class Simple(Document): + widgets = ListField() + + simple = Simple(widgets=[1, 2, 3, 4]).save() + simple.widgets[-1] = 5 + self.assertEqual(['widgets.3'], simple._changed_fields) + simple.save() + + simple = simple.reload() + self.assertEqual(simple.widgets, [1, 2, 3, 5]) + def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" @@ -1257,6 +1623,44 @@ class FieldTest(unittest.TestCase): BlogPost.drop_collection() + def test_dictfield_dump_document(self): + """Ensure a DictField can handle another document's dump + """ + class Doc(Document): + field = DictField() + + class ToEmbed(Document): + id = IntField(primary_key=True, default=1) + recursive = DictField() + + class ToEmbedParent(Document): + id = IntField(primary_key=True, default=1) + recursive = DictField() + + meta = {'allow_inheritance': True} + + class ToEmbedChild(ToEmbedParent): + pass + + to_embed_recursive = ToEmbed(id=1).save() + to_embed = ToEmbed( + id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() + doc = Doc(field=to_embed.to_mongo().to_dict()) + doc.save() + assert isinstance(doc.field, dict) + assert doc.field == {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}} + # Same thing with a Document with a _cls field + to_embed_recursive = ToEmbedChild(id=1).save() + to_embed_child = ToEmbedChild( + id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() + doc = Doc(field=to_embed_child.to_mongo().to_dict()) + doc.save() + assert isinstance(doc.field, dict) + assert doc.field == { + '_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild', + 'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}} + } + def test_dictfield_strict(self): """Ensure that dict field handles validation if provided a strict field type.""" @@ -1269,12 +1673,11 @@ class FieldTest(unittest.TestCase): e.mapping['someint'] = 1 e.save() - def create_invalid_mapping(): + # try creating an invalid mapping + with self.assertRaises(ValidationError): e.mapping['somestring'] = "abc" e.save() - self.assertRaises(ValidationError, create_invalid_mapping) - Simple.drop_collection() def test_dictfield_complex(self): @@ -1347,11 +1750,10 @@ class FieldTest(unittest.TestCase): self.assertEqual(BaseDict, type(e.mapping)) self.assertEqual({"ints": [3, 4]}, e.mapping) - def create_invalid_mapping(): + # try creating an invalid mapping + with self.assertRaises(ValueError): e.update(set__mapping={"somestrings": ["foo", "bar", ]}) - self.assertRaises(ValueError, create_invalid_mapping) - Simple.drop_collection() def test_mapfield(self): @@ -1366,18 +1768,14 @@ class FieldTest(unittest.TestCase): e.mapping['someint'] = 1 e.save() - def create_invalid_mapping(): + with 
self.assertRaises(ValidationError): e.mapping['somestring'] = "abc" e.save() - self.assertRaises(ValidationError, create_invalid_mapping) - - def create_invalid_class(): + with self.assertRaises(ValidationError): class NoDeclaredType(Document): mapping = MapField() - self.assertRaises(ValidationError, create_invalid_class) - Simple.drop_collection() def test_complex_mapfield(self): @@ -1406,14 +1804,10 @@ class FieldTest(unittest.TestCase): self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) - def create_invalid_mapping(): + with self.assertRaises(ValidationError): e.mapping['someint'] = 123 e.save() - self.assertRaises(ValidationError, create_invalid_mapping) - - Extensible.drop_collection() - def test_embedded_mapfield_db_field(self): class Embedded(EmbeddedDocument): @@ -1477,6 +1871,29 @@ class FieldTest(unittest.TestCase): actions__friends__operation='drink', actions__friends__object='beer').count()) + def test_map_field_unicode(self): + + class Info(EmbeddedDocument): + description = StringField() + value_list = ListField(field=StringField()) + + class BlogPost(Document): + info_dict = MapField(field=EmbeddedDocumentField(Info)) + + BlogPost.drop_collection() + + tree = BlogPost(info_dict={ + u"éééé": { + 'description': u"VALUE: éééé" + } + }) + + tree.save() + + self.assertEqual(BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description, u"VALUE: éééé") + + BlogPost.drop_collection() + def test_embedded_db_field(self): class Embedded(EmbeddedDocument): @@ -1513,6 +1930,8 @@ class FieldTest(unittest.TestCase): name = StringField() preferences = EmbeddedDocumentField(PersonPreferences) + Person.drop_collection() + person = Person(name='Test User') person.preferences = 'My Preferences' self.assertRaises(ValidationError, person.validate) @@ -1545,12 +1964,70 @@ class FieldTest(unittest.TestCase): content = StringField() author = EmbeddedDocumentField(User) + BlogPost.drop_collection() + post = BlogPost(content='What I did today...') post.author = PowerUser(name='Test User', power=47) post.save() self.assertEqual(47, BlogPost.objects.first().author.power) + def test_embedded_document_inheritance_with_list(self): + """Ensure that nested list of subclassed embedded documents is + handled correctly. 
+ """ + + class Group(EmbeddedDocument): + name = StringField() + content = ListField(StringField()) + + class Basedoc(Document): + groups = ListField(EmbeddedDocumentField(Group)) + meta = {'abstract': True} + + class User(Basedoc): + doctype = StringField(require=True, default='userdata') + + User.drop_collection() + + content = ['la', 'le', 'lu'] + group = Group(name='foo', content=content) + foobar = User(groups=[group]) + foobar.save() + + self.assertEqual(content, User.objects.first().groups[0].content) + + def test_reference_miss(self): + """Ensure an exception is raised when dereferencing unknown document + """ + + class Foo(Document): + pass + + class Bar(Document): + ref = ReferenceField(Foo) + generic_ref = GenericReferenceField() + + Foo.drop_collection() + Bar.drop_collection() + + foo = Foo().save() + bar = Bar(ref=foo, generic_ref=foo).save() + + # Reference is no longer valid + foo.delete() + bar = Bar.objects.get() + self.assertRaises(DoesNotExist, getattr, bar, 'ref') + self.assertRaises(DoesNotExist, getattr, bar, 'generic_ref') + + # When auto_dereference is disabled, there is no trouble returning DBRef + bar = Bar.objects.get() + expected = foo.to_dbref() + bar._fields['ref']._auto_dereference = False + self.assertEqual(bar.ref, expected) + bar._fields['generic_ref']._auto_dereference = False + self.assertEqual(bar.generic_ref, {'_ref': expected, '_cls': 'Foo'}) + def test_reference_validation(self): """Ensure that invalid docment objects cannot be assigned to reference fields. @@ -1617,7 +2094,7 @@ class FieldTest(unittest.TestCase): 'parent': "50a234ea469ac1eda42d347d"}) mongoed = p1.to_mongo() self.assertTrue(isinstance(mongoed['parent'], ObjectId)) - + def test_cached_reference_field_get_and_save(self): """ Tests #1047: CachedReferenceField creates DBRefs on to_python, but can't save them on to_mongo @@ -1629,11 +2106,11 @@ class FieldTest(unittest.TestCase): class Ocorrence(Document): person = StringField() animal = CachedReferenceField(Animal) - + Animal.drop_collection() Ocorrence.drop_collection() - - Ocorrence(person="testte", + + Ocorrence(person="testte", animal=Animal(name="Leopard", tag="heavy").save()).save() p = Ocorrence.objects.get() p.person = 'new_testte' @@ -1816,7 +2293,7 @@ class FieldTest(unittest.TestCase): }) def test_cached_reference_fields_on_embedded_documents(self): - def build(): + with self.assertRaises(InvalidDocumentError): class Test(Document): name = StringField() @@ -1825,8 +2302,6 @@ class FieldTest(unittest.TestCase): 'test': CachedReferenceField(Test) }) - self.assertRaises(InvalidDocumentError, build) - def test_cached_reference_auto_sync(self): class Person(Document): TYPES = ( @@ -2243,6 +2718,91 @@ class FieldTest(unittest.TestCase): Member.drop_collection() BlogPost.drop_collection() + def test_drop_abstract_document(self): + """Ensure that an abstract document cannot be dropped given it + has no underlying collection. + """ + class AbstractDoc(Document): + name = StringField() + meta = {"abstract": True} + + self.assertRaises(OperationError, AbstractDoc.drop_collection) + + def test_reference_class_with_abstract_parent(self): + """Ensure that a class with an abstract parent can be referenced. 
+ """ + class Sibling(Document): + name = StringField() + meta = {"abstract": True} + + class Sister(Sibling): + pass + + class Brother(Sibling): + sibling = ReferenceField(Sibling) + + Sister.drop_collection() + Brother.drop_collection() + + sister = Sister(name="Alice") + sister.save() + brother = Brother(name="Bob", sibling=sister) + brother.save() + + self.assertEquals(Brother.objects[0].sibling.name, sister.name) + + Sister.drop_collection() + Brother.drop_collection() + + def test_reference_abstract_class(self): + """Ensure that an abstract class instance cannot be used in the + reference of that abstract class. + """ + class Sibling(Document): + name = StringField() + meta = {"abstract": True} + + class Sister(Sibling): + pass + + class Brother(Sibling): + sibling = ReferenceField(Sibling) + + Sister.drop_collection() + Brother.drop_collection() + + sister = Sibling(name="Alice") + brother = Brother(name="Bob", sibling=sister) + self.assertRaises(ValidationError, brother.save) + + Sister.drop_collection() + Brother.drop_collection() + + def test_abstract_reference_base_type(self): + """Ensure that an an abstract reference fails validation when given a + Document that does not inherit from the abstract type. + """ + class Sibling(Document): + name = StringField() + meta = {"abstract": True} + + class Brother(Sibling): + sibling = ReferenceField(Sibling) + + class Mother(Document): + name = StringField() + + Brother.drop_collection() + Mother.drop_collection() + + mother = Mother(name="Carol") + mother.save() + brother = Brother(name="Bob", sibling=mother) + self.assertRaises(ValidationError, brother.save) + + Brother.drop_collection() + Mother.drop_collection() + def test_generic_reference(self): """Ensure that a GenericReferenceField properly dereferences items. """ @@ -2519,6 +3079,38 @@ class FieldTest(unittest.TestCase): Post.drop_collection() User.drop_collection() + def test_generic_reference_filter_by_dbref(self): + """Ensure we can search for a specific generic reference by + providing its ObjectId. + """ + class Doc(Document): + ref = GenericReferenceField() + + Doc.drop_collection() + + doc1 = Doc.objects.create() + doc2 = Doc.objects.create(ref=doc1) + + doc = Doc.objects.get(ref=DBRef('doc', doc1.pk)) + self.assertEqual(doc, doc2) + + def test_generic_reference_filter_by_objectid(self): + """Ensure we can search for a specific generic reference by + providing its DBRef. + """ + class Doc(Document): + ref = GenericReferenceField() + + Doc.drop_collection() + + doc1 = Doc.objects.create() + doc2 = Doc.objects.create(ref=doc1) + + self.assertTrue(isinstance(doc1.pk, ObjectId)) + + doc = Doc.objects.get(ref=doc1.pk) + self.assertEqual(doc, doc2) + def test_binary_fields(self): """Ensure that binary fields can be stored and retrieved. 
""" @@ -2526,7 +3118,7 @@ class FieldTest(unittest.TestCase): content_type = StringField() blob = BinaryField() - BLOB = b('\xe6\x00\xc4\xff\x07') + BLOB = six.b('\xe6\x00\xc4\xff\x07') MIME_TYPE = 'application/octet-stream' Attachment.drop_collection() @@ -2536,7 +3128,7 @@ class FieldTest(unittest.TestCase): attachment_1 = Attachment.objects().first() self.assertEqual(MIME_TYPE, attachment_1.content_type) - self.assertEqual(BLOB, bin_type(attachment_1.blob)) + self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) Attachment.drop_collection() @@ -2563,13 +3155,13 @@ class FieldTest(unittest.TestCase): attachment_required = AttachmentRequired() self.assertRaises(ValidationError, attachment_required.validate) - attachment_required.blob = Binary(b('\xe6\x00\xc4\xff\x07')) + attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07')) attachment_required.validate() attachment_size_limit = AttachmentSizeLimit( - blob=b('\xe6\x00\xc4\xff\x07')) + blob=six.b('\xe6\x00\xc4\xff\x07')) self.assertRaises(ValidationError, attachment_size_limit.validate) - attachment_size_limit.blob = b('\xe6\x00\xc4\xff') + attachment_size_limit.blob = six.b('\xe6\x00\xc4\xff') attachment_size_limit.validate() Attachment.drop_collection() @@ -2607,26 +3199,42 @@ class FieldTest(unittest.TestCase): att.delete() self.assertEqual(0, Attachment.objects.count()) - def test_choices_validation(self): - """Ensure that value is in a container of allowed values. + def test_choices_allow_using_sets_as_choices(self): + """Ensure that sets can be used when setting choices """ class Shirt(Document): - size = StringField(max_length=3, choices=( - ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), - ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) + size = StringField(choices={'M', 'L'}) - Shirt.drop_collection() + Shirt(size='M').validate() + + def test_choices_validation_allow_no_value(self): + """Ensure that .validate passes and no value was provided + for a field setup with choices + """ + class Shirt(Document): + size = StringField(choices=('S', 'M')) shirt = Shirt() shirt.validate() - shirt.size = "S" + def test_choices_validation_accept_possible_value(self): + """Ensure that value is in a container of allowed values. 
+ """ + class Shirt(Document): + size = StringField(choices=('S', 'M')) + + shirt = Shirt(size='S') shirt.validate() - shirt.size = "XS" - self.assertRaises(ValidationError, shirt.validate) + def test_choices_validation_reject_unknown_value(self): + """Ensure that unallowed value are rejected upon validation + """ + class Shirt(Document): + size = StringField(choices=('S', 'M')) - Shirt.drop_collection() + shirt = Shirt(size="XS") + with self.assertRaises(ValidationError): + shirt.validate() def test_choices_validation_documents(self): """ @@ -2710,28 +3318,32 @@ class FieldTest(unittest.TestCase): ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) style = StringField(max_length=3, choices=( - ('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S') + ('S', 'Small'), ('B', 'Baggy'), ('W', 'Wide')), default='W') Shirt.drop_collection() - shirt = Shirt() + shirt1 = Shirt() + shirt2 = Shirt() - self.assertEqual(shirt.get_size_display(), None) - self.assertEqual(shirt.get_style_display(), 'Small') + # Make sure get__display returns the default value (or None) + self.assertEqual(shirt1.get_size_display(), None) + self.assertEqual(shirt1.get_style_display(), 'Wide') - shirt.size = "XXL" - shirt.style = "B" - self.assertEqual(shirt.get_size_display(), 'Extra Extra Large') - self.assertEqual(shirt.get_style_display(), 'Baggy') + shirt1.size = 'XXL' + shirt1.style = 'B' + shirt2.size = 'M' + shirt2.style = 'S' + self.assertEqual(shirt1.get_size_display(), 'Extra Extra Large') + self.assertEqual(shirt1.get_style_display(), 'Baggy') + self.assertEqual(shirt2.get_size_display(), 'Medium') + self.assertEqual(shirt2.get_style_display(), 'Small') # Set as Z - an invalid choice - shirt.size = "Z" - shirt.style = "Z" - self.assertEqual(shirt.get_size_display(), 'Z') - self.assertEqual(shirt.get_style_display(), 'Z') - self.assertRaises(ValidationError, shirt.validate) - - Shirt.drop_collection() + shirt1.size = 'Z' + shirt1.style = 'Z' + self.assertEqual(shirt1.get_size_display(), 'Z') + self.assertEqual(shirt1.get_style_display(), 'Z') + self.assertRaises(ValidationError, shirt1.validate) def test_simple_choices_validation(self): """Ensure that value is in a container of allowed values. 
@@ -2811,7 +3423,7 @@ class FieldTest(unittest.TestCase): try: shirt.validate() - except ValidationError, error: + except ValidationError as error: # get the validation rules error_dict = error.to_dict() self.assertEqual(error_dict['size'], SIZE_MESSAGE) @@ -2840,7 +3452,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() Person.drop_collection() - for x in xrange(10): + for x in range(10): Person(name="Person %s" % x).save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) @@ -2864,7 +3476,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() Person.drop_collection() - for x in xrange(10): + for x in range(10): Person(name="Person %s" % x).save() self.assertEqual(Person.id.get_next_value(), 11) @@ -2879,7 +3491,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() Person.drop_collection() - for x in xrange(10): + for x in range(10): Person(name="Person %s" % x).save() self.assertEqual(Person.id.get_next_value(), '11') @@ -2895,7 +3507,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() Person.drop_collection() - for x in xrange(10): + for x in range(10): Person(name="Person %s" % x).save() c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) @@ -2920,7 +3532,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() Person.drop_collection() - for x in xrange(10): + for x in range(10): Person(name="Person %s" % x).save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) @@ -2982,7 +3594,7 @@ class FieldTest(unittest.TestCase): Animal.drop_collection() Person.drop_collection() - for x in xrange(10): + for x in range(10): Animal(name="Animal %s" % x).save() Person(name="Person %s" % x).save() @@ -3012,7 +3624,7 @@ class FieldTest(unittest.TestCase): self.db['mongoengine.counters'].drop() Person.drop_collection() - for x in xrange(10): + for x in range(10): p = Person(name="Person %s" % x) p.save() @@ -3199,7 +3811,7 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, post.validate) try: post.validate() - except ValidationError, error: + except ValidationError as error: # ValidationError.errors property self.assertTrue(hasattr(error, 'errors')) self.assertTrue(isinstance(error.errors, dict)) @@ -3260,8 +3872,6 @@ class FieldTest(unittest.TestCase): Ensure that tuples remain tuples when they are inside a ComplexBaseField """ - from mongoengine.base import BaseField - class EnumField(BaseField): def __init__(self, **kwargs): @@ -3327,6 +3937,39 @@ class FieldTest(unittest.TestCase): doc = Doc.objects.get() self.assertEqual(doc.embed_me.field_1, "hello") + def test_dynamicfield_dump_document(self): + """Ensure a DynamicField can handle another document's dump + """ + class Doc(Document): + field = DynamicField() + + class ToEmbed(Document): + id = IntField(primary_key=True, default=1) + recursive = DynamicField() + + class ToEmbedParent(Document): + id = IntField(primary_key=True, default=1) + recursive = DynamicField() + + meta = {'allow_inheritance': True} + + class ToEmbedChild(ToEmbedParent): + pass + + to_embed_recursive = ToEmbed(id=1).save() + to_embed = ToEmbed(id=2, recursive=to_embed_recursive).save() + doc = Doc(field=to_embed) + doc.save() + assert isinstance(doc.field, ToEmbed) + assert doc.field == to_embed + # Same thing with a Document with a _cls field + to_embed_recursive = ToEmbedChild(id=1).save() + to_embed_child = ToEmbedChild(id=2, recursive=to_embed_recursive).save() + doc = 
Doc(field=to_embed_child) + doc.save() + assert isinstance(doc.field, ToEmbedChild) + assert doc.field == to_embed_child + def test_invalid_dict_value(self): class DictFieldTest(Document): dictionary = DictField(required=True) @@ -3377,37 +4020,46 @@ class FieldTest(unittest.TestCase): """Tests if a `FieldDoesNotExist` exception is raised when trying to instanciate a document with a field that's not defined. """ - class Doc(Document): - foo = StringField(db_field='f') + foo = StringField() - def test(): + with self.assertRaises(FieldDoesNotExist): Doc(bar='test') - self.assertRaises(FieldDoesNotExist, test) def test_undefined_field_exception_with_strict(self): """Tests if a `FieldDoesNotExist` exception is raised when trying to instanciate a document with a field that's not defined, even when strict is set to False. """ - class Doc(Document): - foo = StringField(db_field='f') + foo = StringField() meta = {'strict': False} - def test(): + with self.assertRaises(FieldDoesNotExist): Doc(bar='test') - self.assertRaises(FieldDoesNotExist, test) + def test_long_field_is_considered_as_int64(self): + """ + Tests that long fields are stored as long in mongo, even if long value + is small enough to be an int. + """ + class TestLongFieldConsideredAsInt64(Document): + some_long = LongField() + + doc = TestLongFieldConsideredAsInt64(some_long=42).save() + db = get_db() + self.assertTrue(isinstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64)) + self.assertTrue(isinstance(doc.some_long, six.integer_types)) -class EmbeddedDocumentListFieldTestCase(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.db = connect(db='EmbeddedDocumentListFieldTestCase') +class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): + def setUp(self): + """ + Create two BlogPost entries in the database, each with + several EmbeddedDocuments. + """ class Comments(EmbeddedDocument): author = StringField() message = StringField() @@ -3415,14 +4067,11 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): class BlogPost(Document): comments = EmbeddedDocumentListField(Comments) - cls.Comments = Comments - cls.BlogPost = BlogPost + BlogPost.drop_collection() + + self.Comments = Comments + self.BlogPost = BlogPost - def setUp(self): - """ - Create two BlogPost entries in the database, each with - several EmbeddedDocuments. - """ self.post1 = self.BlogPost(comments=[ self.Comments(author='user1', message='message1'), self.Comments(author='user2', message='message1') @@ -3434,13 +4083,6 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): self.Comments(author='user3', message='message1') ]).save() - def tearDown(self): - self.BlogPost.drop_collection() - - @classmethod - def tearDownClass(cls): - cls.db.drop_database('EmbeddedDocumentListFieldTestCase') - def test_no_keyword_filter(self): """ Tests the filter method of a List of Embedded Documents @@ -3449,9 +4091,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): filtered = self.post1.comments.filter() # Ensure nothing was changed - # < 2.6 Incompatible > - # self.assertListEqual(filtered, self.post1.comments) - self.assertEqual(filtered, self.post1.comments) + self.assertListEqual(filtered, self.post1.comments) def test_single_keyword_filter(self): """ @@ -3502,10 +4142,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): Tests the filter method of a List of Embedded Documents when the keyword is not a known keyword. 
""" - # < 2.6 Incompatible > - # with self.assertRaises(AttributeError): - # self.post2.comments.filter(year=2) - self.assertRaises(AttributeError, self.post2.comments.filter, year=2) + with self.assertRaises(AttributeError): + self.post2.comments.filter(year=2) def test_no_keyword_exclude(self): """ @@ -3515,9 +4153,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): filtered = self.post1.comments.exclude() # Ensure everything was removed - # < 2.6 Incompatible > - # self.assertListEqual(filtered, []) - self.assertEqual(filtered, []) + self.assertListEqual(filtered, []) def test_single_keyword_exclude(self): """ @@ -3563,10 +4199,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): Tests the exclude method of a List of Embedded Documents when the keyword is not a known keyword. """ - # < 2.6 Incompatible > - # with self.assertRaises(AttributeError): - # self.post2.comments.exclude(year=2) - self.assertRaises(AttributeError, self.post2.comments.exclude, year=2) + with self.assertRaises(AttributeError): + self.post2.comments.exclude(year=2) def test_chained_filter_exclude(self): """ @@ -3604,10 +4238,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): single keyword. """ comment = self.post1.comments.get(author='user1') - - # < 2.6 Incompatible > - # self.assertIsInstance(comment, self.Comments) - self.assertTrue(isinstance(comment, self.Comments)) + self.assertIsInstance(comment, self.Comments) self.assertEqual(comment.author, 'user1') def test_multi_keyword_get(self): @@ -3616,10 +4247,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): multiple keywords. """ comment = self.post2.comments.get(author='user2', message='message2') - - # < 2.6 Incompatible > - # self.assertIsInstance(comment, self.Comments) - self.assertTrue(isinstance(comment, self.Comments)) + self.assertIsInstance(comment, self.Comments) self.assertEqual(comment.author, 'user2') self.assertEqual(comment.message, 'message2') @@ -3628,44 +4256,32 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): Tests the get method of a List of Embedded Documents without a keyword to return multiple documents. """ - # < 2.6 Incompatible > - # with self.assertRaises(MultipleObjectsReturned): - # self.post1.comments.get() - self.assertRaises(MultipleObjectsReturned, self.post1.comments.get) + with self.assertRaises(MultipleObjectsReturned): + self.post1.comments.get() def test_keyword_multiple_return_get(self): """ Tests the get method of a List of Embedded Documents with a keyword to return multiple documents. """ - # < 2.6 Incompatible > - # with self.assertRaises(MultipleObjectsReturned): - # self.post2.comments.get(author='user2') - self.assertRaises( - MultipleObjectsReturned, self.post2.comments.get, author='user2' - ) + with self.assertRaises(MultipleObjectsReturned): + self.post2.comments.get(author='user2') def test_unknown_keyword_get(self): """ Tests the get method of a List of Embedded Documents with an unknown keyword. """ - # < 2.6 Incompatible > - # with self.assertRaises(AttributeError): - # self.post2.comments.get(year=2020) - self.assertRaises(AttributeError, self.post2.comments.get, year=2020) + with self.assertRaises(AttributeError): + self.post2.comments.get(year=2020) def test_no_result_get(self): """ Tests the get method of a List of Embedded Documents where get returns no results. 
""" - # < 2.6 Incompatible > - # with self.assertRaises(DoesNotExist): - # self.post1.comments.get(author='user3') - self.assertRaises( - DoesNotExist, self.post1.comments.get, author='user3' - ) + with self.assertRaises(DoesNotExist): + self.post1.comments.get(author='user3') def test_first(self): """ @@ -3675,9 +4291,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): comment = self.post1.comments.first() # Ensure a Comment object was returned. - # < 2.6 Incompatible > - # self.assertIsInstance(comment, self.Comments) - self.assertTrue(isinstance(comment, self.Comments)) + self.assertIsInstance(comment, self.Comments) self.assertEqual(comment, self.post1.comments[0]) def test_create(self): @@ -3690,22 +4304,14 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): self.post1.save() # Ensure the returned value is the comment object. - # < 2.6 Incompatible > - # self.assertIsInstance(comment, self.Comments) - self.assertTrue(isinstance(comment, self.Comments)) + self.assertIsInstance(comment, self.Comments) self.assertEqual(comment.author, 'user4') self.assertEqual(comment.message, 'message1') # Ensure the new comment was actually saved to the database. - # < 2.6 Incompatible > - # self.assertIn( - # comment, - # self.BlogPost.objects(comments__author='user4')[0].comments - # ) - self.assertTrue( - comment in self.BlogPost.objects( - comments__author='user4' - )[0].comments + self.assertIn( + comment, + self.BlogPost.objects(comments__author='user4')[0].comments ) def test_filtered_create(self): @@ -3720,22 +4326,14 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): self.post1.save() # Ensure the returned value is the comment object. - # < 2.6 Incompatible > - # self.assertIsInstance(comment, self.Comments) - self.assertTrue(isinstance(comment, self.Comments)) + self.assertIsInstance(comment, self.Comments) self.assertEqual(comment.author, 'user4') self.assertEqual(comment.message, 'message1') # Ensure the new comment was actually saved to the database. - # < 2.6 Incompatible > - # self.assertIn( - # comment, - # self.BlogPost.objects(comments__author='user4')[0].comments - # ) - self.assertTrue( - comment in self.BlogPost.objects( - comments__author='user4' - )[0].comments + self.assertIn( + comment, + self.BlogPost.objects(comments__author='user4')[0].comments ) def test_no_keyword_update(self): @@ -3748,22 +4346,14 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): self.post1.save() # Ensure that nothing was altered. 
- # < 2.6 Incompatible > - # self.assertIn( - # original[0], - # self.BlogPost.objects(id=self.post1.id)[0].comments - # ) - self.assertTrue( - original[0] in self.BlogPost.objects(id=self.post1.id)[0].comments + self.assertIn( + original[0], + self.BlogPost.objects(id=self.post1.id)[0].comments ) - # < 2.6 Incompatible > - # self.assertIn( - # original[1], - # self.BlogPost.objects(id=self.post1.id)[0].comments - # ) - self.assertTrue( - original[1] in self.BlogPost.objects(id=self.post1.id)[0].comments + self.assertIn( + original[1], + self.BlogPost.objects(id=self.post1.id)[0].comments ) # Ensure the method returned 0 as the number of entries @@ -3788,6 +4378,17 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): # modified self.assertEqual(number, 2) + def test_unicode(self): + """ + Tests that unicode strings handled correctly + """ + post = self.BlogPost(comments=[ + self.Comments(author='user1', message=u'сообщение'), + self.Comments(author='user2', message=u'хабарлама') + ]).save() + self.assertEqual(post.comments.get(message=u'сообщение').author, + 'user1') + def test_save(self): """ Tests the save method of a List of Embedded Documents. @@ -3798,13 +4399,9 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): comments.save() # Ensure that the new comment has been added to the database. - # < 2.6 Incompatible > - # self.assertIn( - # new_comment, - # self.BlogPost.objects(id=self.post1.id)[0].comments - # ) - self.assertTrue( - new_comment in self.BlogPost.objects(id=self.post1.id)[0].comments + self.assertIn( + new_comment, + self.BlogPost.objects(id=self.post1.id)[0].comments ) def test_delete(self): @@ -3816,23 +4413,15 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): # Ensure that all the comments under post1 were deleted in the # database. - # < 2.6 Incompatible > - # self.assertListEqual( - # self.BlogPost.objects(id=self.post1.id)[0].comments, [] - # ) - self.assertEqual( + self.assertListEqual( self.BlogPost.objects(id=self.post1.id)[0].comments, [] ) # Ensure that post1 comments were deleted from the list. - # < 2.6 Incompatible > - # self.assertListEqual(self.post1.comments, []) - self.assertEqual(self.post1.comments, []) + self.assertListEqual(self.post1.comments, []) # Ensure that comments still returned a EmbeddedDocumentList object. - # < 2.6 Incompatible > - # self.assertIsInstance(self.post1.comments, EmbeddedDocumentList) - self.assertTrue(isinstance(self.post1.comments, EmbeddedDocumentList)) + self.assertIsInstance(self.post1.comments, EmbeddedDocumentList) # Ensure that the delete method returned 2 as the number of entries # deleted from the database @@ -3851,7 +4440,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique)) A(my_list=[]).save() - self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save()) + with self.assertRaises(NotUniqueError): + A(my_list=[]).save() class EmbeddedWithSparseUnique(EmbeddedDocument): number = IntField(unique=True, sparse=True) @@ -3859,6 +4449,9 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): class B(Document): my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique)) + A.drop_collection() + B.drop_collection() + B(my_list=[]).save() B(my_list=[]).save() @@ -3872,21 +4465,15 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): self.post1.save() # Ensure that only the user2 comment was deleted. 
- # < 2.6 Incompatible > - # self.assertNotIn( - # comment, self.BlogPost.objects(id=self.post1.id)[0].comments - # ) - self.assertTrue( - comment not in self.BlogPost.objects(id=self.post1.id)[0].comments + self.assertNotIn( + comment, self.BlogPost.objects(id=self.post1.id)[0].comments ) self.assertEqual( len(self.BlogPost.objects(id=self.post1.id)[0].comments), 1 ) # Ensure that the user2 comment no longer exists in the list. - # < 2.6 Incompatible > - # self.assertNotIn(comment, self.post1.comments) - self.assertTrue(comment not in self.post1.comments) + self.assertNotIn(comment, self.post1.comments) self.assertEqual(len(self.post1.comments), 1) # Ensure that the delete method returned 1 as the number of entries @@ -3904,6 +4491,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): a_field = IntField() c_field = IntField(custom_data=custom_data) + CustomData.drop_collection() + a1 = CustomData(a_field=1, c_field=2).save() self.assertEqual(2, a1.c_field) self.assertFalse(hasattr(a1.c_field, 'custom_data')) diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index 7c5abeac..8364d5ef 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/file_tests.py @@ -1,18 +1,16 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] - import copy import os import unittest import tempfile import gridfs +import six from nose.plugins.skip import SkipTest from mongoengine import * from mongoengine.connection import get_db -from mongoengine.python_support import b, StringIO +from mongoengine.python_support import StringIO try: from PIL import Image @@ -20,15 +18,13 @@ try: except ImportError: HAS_PIL = False +from tests.utils import MongoDBTestCase + TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') -class FileTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() +class FileTest(MongoDBTestCase): def tearDown(self): self.db.drop_collection('fs.files') @@ -49,7 +45,7 @@ class FileTest(unittest.TestCase): PutFile.drop_collection() - text = b('Hello, World!') + text = six.b('Hello, World!') content_type = 'text/plain' putfile = PutFile() @@ -88,8 +84,8 @@ class FileTest(unittest.TestCase): StreamFile.drop_collection() - text = b('Hello, World!') - more_text = b('Foo Bar') + text = six.b('Hello, World!') + more_text = six.b('Foo Bar') content_type = 'text/plain' streamfile = StreamFile() @@ -123,8 +119,8 @@ class FileTest(unittest.TestCase): StreamFile.drop_collection() - text = b('Hello, World!') - more_text = b('Foo Bar') + text = six.b('Hello, World!') + more_text = six.b('Foo Bar') content_type = 'text/plain' streamfile = StreamFile() @@ -155,8 +151,8 @@ class FileTest(unittest.TestCase): class SetFile(Document): the_file = FileField() - text = b('Hello, World!') - more_text = b('Foo Bar') + text = six.b('Hello, World!') + more_text = six.b('Foo Bar') SetFile.drop_collection() @@ -185,7 +181,7 @@ class FileTest(unittest.TestCase): GridDocument.drop_collection() with tempfile.TemporaryFile() as f: - f.write(b("Hello World!")) + f.write(six.b("Hello World!")) f.flush() # Test without default @@ -202,7 +198,7 @@ class FileTest(unittest.TestCase): self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) # Test with default - doc_d = GridDocument(the_file=b('')) + doc_d = GridDocument(the_file=six.b('')) doc_d.save() doc_e = GridDocument.objects.with_id(doc_d.id) @@ -228,7 +224,7 @@ class FileTest(unittest.TestCase): 
# First instance test_file = TestFile() test_file.name = "Hello, World!" - test_file.the_file.put(b('Hello, World!')) + test_file.the_file.put(six.b('Hello, World!')) test_file.save() # Second instance @@ -282,7 +278,7 @@ class FileTest(unittest.TestCase): test_file = TestFile() self.assertFalse(bool(test_file.the_file)) - test_file.the_file.put(b('Hello, World!'), content_type='text/plain') + test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain') test_file.save() self.assertTrue(bool(test_file.the_file)) @@ -297,66 +293,66 @@ class FileTest(unittest.TestCase): test_file = TestFile() self.assertFalse(test_file.the_file in [{"test": 1}]) - def test_file_disk_space(self): - """ Test disk space usage when we delete/replace a file """ + def test_file_disk_space(self): + """ Test disk space usage when we delete/replace a file """ class TestFile(Document): the_file = FileField() - - text = b('Hello, World!') + + text = six.b('Hello, World!') content_type = 'text/plain' testfile = TestFile() testfile.the_file.put(text, content_type=content_type, filename="hello") testfile.save() - - # Now check fs.files and fs.chunks + + # Now check fs.files and fs.chunks db = TestFile._get_db() - + files = db.fs.files.find() chunks = db.fs.chunks.find() self.assertEquals(len(list(files)), 1) self.assertEquals(len(list(chunks)), 1) - # Deleting the docoument should delete the files + # Deleting the docoument should delete the files testfile.delete() - + files = db.fs.files.find() chunks = db.fs.chunks.find() self.assertEquals(len(list(files)), 0) self.assertEquals(len(list(chunks)), 0) - - # Test case where we don't store a file in the first place + + # Test case where we don't store a file in the first place testfile = TestFile() testfile.save() - + files = db.fs.files.find() chunks = db.fs.chunks.find() self.assertEquals(len(list(files)), 0) self.assertEquals(len(list(chunks)), 0) - + testfile.delete() - + files = db.fs.files.find() chunks = db.fs.chunks.find() self.assertEquals(len(list(files)), 0) self.assertEquals(len(list(chunks)), 0) - - # Test case where we overwrite the file + + # Test case where we overwrite the file testfile = TestFile() testfile.the_file.put(text, content_type=content_type, filename="hello") testfile.save() - - text = b('Bonjour, World!') + + text = six.b('Bonjour, World!') testfile.the_file.replace(text, content_type=content_type, filename="hello") testfile.save() - + files = db.fs.files.find() chunks = db.fs.chunks.find() self.assertEquals(len(list(files)), 1) self.assertEquals(len(list(chunks)), 1) - + testfile.delete() - + files = db.fs.files.find() chunks = db.fs.chunks.find() self.assertEquals(len(list(files)), 0) @@ -372,14 +368,14 @@ class FileTest(unittest.TestCase): TestImage.drop_collection() with tempfile.TemporaryFile() as f: - f.write(b("Hello World!")) + f.write(six.b("Hello World!")) f.flush() t = TestImage() try: t.image.put(f) self.fail("Should have raised an invalidation error") - except ValidationError, e: + except ValidationError as e: self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) t = TestImage() @@ -496,7 +492,7 @@ class FileTest(unittest.TestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" 
- test_file.the_file.put(b('Hello, World!'), + test_file.the_file.put(six.b('Hello, World!'), name="hello.txt") test_file.save() @@ -504,16 +500,15 @@ class FileTest(unittest.TestCase): self.assertEqual(data.get('name'), 'hello.txt') test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), - b('Hello, World!')) + self.assertEqual(test_file.the_file.read(), six.b('Hello, World!')) test_file = TestFile.objects.first() - test_file.the_file = b('HELLO, WORLD!') + test_file.the_file = six.b('HELLO, WORLD!') test_file.save() test_file = TestFile.objects.first() self.assertEqual(test_file.the_file.read(), - b('HELLO, WORLD!')) + six.b('HELLO, WORLD!')) def test_copyable(self): class PutFile(Document): @@ -521,7 +516,7 @@ class FileTest(unittest.TestCase): PutFile.drop_collection() - text = b('Hello, World!') + text = six.b('Hello, World!') content_type = 'text/plain' putfile = PutFile() diff --git a/tests/fields/geo.py b/tests/fields/geo.py index c3f41481..1c5bccc0 100644 --- a/tests/fields/geo.py +++ b/tests/fields/geo.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] - import unittest from mongoengine import * diff --git a/tests/fixtures.py b/tests/fixtures.py index b3bf73e8..d8eb8487 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -26,7 +26,7 @@ class NewDocumentPickleTest(Document): new_field = StringField() -class PickleDyanmicEmbedded(DynamicEmbeddedDocument): +class PickleDynamicEmbedded(DynamicEmbeddedDocument): date = DateTimeField(default=datetime.now) diff --git a/tests/migration/__init__.py b/tests/migration/__init__.py deleted file mode 100644 index 6fc83e02..00000000 --- a/tests/migration/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from convert_to_new_inheritance_model import * -from decimalfield_as_float import * -from refrencefield_dbref_to_object_id import * -from turn_off_inheritance import * -from uuidfield_to_binary import * - -if __name__ == '__main__': - unittest.main() diff --git a/tests/migration/convert_to_new_inheritance_model.py b/tests/migration/convert_to_new_inheritance_model.py deleted file mode 100644 index 89ee9e9d..00000000 --- a/tests/migration/convert_to_new_inheritance_model.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -import unittest - -from mongoengine import Document, connect -from mongoengine.connection import get_db -from mongoengine.fields import StringField - -__all__ = ('ConvertToNewInheritanceModel', ) - - -class ConvertToNewInheritanceModel(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - - def tearDown(self): - for collection in self.db.collection_names(): - if 'system.' in collection: - continue - self.db.drop_collection(collection) - - def test_how_to_convert_to_the_new_inheritance_model(self): - """Demonstrates migrating from 0.7 to 0.8 - """ - - # 1. Declaration of the class - class Animal(Document): - name = StringField() - meta = { - 'allow_inheritance': True, - 'indexes': ['name'] - } - - # 2. Remove _types - collection = Animal._get_collection() - collection.update({}, {"$unset": {"_types": 1}}, multi=True) - - # 3. Confirm extra data is removed - count = collection.find({'_types': {"$exists": True}}).count() - self.assertEqual(0, count) - - # 4. Remove indexes - info = collection.index_information() - indexes_to_drop = [key for key, value in info.iteritems() - if '_types' in dict(value['key'])] - for index in indexes_to_drop: - collection.drop_index(index) - - # 5. 
Recreate indexes - Animal.ensure_indexes() diff --git a/tests/migration/decimalfield_as_float.py b/tests/migration/decimalfield_as_float.py deleted file mode 100644 index 3903c913..00000000 --- a/tests/migration/decimalfield_as_float.py +++ /dev/null @@ -1,50 +0,0 @@ - # -*- coding: utf-8 -*- -import unittest -import decimal -from decimal import Decimal - -from mongoengine import Document, connect -from mongoengine.connection import get_db -from mongoengine.fields import StringField, DecimalField, ListField - -__all__ = ('ConvertDecimalField', ) - - -class ConvertDecimalField(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - - def test_how_to_convert_decimal_fields(self): - """Demonstrates migrating from 0.7 to 0.8 - """ - - # 1. Old definition - using dbrefs - class Person(Document): - name = StringField() - money = DecimalField(force_string=True) - monies = ListField(DecimalField(force_string=True)) - - Person.drop_collection() - Person(name="Wilson Jr", money=Decimal("2.50"), - monies=[Decimal("2.10"), Decimal("5.00")]).save() - - # 2. Start the migration by changing the schema - # Change DecimalField - add precision and rounding settings - class Person(Document): - name = StringField() - money = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP) - monies = ListField(DecimalField(precision=2, - rounding=decimal.ROUND_HALF_UP)) - - # 3. Loop all the objects and mark parent as changed - for p in Person.objects: - p._mark_as_changed('money') - p._mark_as_changed('monies') - p.save() - - # 4. Confirmation of the fix! - wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] - self.assertTrue(isinstance(wilson['money'], float)) - self.assertTrue(all([isinstance(m, float) for m in wilson['monies']])) diff --git a/tests/migration/refrencefield_dbref_to_object_id.py b/tests/migration/refrencefield_dbref_to_object_id.py deleted file mode 100644 index d3acbe92..00000000 --- a/tests/migration/refrencefield_dbref_to_object_id.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -import unittest - -from mongoengine import Document, connect -from mongoengine.connection import get_db -from mongoengine.fields import StringField, ReferenceField, ListField - -__all__ = ('ConvertToObjectIdsModel', ) - - -class ConvertToObjectIdsModel(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - - def test_how_to_convert_to_object_id_reference_fields(self): - """Demonstrates migrating from 0.7 to 0.8 - """ - - # 1. Old definition - using dbrefs - class Person(Document): - name = StringField() - parent = ReferenceField('self', dbref=True) - friends = ListField(ReferenceField('self', dbref=True)) - - Person.drop_collection() - - p1 = Person(name="Wilson", parent=None).save() - f1 = Person(name="John", parent=None).save() - f2 = Person(name="Paul", parent=None).save() - f3 = Person(name="George", parent=None).save() - f4 = Person(name="Ringo", parent=None).save() - Person(name="Wilson Jr", parent=p1, friends=[f1, f2, f3, f4]).save() - - # 2. Start the migration by changing the schema - # Change ReferenceField as now dbref defaults to False - class Person(Document): - name = StringField() - parent = ReferenceField('self') - friends = ListField(ReferenceField('self')) - - # 3. Loop all the objects and mark parent as changed - for p in Person.objects: - p._mark_as_changed('parent') - p._mark_as_changed('friends') - p.save() - - # 4. Confirmation of the fix! 
- wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] - self.assertEqual(p1.id, wilson['parent']) - self.assertEqual([f1.id, f2.id, f3.id, f4.id], wilson['friends']) diff --git a/tests/migration/turn_off_inheritance.py b/tests/migration/turn_off_inheritance.py deleted file mode 100644 index ee461a84..00000000 --- a/tests/migration/turn_off_inheritance.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -import unittest - -from mongoengine import Document, connect -from mongoengine.connection import get_db -from mongoengine.fields import StringField - -__all__ = ('TurnOffInheritanceTest', ) - - -class TurnOffInheritanceTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - - def tearDown(self): - for collection in self.db.collection_names(): - if 'system.' in collection: - continue - self.db.drop_collection(collection) - - def test_how_to_turn_off_inheritance(self): - """Demonstrates migrating from allow_inheritance = True to False. - """ - - # 1. Old declaration of the class - - class Animal(Document): - name = StringField() - meta = { - 'allow_inheritance': True, - 'indexes': ['name'] - } - - # 2. Turn off inheritance - class Animal(Document): - name = StringField() - meta = { - 'allow_inheritance': False, - 'indexes': ['name'] - } - - # 3. Remove _types and _cls - collection = Animal._get_collection() - collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True) - - # 3. Confirm extra data is removed - count = collection.find({"$or": [{'_types': {"$exists": True}}, - {'_cls': {"$exists": True}}]}).count() - assert count == 0 - - # 4. Remove indexes - info = collection.index_information() - indexes_to_drop = [key for key, value in info.iteritems() - if '_types' in dict(value['key']) - or '_cls' in dict(value['key'])] - for index in indexes_to_drop: - collection.drop_index(index) - - # 5. Recreate indexes - Animal.ensure_indexes() diff --git a/tests/migration/uuidfield_to_binary.py b/tests/migration/uuidfield_to_binary.py deleted file mode 100644 index a535e91f..00000000 --- a/tests/migration/uuidfield_to_binary.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -import unittest -import uuid - -from mongoengine import Document, connect -from mongoengine.connection import get_db -from mongoengine.fields import StringField, UUIDField, ListField - -__all__ = ('ConvertToBinaryUUID', ) - - -class ConvertToBinaryUUID(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - - def test_how_to_convert_to_binary_uuid_fields(self): - """Demonstrates migrating from 0.7 to 0.8 - """ - - # 1. Old definition - using dbrefs - class Person(Document): - name = StringField() - uuid = UUIDField(binary=False) - uuids = ListField(UUIDField(binary=False)) - - Person.drop_collection() - Person(name="Wilson Jr", uuid=uuid.uuid4(), - uuids=[uuid.uuid4(), uuid.uuid4()]).save() - - # 2. Start the migration by changing the schema - # Change UUIDFIeld as now binary defaults to True - class Person(Document): - name = StringField() - uuid = UUIDField() - uuids = ListField(UUIDField()) - - # 3. Loop all the objects and mark parent as changed - for p in Person.objects: - p._mark_as_changed('uuid') - p._mark_as_changed('uuids') - p.save() - - # 4. Confirmation of the fix! 
- wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] - self.assertTrue(isinstance(wilson['uuid'], uuid.UUID)) - self.assertTrue(all([isinstance(u, uuid.UUID) for u in wilson['uuids']])) diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 7d66d263..d1277e06 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -1,6 +1,3 @@ -import sys -sys.path[0:0] = [""] - import unittest from mongoengine import * @@ -95,7 +92,7 @@ class OnlyExcludeAllTest(unittest.TestCase): exclude = ['d', 'e'] only = ['b', 'c'] - qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) + qs = MyDoc.objects.fields(**{i: 1 for i in include}) self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) qs = qs.only(*only) @@ -103,14 +100,14 @@ class OnlyExcludeAllTest(unittest.TestCase): qs = qs.exclude(*exclude) self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) - qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) + qs = MyDoc.objects.fields(**{i: 1 for i in include}) qs = qs.exclude(*exclude) self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) qs = qs.only(*only) self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) qs = MyDoc.objects.exclude(*exclude) - qs = qs.fields(**dict(((i, 1) for i in include))) + qs = qs.fields(**{i: 1 for i in include}) self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) qs = qs.only(*only) self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) @@ -129,7 +126,7 @@ class OnlyExcludeAllTest(unittest.TestCase): exclude = ['d', 'e'] only = ['b', 'c'] - qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) + qs = MyDoc.objects.fields(**{i: 1 for i in include}) qs = qs.exclude(*exclude) qs = qs.only(*only) qs = qs.fields(slice__b=5) @@ -144,6 +141,16 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertEqual(qs._loaded_fields.as_dict(), {'b': {'$slice': 5}}) + def test_mix_slice_with_other_fields(self): + class MyDoc(Document): + a = ListField() + b = ListField() + c = ListField() + + qs = MyDoc.objects.fields(a=1, b=0, slice__c=2) + self.assertEqual(qs._loaded_fields.as_dict(), + {'c': {'$slice': 2}, 'a': 1}) + def test_only(self): """Ensure that QuerySet.only only returns the requested fields. 
""" diff --git a/tests/queryset/geo.py b/tests/queryset/geo.py index 9aac44f5..d10c51cd 100644 --- a/tests/queryset/geo.py +++ b/tests/queryset/geo.py @@ -1,9 +1,5 @@ -import sys - -sys.path[0:0] = [""] - -import unittest from datetime import datetime, timedelta +import unittest from pymongo.errors import OperationFailure from mongoengine import * diff --git a/tests/queryset/modify.py b/tests/queryset/modify.py index e0c7d1fe..607937f6 100644 --- a/tests/queryset/modify.py +++ b/tests/queryset/modify.py @@ -1,6 +1,3 @@ -import sys -sys.path[0:0] = [""] - import unittest from mongoengine import connect, Document, IntField @@ -99,4 +96,4 @@ class FindAndModifyTest(unittest.TestCase): if __name__ == '__main__': - unittest.main() \ No newline at end of file + unittest.main() diff --git a/tests/queryset/pickable.py b/tests/queryset/pickable.py new file mode 100644 index 00000000..d96e7dc6 --- /dev/null +++ b/tests/queryset/pickable.py @@ -0,0 +1,78 @@ +import pickle +import unittest +from pymongo.mongo_client import MongoClient +from mongoengine import Document, StringField, IntField +from mongoengine.connection import connect + +__author__ = 'stas' + +class Person(Document): + name = StringField() + age = IntField() + +class TestQuerysetPickable(unittest.TestCase): + """ + Test for adding pickling support for QuerySet instances + See issue https://github.com/MongoEngine/mongoengine/issues/442 + """ + def setUp(self): + super(TestQuerysetPickable, self).setUp() + + connection = connect(db="test") #type: pymongo.mongo_client.MongoClient + + connection.drop_database("test") + + self.john = Person.objects.create( + name="John", + age=21 + ) + + + def test_picke_simple_qs(self): + + qs = Person.objects.all() + + pickle.dumps(qs) + + def _get_loaded(self, qs): + s = pickle.dumps(qs) + + return pickle.loads(s) + + def test_unpickle(self): + qs = Person.objects.all() + + loadedQs = self._get_loaded(qs) + + self.assertEqual(qs.count(), loadedQs.count()) + + #can update loadedQs + loadedQs.update(age=23) + + #check + self.assertEqual(Person.objects.first().age, 23) + + def test_pickle_support_filtration(self): + Person.objects.create( + name="Alice", + age=22 + ) + + Person.objects.create( + name="Bob", + age=23 + ) + + qs = Person.objects.filter(age__gte=22) + self.assertEqual(qs.count(), 2) + + loaded = self._get_loaded(qs) + + self.assertEqual(loaded.count(), 2) + self.assertEqual(loaded.filter(name="Bob").first().age, 23) + + + + + + diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index a2b839d6..c54fa13d 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -1,28 +1,23 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] - +import datetime import unittest import uuid + +from bson import DBRef, ObjectId from nose.plugins.skip import SkipTest - -from datetime import datetime, timedelta - import pymongo from pymongo.errors import ConfigurationError from pymongo.read_preferences import ReadPreference - -from bson import ObjectId, DBRef +import six from mongoengine import * from mongoengine.connection import get_connection, get_db -from mongoengine.python_support import PY3, IS_PYMONGO_3 from mongoengine.context_managers import query_counter, switch_db -from mongoengine.queryset import (QuerySet, QuerySetManager, - MultipleObjectsReturned, DoesNotExist, - queryset_manager) from mongoengine.errors import InvalidQueryError +from mongoengine.python_support import IS_PYMONGO_3 +from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned, + 
QuerySet, QuerySetManager, queryset_manager) __all__ = ("QuerySetTest",) @@ -30,7 +25,10 @@ __all__ = ("QuerySetTest",) class db_ops_tracker(query_counter): def get_ops(self): - ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} + ignore_query = { + 'ns': {'$ne': '%s.system.indexes' % self.db.name}, + 'command.count': {'$ne': 'system.profile'} + } return list(self.db.system.profile.find(ignore_query)) @@ -99,67 +97,120 @@ class QuerySetTest(unittest.TestCase): author = ReferenceField(self.Person) author2 = GenericReferenceField() - def test_reference(): + # test addressing a field from a reference + with self.assertRaises(InvalidQueryError): list(BlogPost.objects(author__name="test")) - self.assertRaises(InvalidQueryError, test_reference) - - def test_generic_reference(): + # should fail for a generic reference as well + with self.assertRaises(InvalidQueryError): list(BlogPost.objects(author2__name="test")) def test_find(self): - """Ensure that a query returns a valid set of results. - """ - self.Person(name="User A", age=20).save() - self.Person(name="User B", age=30).save() + """Ensure that a query returns a valid set of results.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) # Find all people in the collection people = self.Person.objects self.assertEqual(people.count(), 2) results = list(people) + self.assertTrue(isinstance(results[0], self.Person)) self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode))) - self.assertEqual(results[0].name, "User A") + + self.assertEqual(results[0], user_a) + self.assertEqual(results[0].name, 'User A') self.assertEqual(results[0].age, 20) - self.assertEqual(results[1].name, "User B") + + self.assertEqual(results[1], user_b) + self.assertEqual(results[1].name, 'User B') self.assertEqual(results[1].age, 30) - # Use a query to filter the people found to just person1 + # Filter people by age people = self.Person.objects(age=20) self.assertEqual(people.count(), 1) person = people.next() + self.assertEqual(person, user_a) self.assertEqual(person.name, "User A") self.assertEqual(person.age, 20) - # Test limit + def test_limit(self): + """Ensure that QuerySet.limit works as expected.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) + + # Test limit on a new queryset people = list(self.Person.objects.limit(1)) self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, 'User A') + self.assertEqual(people[0], user_a) - # Test skip + # Test limit on an existing queryset + people = self.Person.objects + self.assertEqual(len(people), 2) + people2 = people.limit(1) + self.assertEqual(len(people), 2) + self.assertEqual(len(people2), 1) + self.assertEqual(people2[0], user_a) + + # Test chaining of only after limit + person = self.Person.objects().limit(1).only('name').first() + self.assertEqual(person, user_a) + self.assertEqual(person.name, 'User A') + self.assertEqual(person.age, None) + + def test_skip(self): + """Ensure that QuerySet.skip works as expected.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) + + # Test skip on a new queryset people = list(self.Person.objects.skip(1)) self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, 'User B') + self.assertEqual(people[0], user_b) - person3 = self.Person(name="User C", age=40) - person3.save() + # Test skip on an existing queryset + people 
= self.Person.objects + self.assertEqual(len(people), 2) + people2 = people.skip(1) + self.assertEqual(len(people), 2) + self.assertEqual(len(people2), 1) + self.assertEqual(people2[0], user_b) + + # Test chaining of only after skip + person = self.Person.objects().skip(1).only('name').first() + self.assertEqual(person, user_b) + self.assertEqual(person.name, 'User B') + self.assertEqual(person.age, None) + + def test_slice(self): + """Ensure slicing a queryset works as expected.""" + user_a = self.Person.objects.create(name='User A', age=20) + user_b = self.Person.objects.create(name='User B', age=30) + user_c = self.Person.objects.create(name="User C", age=40) # Test slice limit people = list(self.Person.objects[:2]) self.assertEqual(len(people), 2) - self.assertEqual(people[0].name, 'User A') - self.assertEqual(people[1].name, 'User B') + self.assertEqual(people[0], user_a) + self.assertEqual(people[1], user_b) # Test slice skip people = list(self.Person.objects[1:]) self.assertEqual(len(people), 2) - self.assertEqual(people[0].name, 'User B') - self.assertEqual(people[1].name, 'User C') + self.assertEqual(people[0], user_b) + self.assertEqual(people[1], user_c) # Test slice limit and skip people = list(self.Person.objects[1:2]) self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, 'User B') + self.assertEqual(people[0], user_b) + + # Test slice limit and skip on an existing queryset + people = self.Person.objects + self.assertEqual(len(people), 3) + people2 = people[1:2] + self.assertEqual(len(people2), 1) + self.assertEqual(people2[0], user_b) # Test slice limit and skip cursor reset qs = self.Person.objects[1:2] @@ -170,6 +221,7 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(len(people), 1) self.assertEqual(people[0].name, 'User B') + # Test empty slice people = list(self.Person.objects[1:1]) self.assertEqual(len(people), 0) @@ -179,19 +231,15 @@ class QuerySetTest(unittest.TestCase): # Test larger slice __repr__ self.Person.objects.delete() - for i in xrange(55): + for i in range(55): self.Person(name='A%s' % i, age=i).save() self.assertEqual(self.Person.objects.count(), 55) self.assertEqual("Person object", "%s" % self.Person.objects[0]) - self.assertEqual( - "[, ]", "%s" % self.Person.objects[1:3]) - self.assertEqual( - "[, ]", "%s" % self.Person.objects[51:53]) - # Test only after limit - self.assertEqual(self.Person.objects().limit(2).only('name')[0].age, None) - # Test only after skip - self.assertEqual(self.Person.objects().skip(2).only('name')[0].age, None) + self.assertEqual("[, ]", + "%s" % self.Person.objects[1:3]) + self.assertEqual("[, ]", + "%s" % self.Person.objects[51:53]) def test_find_one(self): """Ensure that a query using find_one returns a valid result. @@ -221,14 +269,15 @@ class QuerySetTest(unittest.TestCase): person = self.Person.objects[1] self.assertEqual(person.name, "User B") - self.assertRaises(IndexError, self.Person.objects.__getitem__, 2) + with self.assertRaises(IndexError): + self.Person.objects[2] # Find a document using just the object id person = self.Person.objects.with_id(person1.id) self.assertEqual(person.name, "User A") - self.assertRaises( - InvalidQueryError, self.Person.objects(name="User A").with_id, person1.id) + with self.assertRaises(InvalidQueryError): + self.Person.objects(name="User A").with_id(person1.id) def test_find_only_one(self): """Ensure that a query using ``get`` returns at most one result. 
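
The new test_limit, test_skip and test_slice tests above all exercise the same behaviour: limit(), skip() and Python slicing each return a new queryset without modifying the one they were derived from, and they compose with only(). A minimal stand-alone sketch of that usage, assuming a local throwaway test database and an illustrative Person document (neither is part of this patch):

from mongoengine import Document, IntField, StringField, connect

connect(db='mongoenginetest')  # same throwaway database the test suite uses


class Person(Document):
    name = StringField()
    age = IntField()


Person.drop_collection()
Person(name='User A', age=20).save()
Person(name='User B', age=30).save()
Person(name='User C', age=40).save()

people = Person.objects          # lazy queryset over all three documents
first_page = people.limit(1)     # new queryset; `people` is left untouched
assert len(people) == 3
assert len(first_page) == 1
assert first_page[0].name == 'User A'

# skip() behaves the same way: the original queryset keeps its full result set
second = list(people.skip(1))
assert len(second) == 2
assert second[0].name == 'User B'

# Python slicing is shorthand for the same skip/limit combination
window = people[1:2]
assert len(window) == 1
assert window[0].name == 'User B'

# limit/skip chain with only(); fields that were not selected come back as None
partial = Person.objects.skip(1).only('name').first()
assert partial.name == 'User B'
assert partial.age is None
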
@@ -287,6 +336,9 @@ class QuerySetTest(unittest.TestCase): blog = Blog.objects(posts__0__comments__0__name='testa').get() self.assertEqual(blog, blog1) + blog = Blog.objects(posts__0__comments__0__name='testb').get() + self.assertEqual(blog, blog2) + query = Blog.objects(posts__1__comments__1__name='testb') self.assertEqual(query.count(), 2) @@ -337,9 +389,37 @@ class QuerySetTest(unittest.TestCase): query = query.filter(boolfield=True) self.assertEqual(query.count(), 1) + def test_batch_size(self): + """Ensure that batch_size works.""" + class A(Document): + s = StringField() + + A.drop_collection() + + for i in range(100): + A.objects.create(s=str(i)) + + # test iterating over the result set + cnt = 0 + for a in A.objects.batch_size(10): + cnt += 1 + self.assertEqual(cnt, 100) + + # test chaining + qs = A.objects.all() + qs = qs.limit(10).batch_size(20).skip(91) + cnt = 0 + for a in qs: + cnt += 1 + self.assertEqual(cnt, 9) + + # test invalid batch size + qs = A.objects.batch_size(-1) + with self.assertRaises(ValueError): + list(qs) + def test_update_write_concern(self): """Test that passing write_concern works""" - self.Person.drop_collection() write_concern = {"fsync": True} @@ -365,18 +445,14 @@ class QuerySetTest(unittest.TestCase): """Test to ensure that update is passed a value to update to""" self.Person.drop_collection() - author = self.Person(name='Test User') - author.save() + author = self.Person.objects.create(name='Test User') - def update_raises(): + with self.assertRaises(OperationError): self.Person.objects(pk=author.pk).update({}) - def update_one_raises(): + with self.assertRaises(OperationError): self.Person.objects(pk=author.pk).update_one({}) - self.assertRaises(OperationError, update_raises) - self.assertRaises(OperationError, update_one_raises) - def test_update_array_position(self): """Ensure that updating by array position works. @@ -404,8 +480,8 @@ class QuerySetTest(unittest.TestCase): Blog.objects.create(posts=[post2, post1]) # Update all of the first comments of second posts of all blogs - Blog.objects().update(set__posts__1__comments__0__name="testc") - testc_blogs = Blog.objects(posts__1__comments__0__name="testc") + Blog.objects().update(set__posts__1__comments__0__name='testc') + testc_blogs = Blog.objects(posts__1__comments__0__name='testc') self.assertEqual(testc_blogs.count(), 2) Blog.drop_collection() @@ -414,14 +490,13 @@ class QuerySetTest(unittest.TestCase): # Update only the first blog returned by the query Blog.objects().update_one( - set__posts__1__comments__1__name="testc") - testc_blogs = Blog.objects(posts__1__comments__1__name="testc") + set__posts__1__comments__1__name='testc') + testc_blogs = Blog.objects(posts__1__comments__1__name='testc') self.assertEqual(testc_blogs.count(), 1) # Check that using this indexing syntax on a non-list fails - def non_list_indexing(): - Blog.objects().update(set__posts__1__comments__0__name__1="asdf") - self.assertRaises(InvalidQueryError, non_list_indexing) + with self.assertRaises(InvalidQueryError): + Blog.objects().update(set__posts__1__comments__0__name__1='asdf') Blog.drop_collection() @@ -489,15 +564,12 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) # Nested updates arent supported yet.. 
- def update_nested(): + with self.assertRaises(OperationError): Simple.drop_collection() Simple(x=[{'test': [1, 2, 3, 4]}]).save() Simple.objects(x__test=2).update(set__x__S__test__S=3) self.assertEqual(simple.x, [1, 2, 3, 4]) - self.assertRaises(OperationError, update_nested) - Simple.drop_collection() - def test_update_using_positional_operator_embedded_document(self): """Ensure that the embedded documents can be updated using the positional operator.""" @@ -590,11 +662,11 @@ class QuerySetTest(unittest.TestCase): members = DictField() club = Club() - club.members['John'] = dict(gender="M", age=13) + club.members['John'] = {'gender': 'M', 'age': 13} club.save() Club.objects().update( - set__members={"John": dict(gender="F", age=14)}) + set__members={"John": {'gender': 'F', 'age': 14}}) club = Club.objects().first() self.assertEqual(club.members['John']['gender'], "F") @@ -633,39 +705,39 @@ class QuerySetTest(unittest.TestCase): self.assertRaises(ValidationError, Doc.objects().update, dt_f="datetime", upsert=True) self.assertRaises(ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True) - def test_update_related_models( self ): - class TestPerson( Document ): + def test_update_related_models(self): + class TestPerson(Document): name = StringField() - class TestOrganization( Document ): + class TestOrganization(Document): name = StringField() - owner = ReferenceField( TestPerson ) + owner = ReferenceField(TestPerson) TestPerson.drop_collection() TestOrganization.drop_collection() - p = TestPerson( name='p1' ) + p = TestPerson(name='p1') p.save() - o = TestOrganization( name='o1' ) + o = TestOrganization(name='o1') o.save() o.owner = p p.name = 'p2' - self.assertEqual( o._get_changed_fields(), [ 'owner' ] ) - self.assertEqual( p._get_changed_fields(), [ 'name' ] ) + self.assertEqual(o._get_changed_fields(), ['owner']) + self.assertEqual(p._get_changed_fields(), ['name']) o.save() - self.assertEqual( o._get_changed_fields(), [] ) - self.assertEqual( p._get_changed_fields(), [ 'name' ] ) # Fails; it's empty + self.assertEqual(o._get_changed_fields(), []) + self.assertEqual(p._get_changed_fields(), ['name']) # Fails; it's empty # This will do NOTHING at all, even though we changed the name p.save() p.reload() - self.assertEqual( p.name, 'p2' ) # Fails; it's still `p1` + self.assertEqual(p.name, 'p2') # Fails; it's still `p1` def test_upsert(self): self.Person.drop_collection() @@ -680,12 +752,20 @@ class QuerySetTest(unittest.TestCase): def test_upsert_one(self): self.Person.drop_collection() - self.Person.objects(name="Bob", age=30).update_one(upsert=True) + bob = self.Person.objects(name="Bob", age=30).upsert_one() - bob = self.Person.objects.first() self.assertEqual("Bob", bob.name) self.assertEqual(30, bob.age) + bob.name = "Bobby" + bob.save() + + bobby = self.Person.objects(name="Bobby", age=30).upsert_one() + + self.assertEqual("Bobby", bobby.name) + self.assertEqual(30, bobby.age) + self.assertEqual(bob.id, bobby.id) + def test_set_on_insert(self): self.Person.drop_collection() @@ -767,7 +847,7 @@ class QuerySetTest(unittest.TestCase): post2 = Post(comments=[comment2, comment2]) blogs = [] - for i in xrange(1, 100): + for i in range(1, 100): blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) Blog.objects.insert(blogs, load_bulk=False) @@ -804,30 +884,31 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(Blog.objects.count(), 2) - # test handles people trying to upsert - def throw_operation_error(): + # test inserting an existing document (shouldn't be 
allowed) + with self.assertRaises(OperationError): + blog = Blog.objects.first() + Blog.objects.insert(blog) + + # test inserting a query set + with self.assertRaises(OperationError): blogs = Blog.objects Blog.objects.insert(blogs) - self.assertRaises(OperationError, throw_operation_error) - - # Test can insert new doc + # insert a new doc new_post = Blog(title="code123", id=ObjectId()) Blog.objects.insert(new_post) - # test handles other classes being inserted - def throw_operation_error_wrong_doc(): - class Author(Document): - pass + class Author(Document): + pass + + # try inserting a different document class + with self.assertRaises(OperationError): Blog.objects.insert(Author()) - self.assertRaises(OperationError, throw_operation_error_wrong_doc) - - def throw_operation_error_not_a_document(): + # try inserting a non-document + with self.assertRaises(OperationError): Blog.objects.insert("HELLO WORLD") - self.assertRaises(OperationError, throw_operation_error_not_a_document) - Blog.drop_collection() blog1 = Blog(title="code", posts=[post1, post2]) @@ -847,14 +928,13 @@ class QuerySetTest(unittest.TestCase): blog3 = Blog(title="baz", posts=[post1, post2]) Blog.objects.insert([blog1, blog2]) - def throw_operation_error_not_unique(): + with self.assertRaises(NotUniqueError): Blog.objects.insert([blog2, blog3]) - self.assertRaises(NotUniqueError, throw_operation_error_not_unique) self.assertEqual(Blog.objects.count(), 2) - Blog.objects.insert([blog2, blog3], write_concern={"w": 0, - 'continue_on_error': True}) + Blog.objects.insert([blog2, blog3], + write_concern={"w": 0, 'continue_on_error': True}) self.assertEqual(Blog.objects.count(), 3) def test_get_changed_fields_query_count(self): @@ -987,7 +1067,7 @@ class QuerySetTest(unittest.TestCase): Doc.drop_collection() - for i in xrange(1000): + for i in range(1000): Doc(number=i).save() docs = Doc.objects.order_by('number') @@ -1104,24 +1184,29 @@ class QuerySetTest(unittest.TestCase): blog_2.save() blog_3.save() - blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1", - is_published=True, - published_date=datetime(2010, 1, 5, 0, 0, 0)) - blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2", - is_published=True, - published_date=datetime(2010, 1, 6, 0, 0, 0)) - blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3", - is_published=True, - published_date=datetime(2010, 1, 7, 0, 0, 0)) - - blog_post_1.save() - blog_post_2.save() - blog_post_3.save() + BlogPost.objects.create( + blog=blog_1, + title="Blog Post #1", + is_published=True, + published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + ) + BlogPost.objects.create( + blog=blog_2, + title="Blog Post #2", + is_published=True, + published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + ) + BlogPost.objects.create( + blog=blog_3, + title="Blog Post #3", + is_published=True, + published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + ) # find all published blog posts before 2010-01-07 published_posts = BlogPost.published() published_posts = published_posts.filter( - published_date__lt=datetime(2010, 1, 7, 0, 0, 0)) + published_date__lt=datetime.datetime(2010, 1, 7, 0, 0, 0)) self.assertEqual(published_posts.count(), 2) blog_posts = BlogPost.objects @@ -1136,7 +1221,7 @@ class QuerySetTest(unittest.TestCase): qs = list(qs) expected = list(expected) self.assertEqual(len(qs), len(expected)) - for i in xrange(len(qs)): + for i in range(len(qs)): self.assertEqual(qs[i], expected[i]) def test_ordering(self): @@ -1152,16 +1237,18 @@ class QuerySetTest(unittest.TestCase): 
BlogPost.drop_collection() - blog_post_1 = BlogPost(title="Blog Post #1", - published_date=datetime(2010, 1, 5, 0, 0, 0)) - blog_post_2 = BlogPost(title="Blog Post #2", - published_date=datetime(2010, 1, 6, 0, 0, 0)) - blog_post_3 = BlogPost(title="Blog Post #3", - published_date=datetime(2010, 1, 7, 0, 0, 0)) - - blog_post_1.save() - blog_post_2.save() - blog_post_3.save() + blog_post_1 = BlogPost.objects.create( + title="Blog Post #1", + published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + ) + blog_post_2 = BlogPost.objects.create( + title="Blog Post #2", + published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + ) + blog_post_3 = BlogPost.objects.create( + title="Blog Post #3", + published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + ) # get the "first" BlogPost using default ordering # from BlogPost.meta.ordering @@ -1174,7 +1261,8 @@ class QuerySetTest(unittest.TestCase): self.assertSequence(qs, expected) def test_clear_ordering(self): - """ Ensure that the default ordering can be cleared by calling order_by(). + """Ensure that the default ordering can be cleared by calling + order_by() w/o any arguments. """ class BlogPost(Document): title = StringField() @@ -1186,16 +1274,35 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() + # default ordering should be used by default with db_ops_tracker() as q: BlogPost.objects.filter(title='whatever').first() self.assertEqual(len(q.get_ops()), 1) self.assertEqual( - q.get_ops()[0]['query']['$orderby'], {u'published_date': -1}) + q.get_ops()[0]['query']['$orderby'], + {'published_date': -1} + ) + # calling order_by() should clear the default ordering with db_ops_tracker() as q: BlogPost.objects.filter(title='whatever').order_by().first() self.assertEqual(len(q.get_ops()), 1) - print q.get_ops()[0]['query'] + self.assertFalse('$orderby' in q.get_ops()[0]['query']) + + # calling an explicit order_by should use a specified sort + with db_ops_tracker() as q: + BlogPost.objects.filter(title='whatever').order_by('published_date').first() + self.assertEqual(len(q.get_ops()), 1) + self.assertEqual( + q.get_ops()[0]['query']['$orderby'], + {'published_date': 1} + ) + + # calling order_by() after an explicit sort should clear it + with db_ops_tracker() as q: + qs = BlogPost.objects.filter(title='whatever').order_by('published_date') + qs.order_by().first() + self.assertEqual(len(q.get_ops()), 1) self.assertFalse('$orderby' in q.get_ops()[0]['query']) def test_no_ordering_for_get(self): @@ -1210,7 +1317,7 @@ class QuerySetTest(unittest.TestCase): } BlogPost.objects.create( - title='whatever', published_date=datetime.utcnow()) + title='whatever', published_date=datetime.datetime.utcnow()) with db_ops_tracker() as q: BlogPost.objects.get(title='whatever') @@ -1224,7 +1331,8 @@ class QuerySetTest(unittest.TestCase): self.assertFalse('$orderby' in q.get_ops()[0]['query']) def test_find_embedded(self): - """Ensure that an embedded document is properly returned from a query. + """Ensure that an embedded document is properly returned from + different manners of querying. """ class User(EmbeddedDocument): name = StringField() @@ -1235,16 +1343,45 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - post = BlogPost(content='Had a good coffee today...') - post.author = User(name='Test User') - post.save() + user = User(name='Test User') + BlogPost.objects.create( + author=user, + content='Had a good coffee today...' 
+ ) result = BlogPost.objects.first() self.assertTrue(isinstance(result.author, User)) self.assertEqual(result.author.name, 'Test User') + result = BlogPost.objects.get(author__name=user.name) + self.assertTrue(isinstance(result.author, User)) + self.assertEqual(result.author.name, 'Test User') + + result = BlogPost.objects.get(author={'name': user.name}) + self.assertTrue(isinstance(result.author, User)) + self.assertEqual(result.author.name, 'Test User') + + # Fails, since the string is not a type that is able to represent the + # author's document structure (should be dict) + with self.assertRaises(InvalidQueryError): + BlogPost.objects.get(author=user.name) + + def test_find_empty_embedded(self): + """Ensure that you can save and find an empty embedded document.""" + class User(EmbeddedDocument): + name = StringField() + + class BlogPost(Document): + content = StringField() + author = EmbeddedDocumentField(User) + BlogPost.drop_collection() + BlogPost.objects.create(content='Anonymous post...') + + result = BlogPost.objects.get(author=None) + self.assertEqual(result.author, None) + def test_find_dict_item(self): """Ensure that DictField items may be found. """ @@ -1652,7 +1789,7 @@ class QuerySetTest(unittest.TestCase): Log.drop_collection() - for i in xrange(10): + for i in range(10): Log().save() Log.objects()[3:5].delete() @@ -1755,6 +1892,11 @@ class QuerySetTest(unittest.TestCase): post.reload() self.assertEqual(post.hits, 10) + # Negative dec operator is equal to a positive inc operator + BlogPost.objects.update_one(dec__hits=-1) + post.reload() + self.assertEqual(post.hits, 11) + BlogPost.objects.update(push__tags='mongo') post.reload() self.assertTrue('mongo' in post.tags) @@ -1852,12 +1994,10 @@ class QuerySetTest(unittest.TestCase): Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') self.assertEqual(Site.objects.first().collaborators, []) - def pull_all(): + with self.assertRaises(InvalidQueryError): Site.objects(id=s.id).update_one( pull_all__collaborators__user=['Ross']) - self.assertRaises(InvalidQueryError, pull_all) - def test_pull_from_nested_embedded(self): class User(EmbeddedDocument): @@ -1888,12 +2028,10 @@ class QuerySetTest(unittest.TestCase): pull__collaborators__unhelpful={'name': 'Frank'}) self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) - def pull_all(): + with self.assertRaises(InvalidQueryError): Site.objects(id=s.id).update_one( pull_all__collaborators__helpful__name=['Ross']) - self.assertRaises(InvalidQueryError, pull_all) - def test_pull_from_nested_mapfield(self): class Collaborator(EmbeddedDocument): @@ -1922,12 +2060,10 @@ class QuerySetTest(unittest.TestCase): pull__collaborators__unhelpful={'user': 'Frank'}) self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) - def pull_all(): + with self.assertRaises(InvalidQueryError): Site.objects(id=s.id).update_one( pull_all__collaborators__helpful__user=['Ross']) - self.assertRaises(InvalidQueryError, pull_all) - def test_update_one_pop_generic_reference(self): class BlogTag(Document): @@ -2073,18 +2209,22 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - blog_post_3 = BlogPost(title="Blog Post #3", - published_date=datetime(2010, 1, 6, 0, 0, 0)) - blog_post_2 = BlogPost(title="Blog Post #2", - published_date=datetime(2010, 1, 5, 0, 0, 0)) - blog_post_4 = BlogPost(title="Blog Post #4", - published_date=datetime(2010, 1, 7, 0, 0, 0)) - blog_post_1 = BlogPost(title="Blog Post #1", published_date=None) - - blog_post_3.save() - 
blog_post_1.save() - blog_post_4.save() - blog_post_2.save() + blog_post_3 = BlogPost.objects.create( + title="Blog Post #3", + published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + ) + blog_post_2 = BlogPost.objects.create( + title="Blog Post #2", + published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + ) + blog_post_4 = BlogPost.objects.create( + title="Blog Post #4", + published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + ) + blog_post_1 = BlogPost.objects.create( + title="Blog Post #1", + published_date=None + ) expected = [blog_post_1, blog_post_2, blog_post_3, blog_post_4] self.assertSequence(BlogPost.objects.order_by('published_date'), @@ -2103,16 +2243,18 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - blog_post_1 = BlogPost(title="A", - published_date=datetime(2010, 1, 6, 0, 0, 0)) - blog_post_2 = BlogPost(title="B", - published_date=datetime(2010, 1, 6, 0, 0, 0)) - blog_post_3 = BlogPost(title="C", - published_date=datetime(2010, 1, 7, 0, 0, 0)) - - blog_post_2.save() - blog_post_3.save() - blog_post_1.save() + blog_post_1 = BlogPost.objects.create( + title="A", + published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + ) + blog_post_2 = BlogPost.objects.create( + title="B", + published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + ) + blog_post_3 = BlogPost.objects.create( + title="C", + published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + ) qs = BlogPost.objects.order_by('published_date', 'title') expected = [blog_post_1, blog_post_2, blog_post_3] @@ -2178,6 +2320,21 @@ class QuerySetTest(unittest.TestCase): a.author.name for a in Author.objects.order_by('-author__age')] self.assertEqual(names, ['User A', 'User B', 'User C']) + def test_comment(self): + """Make sure adding a comment to the query works.""" + class User(Document): + age = IntField() + + with db_ops_tracker() as q: + adult = (User.objects.filter(age__gte=18) + .comment('looking for an adult') + .first()) + ops = q.get_ops() + self.assertEqual(len(ops), 1) + op = ops[0] + self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}}) + self.assertEqual(op['query']['$comment'], 'looking for an adult') + def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. """ @@ -2416,7 +2573,7 @@ class QuerySetTest(unittest.TestCase): Link.drop_collection() - now = datetime.utcnow() + now = datetime.datetime.utcnow() # Note: Test data taken from a custom Reddit homepage on # Fri, 12 Feb 2010 14:36:00 -0600. Link ordering should @@ -2425,27 +2582,27 @@ class QuerySetTest(unittest.TestCase): Link(title="Google Buzz auto-followed a woman's abusive ex ...", up_votes=1079, down_votes=553, - submitted=now - timedelta(hours=4)).save() + submitted=now - datetime.timedelta(hours=4)).save() Link(title="We did it! 
Barbie is a computer engineer.", up_votes=481, down_votes=124, - submitted=now - timedelta(hours=2)).save() + submitted=now - datetime.timedelta(hours=2)).save() Link(title="This Is A Mosquito Getting Killed By A Laser", up_votes=1446, down_votes=530, - submitted=now - timedelta(hours=13)).save() + submitted=now - datetime.timedelta(hours=13)).save() Link(title="Arabic flashcards land physics student in jail.", up_votes=215, down_votes=105, - submitted=now - timedelta(hours=6)).save() + submitted=now - datetime.timedelta(hours=6)).save() Link(title="The Burger Lab: Presenting, the Flood Burger", up_votes=48, down_votes=17, - submitted=now - timedelta(hours=5)).save() + submitted=now - datetime.timedelta(hours=5)).save() Link(title="How to see polarization with the naked eye", up_votes=74, down_votes=13, - submitted=now - timedelta(hours=10)).save() + submitted=now - datetime.timedelta(hours=10)).save() map_f = """ function() { @@ -2495,7 +2652,7 @@ class QuerySetTest(unittest.TestCase): # provide the reddit epoch (used for ranking) as a variable available # to all phases of the map/reduce operation: map, reduce, and finalize. - reddit_epoch = mktime(datetime(2005, 12, 8, 7, 46, 43).timetuple()) + reddit_epoch = mktime(datetime.datetime(2005, 12, 8, 7, 46, 43).timetuple()) scope = {'reddit_epoch': reddit_epoch} # run a map/reduce operation across all links. ordering is set @@ -2531,7 +2688,7 @@ class QuerySetTest(unittest.TestCase): BlogPost(hits=2, tags=['music', 'actors']).save() def test_assertions(f): - f = dict((key, int(val)) for key, val in f.items()) + f = {key: int(val) for key, val in f.items()} self.assertEqual( set(['music', 'film', 'actors', 'watch']), set(f.keys())) self.assertEqual(f['music'], 3) @@ -2546,7 +2703,7 @@ class QuerySetTest(unittest.TestCase): # Ensure query is taken into account def test_assertions(f): - f = dict((key, int(val)) for key, val in f.items()) + f = {key: int(val) for key, val in f.items()} self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) self.assertEqual(f['music'], 2) self.assertEqual(f['actors'], 1) @@ -2610,7 +2767,7 @@ class QuerySetTest(unittest.TestCase): doc.save() def test_assertions(f): - f = dict((key, int(val)) for key, val in f.items()) + f = {key: int(val) for key, val in f.items()} self.assertEqual( set(['62-3331-1656', '62-3332-1656']), set(f.keys())) self.assertEqual(f['62-3331-1656'], 2) @@ -2624,7 +2781,7 @@ class QuerySetTest(unittest.TestCase): # Ensure query is taken into account def test_assertions(f): - f = dict((key, int(val)) for key, val in f.items()) + f = {key: int(val) for key, val in f.items()} self.assertEqual(set(['62-3331-1656']), set(f.keys())) self.assertEqual(f['62-3331-1656'], 2) @@ -2731,10 +2888,10 @@ class QuerySetTest(unittest.TestCase): Test.drop_collection() - for i in xrange(50): + for i in range(50): Test(val=1).save() - for i in xrange(20): + for i in range(20): Test(val=2).save() freqs = Test.objects.item_frequencies( @@ -2757,25 +2914,15 @@ class QuerySetTest(unittest.TestCase): avg = float(sum(ages)) / (len(ages) + 1) # take into account the 0 self.assertAlmostEqual(int(self.Person.objects.average('age')), avg) - self.assertAlmostEqual( - int(self.Person.objects.aggregate_average('age')), avg - ) self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.average('age')), avg) - self.assertEqual( - int(self.Person.objects.aggregate_average('age')), avg - ) # dot notation self.Person( name='person meta', person_meta=self.PersonMeta(weight=0)).save() 
self.assertAlmostEqual( int(self.Person.objects.average('person_meta.weight')), 0) - self.assertAlmostEqual( - int(self.Person.objects.aggregate_average('person_meta.weight')), - 0 - ) for i, weight in enumerate(ages): self.Person( @@ -2784,19 +2931,11 @@ class QuerySetTest(unittest.TestCase): self.assertAlmostEqual( int(self.Person.objects.average('person_meta.weight')), avg ) - self.assertAlmostEqual( - int(self.Person.objects.aggregate_average('person_meta.weight')), - avg - ) self.Person(name='test meta none').save() self.assertEqual( int(self.Person.objects.average('person_meta.weight')), avg ) - self.assertEqual( - int(self.Person.objects.aggregate_average('person_meta.weight')), - avg - ) # test summing over a filtered queryset over_50 = [a for a in ages if a >= 50] @@ -2805,10 +2944,6 @@ class QuerySetTest(unittest.TestCase): self.Person.objects.filter(age__gte=50).average('age'), avg ) - self.assertEqual( - self.Person.objects.filter(age__gte=50).aggregate_average('age'), - avg - ) def test_sum(self): """Ensure that field can be summed over correctly. @@ -2818,15 +2953,9 @@ class QuerySetTest(unittest.TestCase): self.Person(name='test%s' % i, age=age).save() self.assertEqual(self.Person.objects.sum('age'), sum(ages)) - self.assertEqual( - self.Person.objects.aggregate_sum('age'), sum(ages) - ) self.Person(name='ageless person').save() self.assertEqual(self.Person.objects.sum('age'), sum(ages)) - self.assertEqual( - self.Person.objects.aggregate_sum('age'), sum(ages) - ) for i, age in enumerate(ages): self.Person(name='test meta%s' % @@ -2835,26 +2964,43 @@ class QuerySetTest(unittest.TestCase): self.assertEqual( self.Person.objects.sum('person_meta.weight'), sum(ages) ) - self.assertEqual( - self.Person.objects.aggregate_sum('person_meta.weight'), - sum(ages) - ) self.Person(name='weightless person').save() self.assertEqual(self.Person.objects.sum('age'), sum(ages)) - self.assertEqual( - self.Person.objects.aggregate_sum('age'), sum(ages) - ) # test summing over a filtered queryset self.assertEqual( self.Person.objects.filter(age__gte=50).sum('age'), sum([a for a in ages if a >= 50]) ) - self.assertEqual( - self.Person.objects.filter(age__gte=50).aggregate_sum('age'), - sum([a for a in ages if a >= 50]) - ) + + def test_sum_over_db_field(self): + """Ensure that a field mapped to a db field with a different name + can be summed over correctly. + """ + class UserVisit(Document): + num_visits = IntField(db_field='visits') + + UserVisit.drop_collection() + + UserVisit.objects.create(num_visits=10) + UserVisit.objects.create(num_visits=5) + + self.assertEqual(UserVisit.objects.sum('num_visits'), 15) + + def test_average_over_db_field(self): + """Ensure that a field mapped to a db field with a different name + can have its average computed correctly. 
+ """ + class UserVisit(Document): + num_visits = IntField(db_field='visits') + + UserVisit.drop_collection() + + UserVisit.objects.create(num_visits=20) + UserVisit.objects.create(num_visits=10) + + self.assertEqual(UserVisit.objects.average('num_visits'), 15) def test_embedded_average(self): class Pay(EmbeddedDocument): @@ -2867,21 +3013,12 @@ class QuerySetTest(unittest.TestCase): Doc.drop_collection() - Doc(name=u"Wilson Junior", - pay=Pay(value=150)).save() + Doc(name='Wilson Junior', pay=Pay(value=150)).save() + Doc(name='Isabella Luanna', pay=Pay(value=530)).save() + Doc(name='Tayza mariana', pay=Pay(value=165)).save() + Doc(name='Eliana Costa', pay=Pay(value=115)).save() - Doc(name=u"Isabella Luanna", - pay=Pay(value=530)).save() - - Doc(name=u"Tayza mariana", - pay=Pay(value=165)).save() - - Doc(name=u"Eliana Costa", - pay=Pay(value=115)).save() - - self.assertEqual( - Doc.objects.average('pay.value'), - 240) + self.assertEqual(Doc.objects.average('pay.value'), 240) def test_embedded_array_average(self): class Pay(EmbeddedDocument): @@ -2889,26 +3026,16 @@ class QuerySetTest(unittest.TestCase): class Doc(Document): name = StringField() - pay = EmbeddedDocumentField( - Pay) + pay = EmbeddedDocumentField(Pay) Doc.drop_collection() - Doc(name=u"Wilson Junior", - pay=Pay(values=[150, 100])).save() + Doc(name='Wilson Junior', pay=Pay(values=[150, 100])).save() + Doc(name='Isabella Luanna', pay=Pay(values=[530, 100])).save() + Doc(name='Tayza mariana', pay=Pay(values=[165, 100])).save() + Doc(name='Eliana Costa', pay=Pay(values=[115, 100])).save() - Doc(name=u"Isabella Luanna", - pay=Pay(values=[530, 100])).save() - - Doc(name=u"Tayza mariana", - pay=Pay(values=[165, 100])).save() - - Doc(name=u"Eliana Costa", - pay=Pay(values=[115, 100])).save() - - self.assertEqual( - Doc.objects.average('pay.values'), - 170) + self.assertEqual(Doc.objects.average('pay.values'), 170) def test_array_average(self): class Doc(Document): @@ -2921,9 +3048,7 @@ class QuerySetTest(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - self.assertEqual( - Doc.objects.average('values'), - 170) + self.assertEqual(Doc.objects.average('values'), 170) def test_embedded_sum(self): class Pay(EmbeddedDocument): @@ -2931,26 +3056,16 @@ class QuerySetTest(unittest.TestCase): class Doc(Document): name = StringField() - pay = EmbeddedDocumentField( - Pay) + pay = EmbeddedDocumentField(Pay) Doc.drop_collection() - Doc(name=u"Wilson Junior", - pay=Pay(value=150)).save() + Doc(name='Wilson Junior', pay=Pay(value=150)).save() + Doc(name='Isabella Luanna', pay=Pay(value=530)).save() + Doc(name='Tayza mariana', pay=Pay(value=165)).save() + Doc(name='Eliana Costa', pay=Pay(value=115)).save() - Doc(name=u"Isabella Luanna", - pay=Pay(value=530)).save() - - Doc(name=u"Tayza mariana", - pay=Pay(value=165)).save() - - Doc(name=u"Eliana Costa", - pay=Pay(value=115)).save() - - self.assertEqual( - Doc.objects.sum('pay.value'), - 960) + self.assertEqual(Doc.objects.sum('pay.value'), 960) def test_embedded_array_sum(self): class Pay(EmbeddedDocument): @@ -2958,26 +3073,16 @@ class QuerySetTest(unittest.TestCase): class Doc(Document): name = StringField() - pay = EmbeddedDocumentField( - Pay) + pay = EmbeddedDocumentField(Pay) Doc.drop_collection() - Doc(name=u"Wilson Junior", - pay=Pay(values=[150, 100])).save() + Doc(name='Wilson Junior', pay=Pay(values=[150, 100])).save() + Doc(name='Isabella Luanna', pay=Pay(values=[530, 100])).save() + Doc(name='Tayza mariana', pay=Pay(values=[165, 100])).save() + 
Doc(name='Eliana Costa', pay=Pay(values=[115, 100])).save() - Doc(name=u"Isabella Luanna", - pay=Pay(values=[530, 100])).save() - - Doc(name=u"Tayza mariana", - pay=Pay(values=[165, 100])).save() - - Doc(name=u"Eliana Costa", - pay=Pay(values=[115, 100])).save() - - self.assertEqual( - Doc.objects.sum('pay.values'), - 1360) + self.assertEqual(Doc.objects.sum('pay.values'), 1360) def test_array_sum(self): class Doc(Document): @@ -2990,9 +3095,7 @@ class QuerySetTest(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - self.assertEqual( - Doc.objects.sum('values'), - 1360) + self.assertEqual(Doc.objects.sum('values'), 1360) def test_distinct(self): """Ensure that the QuerySet.distinct method works. @@ -3169,13 +3272,11 @@ class QuerySetTest(unittest.TestCase): mark_twain = Author(name="Mark Twain") john_tolkien = Author(name="John Ronald Reuel Tolkien") - book = Book(title="Tom Sawyer", authors=[mark_twain]).save() - book = Book( - title="The Lord of the Rings", authors=[john_tolkien]).save() - book = Book( - title="The Stories", authors=[mark_twain, john_tolkien]).save() - authors = Book.objects.distinct("authors") + Book.objects.create(title="Tom Sawyer", authors=[mark_twain]) + Book.objects.create(title="The Lord of the Rings", authors=[john_tolkien]) + Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien]) + authors = Book.objects.distinct("authors") self.assertEqual(authors, [mark_twain, john_tolkien]) def test_distinct_ListField_EmbeddedDocumentField_EmbeddedDocumentField(self): @@ -3205,17 +3306,14 @@ class QuerySetTest(unittest.TestCase): mark_twain = Author(name="Mark Twain", country=scotland) john_tolkien = Author(name="John Ronald Reuel Tolkien", country=tibet) - book = Book(title="Tom Sawyer", authors=[mark_twain]).save() - book = Book( - title="The Lord of the Rings", authors=[john_tolkien]).save() - book = Book( - title="The Stories", authors=[mark_twain, john_tolkien]).save() - country_list = Book.objects.distinct("authors.country") + Book.objects.create(title="Tom Sawyer", authors=[mark_twain]) + Book.objects.create(title="The Lord of the Rings", authors=[john_tolkien]) + Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien]) + country_list = Book.objects.distinct("authors.country") self.assertEqual(country_list, [scotland, tibet]) continent_list = Book.objects.distinct("authors.country.continent") - self.assertEqual(continent_list, [europe, asia]) def test_distinct_ListField_ReferenceField(self): @@ -3247,7 +3345,7 @@ class QuerySetTest(unittest.TestCase): class BlogPost(Document): tags = ListField(StringField()) deleted = BooleanField(default=False) - date = DateTimeField(default=datetime.now) + date = DateTimeField(default=datetime.datetime.now) @queryset_manager def objects(cls, qryset): @@ -3583,7 +3681,7 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() - for i in xrange(10): + for i in range(10): Post(title="Post %s" % i).save() self.assertEqual(5, Post.objects.limit(5).skip(5).count(with_limit_and_skip=True)) @@ -3598,12 +3696,21 @@ class QuerySetTest(unittest.TestCase): pass MyDoc.drop_collection() - for i in xrange(0, 10): + for i in range(0, 10): MyDoc().save() self.assertEqual(MyDoc.objects.count(), 10) self.assertEqual(MyDoc.objects.none().count(), 0) + def test_count_list_embedded(self): + class B(EmbeddedDocument): + c = StringField() + + class A(Document): + b = ListField(EmbeddedDocumentField(B)) + + self.assertEqual(A.objects(b=[{'c': 'c'}]).count(), 0) + def 
test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ @@ -3645,7 +3752,7 @@ class QuerySetTest(unittest.TestCase): Number.drop_collection() - for i in xrange(1, 101): + for i in range(1, 101): t = Number(n=i) t.save() @@ -3792,11 +3899,9 @@ class QuerySetTest(unittest.TestCase): self.assertTrue(a in results) self.assertTrue(c in results) - def invalid_where(): + with self.assertRaises(TypeError): list(IntPair.objects.where(fielda__gte=3)) - self.assertRaises(TypeError, invalid_where) - def test_scalar(self): class Organization(Document): @@ -4052,7 +4157,7 @@ class QuerySetTest(unittest.TestCase): # Test larger slice __repr__ self.Person.objects.delete() - for i in xrange(55): + for i in range(55): self.Person(name='A%s' % i, age=i).save() self.assertEqual(self.Person.objects.scalar('name').count(), 55) @@ -4060,15 +4165,15 @@ class QuerySetTest(unittest.TestCase): "A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) self.assertEqual( "A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) - if PY3: - self.assertEqual( - "['A1', 'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3]) - self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by( + if six.PY3: + self.assertEqual("['A1', 'A2']", "%s" % self.Person.objects.order_by( + 'age').scalar('name')[1:3]) + self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by( 'age').scalar('name')[51:53]) else: - self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by( + self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by( 'age').scalar('name')[1:3]) - self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by( + self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by( 'age').scalar('name')[51:53]) # with_id and in_bulk @@ -4077,12 +4182,12 @@ class QuerySetTest(unittest.TestCase): self.Person.objects.scalar('name').with_id(person.id)) pks = self.Person.objects.order_by('age').scalar('pk')[1:3] - if PY3: - self.assertEqual("['A1', 'A2']", "%s" % sorted( - self.Person.objects.scalar('name').in_bulk(list(pks)).values())) + names = self.Person.objects.scalar('name').in_bulk(list(pks)).values() + if six.PY3: + expected = "['A1', 'A2']" else: - self.assertEqual("[u'A1', u'A2']", "%s" % sorted( - self.Person.objects.scalar('name').in_bulk(list(pks)).values())) + expected = "[u'A1', u'A2']" + self.assertEqual(expected, "%s" % sorted(names)) def test_elem_match(self): class Foo(EmbeddedDocument): @@ -4105,6 +4210,10 @@ class QuerySetTest(unittest.TestCase): Foo(shape="circle", color="purple", thick=False)]) b2.save() + b3 = Bar(foo=[Foo(shape="square", thick=True), + Foo(shape="circle", color="purple", thick=False)]) + b3.save() + ak = list( Bar.objects(foo__match={'shape': "square", "color": "purple"})) self.assertEqual([b1], ak) @@ -4116,6 +4225,22 @@ class QuerySetTest(unittest.TestCase): ak = list(Bar.objects(foo__match=Foo(shape="square", color="purple"))) self.assertEqual([b1], ak) + ak = list( + Bar.objects(foo__elemMatch={'shape': "square", "color__exists": True})) + self.assertEqual([b1, b2], ak) + + ak = list( + Bar.objects(foo__match={'shape': "square", "color__exists": True})) + self.assertEqual([b1, b2], ak) + + ak = list( + Bar.objects(foo__elemMatch={'shape': "square", "color__exists": False})) + self.assertEqual([b3], ak) + + ak = list( + Bar.objects(foo__match={'shape': "square", "color__exists": False})) + self.assertEqual([b3], ak) + def 
test_upsert_includes_cls(self): """Upserts should include _cls information for inheritable classes """ @@ -4156,7 +4281,11 @@ class QuerySetTest(unittest.TestCase): def test_read_preference(self): class Bar(Document): - pass + txt = StringField() + + meta = { + 'indexes': ['txt'] + } Bar.drop_collection() bars = list(Bar.objects(read_preference=ReadPreference.PRIMARY)) @@ -4168,9 +4297,51 @@ class QuerySetTest(unittest.TestCase): error_class = TypeError self.assertRaises(error_class, Bar.objects, read_preference='Primary') + # read_preference as a kwarg bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED) - self.assertEqual( - bars._read_preference, ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, + ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._cursor._Cursor__read_preference, + ReadPreference.SECONDARY_PREFERRED) + + # read_preference as a query set method + bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, + ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._cursor._Cursor__read_preference, + ReadPreference.SECONDARY_PREFERRED) + + # read_preference after skip + bars = Bar.objects.skip(1) \ + .read_preference(ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, + ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._cursor._Cursor__read_preference, + ReadPreference.SECONDARY_PREFERRED) + + # read_preference after limit + bars = Bar.objects.limit(1) \ + .read_preference(ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, + ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._cursor._Cursor__read_preference, + ReadPreference.SECONDARY_PREFERRED) + + # read_preference after order_by + bars = Bar.objects.order_by('txt') \ + .read_preference(ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, + ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._cursor._Cursor__read_preference, + ReadPreference.SECONDARY_PREFERRED) + + # read_preference after hint + bars = Bar.objects.hint([('txt', 1)]) \ + .read_preference(ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._read_preference, + ReadPreference.SECONDARY_PREFERRED) + self.assertEqual(bars._cursor._Cursor__read_preference, + ReadPreference.SECONDARY_PREFERRED) def test_json_simple(self): @@ -4206,7 +4377,7 @@ class QuerySetTest(unittest.TestCase): int_field = IntField(default=1) float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) - datetime_field = DateTimeField(default=datetime.now) + datetime_field = DateTimeField(default=datetime.datetime.now) embedded_document_field = EmbeddedDocumentField( EmbeddedDoc, default=lambda: EmbeddedDoc()) list_field = ListField(default=lambda: [1, 2, 3]) @@ -4216,7 +4387,7 @@ class QuerySetTest(unittest.TestCase): Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) - complex_datetime_field = ComplexDateTimeField(default=datetime.now) + complex_datetime_field = ComplexDateTimeField(default=datetime.datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) generic_reference_field = GenericReferenceField( @@ -4368,7 +4539,7 @@ class QuerySetTest(unittest.TestCase): name = StringField() Person.drop_collection() - for i in xrange(100): + for i in range(100): Person(name="No: %s" % i).save() with 
query_counter() as q: @@ -4399,7 +4570,7 @@ class QuerySetTest(unittest.TestCase): name = StringField() Person.drop_collection() - for i in xrange(100): + for i in range(100): Person(name="No: %s" % i).save() with query_counter() as q: @@ -4443,7 +4614,7 @@ class QuerySetTest(unittest.TestCase): fields = DictField() Noddy.drop_collection() - for i in xrange(100): + for i in range(100): noddy = Noddy() for j in range(20): noddy.fields["key" + str(j)] = "value " + str(j) @@ -4455,7 +4626,9 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(counter, 100) self.assertEqual(len(list(docs)), 100) - self.assertRaises(TypeError, lambda: len(docs)) + + with self.assertRaises(TypeError): + len(docs) with query_counter() as q: self.assertEqual(q, 0) @@ -4563,8 +4736,7 @@ class QuerySetTest(unittest.TestCase): B.drop_collection() a = A.objects.create(id='custom_id') - - b = B.objects.create(a=a) + B.objects.create(a=a) self.assertEqual(B.objects.count(), 1) self.assertEqual(B.objects.get(a=a).a, a) @@ -4645,7 +4817,7 @@ class QuerySetTest(unittest.TestCase): name = StringField() Person.drop_collection() - for i in xrange(100): + for i in range(100): Person(name="No: %s" % i).save() with query_counter() as q: @@ -4769,10 +4941,10 @@ class QuerySetTest(unittest.TestCase): ]) def test_delete_count(self): - [self.Person(name="User {0}".format(i), age=i * 10).save() for i in xrange(1, 4)] + [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] self.assertEqual(self.Person.objects().delete(), 3) # test ordinary QuerySey delete count - [self.Person(name="User {0}".format(i), age=i * 10).save() for i in xrange(1, 4)] + [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] self.assertEqual(self.Person.objects().skip(1).delete(), 2) # test Document delete with existing documents @@ -4781,12 +4953,14 @@ class QuerySetTest(unittest.TestCase): def test_max_time_ms(self): # 778: max_time_ms can get only int or None as input - self.assertRaises(TypeError, self.Person.objects(name="name").max_time_ms, "not a number") + self.assertRaises(TypeError, + self.Person.objects(name="name").max_time_ms, + 'not a number') def test_subclass_field_query(self): class Animal(Document): is_mamal = BooleanField() - meta = dict(allow_inheritance=True) + meta = {'allow_inheritance': True} class Cat(Animal): whiskers_length = FloatField() @@ -4794,6 +4968,8 @@ class QuerySetTest(unittest.TestCase): class ScottishCat(Cat): folded_ears = BooleanField() + Animal.drop_collection() + Animal(is_mamal=False).save() Cat(is_mamal=True, whiskers_length=5.1).save() ScottishCat(is_mamal=True, folded_ears=True).save() @@ -4822,5 +4998,85 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(1, Doc.objects(item__type__="axe").count()) + def test_len_during_iteration(self): + """Tests that calling len on a queyset during iteration doesn't + stop paging. + """ + class Data(Document): + pass + + for i in range(300): + Data().save() + + records = Data.objects.limit(250) + + # This should pull all 250 docs from mongo and populate the result + # cache + len(records) + + # Assert that iterating over documents in the qs touches every + # document even if we call len(qs) midway through the iteration. + for i, r in enumerate(records): + if i == 58: + len(records) + self.assertEqual(i, 249) + + # Assert the same behavior is true even if we didn't pre-populate the + # result cache. 
+ records = Data.objects.limit(250) + for i, r in enumerate(records): + if i == 58: + len(records) + self.assertEqual(i, 249) + + def test_iteration_within_iteration(self): + """You should be able to reliably iterate over all the documents + in a given queryset even if there are multiple iterations of it + happening at the same time. + """ + class Data(Document): + pass + + for i in range(300): + Data().save() + + qs = Data.objects.limit(250) + for i, doc in enumerate(qs): + for j, doc2 in enumerate(qs): + pass + + self.assertEqual(i, 249) + self.assertEqual(j, 249) + + def test_in_operator_on_non_iterable(self): + """Ensure that using the `__in` operator on a non-iterable raises an + error. + """ + class User(Document): + name = StringField() + + class BlogPost(Document): + content = StringField() + authors = ListField(ReferenceField(User)) + + User.drop_collection() + BlogPost.drop_collection() + + author = User.objects.create(name='Test User') + post = BlogPost.objects.create(content='Had a good coffee today...', + authors=[author]) + + # Make sure using `__in` with a list works + blog_posts = BlogPost.objects(authors__in=[author]) + self.assertEqual(list(blog_posts), [post]) + + # Using `__in` with a non-iterable should raise a TypeError + self.assertRaises(TypeError, BlogPost.objects(authors__in=author.pk).count) + + # Using `__in` with a `Document` (which is seemingly iterable but not + # in a way we'd expect) should raise a TypeError, too + self.assertRaises(TypeError, BlogPost.objects(authors__in=author).count) + + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index a543317a..20ab0b3f 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -1,11 +1,7 @@ -import sys -sys.path[0:0] = [""] - import unittest from mongoengine import * -from mongoengine.queryset import Q -from mongoengine.queryset import transform +from mongoengine.queryset import Q, transform __all__ = ("TransformTest",) @@ -41,8 +37,8 @@ class TransformTest(unittest.TestCase): DicDoc.drop_collection() Doc.drop_collection() + DicDoc().save() doc = Doc().save() - dic_doc = DicDoc().save() for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) @@ -55,7 +51,6 @@ class TransformTest(unittest.TestCase): update = transform.update(DicDoc, pull__dictField__test=doc) self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) - def test_query_field_name(self): """Ensure that the correct field name is used when querying. 
""" @@ -156,26 +151,33 @@ class TransformTest(unittest.TestCase): class Doc(Document): meta = {'allow_inheritance': False} - raw_query = Doc.objects(__raw__={'deleted': False, - 'scraped': 'yes', - '$nor': [{'views.extracted': 'no'}, - {'attachments.views.extracted':'no'}] - })._query + raw_query = Doc.objects(__raw__={ + 'deleted': False, + 'scraped': 'yes', + '$nor': [ + {'views.extracted': 'no'}, + {'attachments.views.extracted': 'no'} + ] + })._query - expected = {'deleted': False, 'scraped': 'yes', - '$nor': [{'views.extracted': 'no'}, - {'attachments.views.extracted': 'no'}]} - self.assertEqual(expected, raw_query) + self.assertEqual(raw_query, { + 'deleted': False, + 'scraped': 'yes', + '$nor': [ + {'views.extracted': 'no'}, + {'attachments.views.extracted': 'no'} + ] + }) def test_geojson_PointField(self): class Location(Document): loc = PointField() update = transform.update(Location, set__loc=[1, 2]) - self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}}) + self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) - update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1,2]}) - self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}}) + update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]}) + self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) def test_geojson_LineStringField(self): class Location(Document): @@ -224,6 +226,10 @@ class TransformTest(unittest.TestCase): self.assertEqual(1, Doc.objects(item__type__="axe").count()) self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count()) + Doc.objects(id=doc.id).update(set__item__type__='sword') + self.assertEqual(1, Doc.objects(item__type__="sword").count()) + self.assertEqual(0, Doc.objects(item__type__="axe").count()) + def test_understandable_error_raised(self): class Event(Document): title = StringField() @@ -232,7 +238,9 @@ class TransformTest(unittest.TestCase): box = [(35.0, -125.0), (40.0, -100.0)] # I *meant* to execute location__within_box=box events = Event.objects(location__within=box) - self.assertRaises(InvalidQueryError, lambda: events.count()) + with self.assertRaises(InvalidQueryError): + events.count() + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py index 0bb6f69d..6f020e88 100644 --- a/tests/queryset/visitor.py +++ b/tests/queryset/visitor.py @@ -1,14 +1,12 @@ -import sys -sys.path[0:0] = [""] - +import datetime +import re import unittest from bson import ObjectId -from datetime import datetime from mongoengine import * -from mongoengine.queryset import Q from mongoengine.errors import InvalidQueryError +from mongoengine.queryset import Q __all__ = ("QTest",) @@ -132,12 +130,12 @@ class QTest(unittest.TestCase): TestDoc(x=10).save() TestDoc(y=True).save() - self.assertEqual(query, - {'$and': [ - {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]}, - {'$or': [{'x': {'$lt': 100}}, {'y': True}]} - ]}) - + self.assertEqual(query, { + '$and': [ + {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]}, + {'$or': [{'x': {'$lt': 100}}, {'y': True}]} + ] + }) self.assertEqual(2, TestDoc.objects(q1 & q2).count()) def test_or_and_or_combination(self): @@ -157,15 +155,14 @@ class QTest(unittest.TestCase): q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) query = (q1 | q2).to_query(TestDoc) - self.assertEqual(query, - {'$or': [ + self.assertEqual(query, { + '$or': [ 
{'$and': [{'x': {'$gt': 0}}, {'$or': [{'y': True}, {'y': {'$exists': False}}]}]}, {'$and': [{'x': {'$lt': 100}}, {'$or': [{'y': False}, {'y': {'$exists': False}}]}]} - ]} - ) - + ] + }) self.assertEqual(2, TestDoc.objects(q1 | q2).count()) def test_multiple_occurence_in_field(self): @@ -188,7 +185,7 @@ class QTest(unittest.TestCase): x = IntField() TestDoc.drop_collection() - for i in xrange(1, 101): + for i in range(1, 101): t = TestDoc(x=i) t.save() @@ -215,19 +212,19 @@ class QTest(unittest.TestCase): BlogPost.drop_collection() - post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False) + post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False) post1.save() - post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True) + post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True) post2.save() post3 = BlogPost(title='Test 3', published=True) post3.save() - post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8)) + post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8)) post4.save() - post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15)) + post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15)) post5.save() post6 = BlogPost(title='Test 1', published=False) @@ -250,7 +247,7 @@ class QTest(unittest.TestCase): self.assertTrue(all(obj.id in posts for obj in published_posts)) # Check Q object combination - date = datetime(2010, 1, 10) + date = datetime.datetime(2010, 1, 10) q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) posts = [post.id for post in q] @@ -271,12 +268,13 @@ class QTest(unittest.TestCase): self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3) # Test invalid query objs - def wrong_query_objs(): + with self.assertRaises(InvalidQueryError): self.Person.objects('user1') - def wrong_query_objs_filter(): - self.Person.objects('user1') - self.assertRaises(InvalidQueryError, wrong_query_objs) - self.assertRaises(InvalidQueryError, wrong_query_objs_filter) + + # filter should fail, too + with self.assertRaises(InvalidQueryError): + self.Person.objects.filter('user1') + def test_q_regex(self): """Ensure that Q objects can be queried using regexes. 
@@ -284,7 +282,6 @@ class QTest(unittest.TestCase): person = self.Person(name='Guido van Rossum') person.save() - import re obj = self.Person.objects(Q(name=re.compile('^Gui'))).first() self.assertEqual(obj, person) obj = self.Person.objects(Q(name=re.compile('^gui'))).first() diff --git a/tests/test_connection.py b/tests/test_connection.py index e9477b79..a1d3bfb6 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,13 +1,11 @@ -import sys import datetime from pymongo.errors import OperationFailure -sys.path[0:0] = [""] - try: import unittest2 as unittest except ImportError: import unittest +from nose.plugins.skip import SkipTest import pymongo from bson.tz_util import utc @@ -18,7 +16,8 @@ from mongoengine import ( ) from mongoengine.python_support import IS_PYMONGO_3 import mongoengine.connection -from mongoengine.connection import get_db, get_connection, ConnectionError +from mongoengine.connection import (MongoEngineConnectionError, get_db, + get_connection) def get_tz_awareness(connection): @@ -51,6 +50,84 @@ class ConnectionTest(unittest.TestCase): conn = get_connection('testdb') self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + def test_connect_in_mocking(self): + """Ensure that the connect() method works properly in mocking. + """ + try: + import mongomock + except ImportError: + raise SkipTest('you need mongomock installed to run this testcase') + + connect('mongoenginetest', host='mongomock://localhost') + conn = get_connection() + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2') + conn = get_connection('testdb2') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3') + conn = get_connection('testdb3') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect('mongoenginetest4', is_mock=True, alias='testdb4') + conn = get_connection('testdb4') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5') + conn = get_connection('testdb5') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6') + conn = get_connection('testdb6') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7') + conn = get_connection('testdb7') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + def test_connect_with_host_list(self): + """Ensure that the connect() method works when host is a list + + Uses mongomock to test w/o needing multiple mongod/mongos processes + """ + try: + import mongomock + except ImportError: + raise SkipTest('you need mongomock installed to run this testcase') + + connect(host=['mongomock://localhost']) + conn = get_connection() + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2') + conn = get_connection('testdb2') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host=['localhost'], is_mock=True, alias='testdb3') + conn = get_connection('testdb3') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4') + conn = get_connection('testdb4') + 
self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5') + conn = get_connection('testdb5') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6') + conn = get_connection('testdb6') + self.assertTrue(isinstance(conn, mongomock.MongoClient)) + + def test_disconnect(self): + """Ensure that the disconnect() method works properly + """ + conn1 = connect('mongoenginetest') + mongoengine.connection.disconnect() + conn2 = connect('mongoenginetest') + self.assertTrue(conn1 is not conn2) + def test_sharing_connections(self): """Ensure that connections are shared when the connection settings are exactly the same """ @@ -80,7 +157,10 @@ class ConnectionTest(unittest.TestCase): c.mongoenginetest.add_user("username", "password") if not IS_PYMONGO_3: - self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') + self.assertRaises( + MongoEngineConnectionError, connect, 'testdb_uri_bad', + host='mongodb://test:password@localhost' + ) connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') @@ -95,19 +175,9 @@ class ConnectionTest(unittest.TestCase): c.mongoenginetest.system.users.remove({}) def test_connect_uri_without_db(self): - """Ensure connect() method works properly with uri's without database_name + """Ensure connect() method works properly if the URI doesn't + include a database name. """ - c = connect(db='mongoenginetest', alias='admin') - c.admin.system.users.remove({}) - c.mongoenginetest.system.users.remove({}) - - c.admin.add_user("admin", "password") - c.admin.authenticate("admin", "password") - c.mongoenginetest.add_user("username", "password") - - if not IS_PYMONGO_3: - self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') - connect("mongoenginetest", host='mongodb://localhost/') conn = get_connection() @@ -117,8 +187,44 @@ class ConnectionTest(unittest.TestCase): self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest') - c.admin.system.users.remove({}) - c.mongoenginetest.system.users.remove({}) + def test_connect_uri_default_db(self): + """Ensure connect() defaults to the right database name if + the URI and the database_name don't explicitly specify it. + """ + connect(host='mongodb://localhost/') + + conn = get_connection() + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + + db = get_db() + self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertEqual(db.name, 'test') + + def test_connect_uri_with_replicaset(self): + """Ensure connect() works when specifying a replicaSet.""" + if IS_PYMONGO_3: + c = connect(host='mongodb://localhost/test?replicaSet=local-rs') + db = get_db() + self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertEqual(db.name, 'test') + else: + # PyMongo < v3.x raises an exception: + # "localhost:27017 is not a member of replica set local-rs" + with self.assertRaises(MongoEngineConnectionError): + c = connect(host='mongodb://localhost/test?replicaSet=local-rs') + + def test_uri_without_credentials_doesnt_override_conn_settings(self): + """Ensure connect() uses the username & password params if the URI + doesn't explicitly specify them. 
+ """ + c = connect(host='mongodb://localhost/mongoenginetest', + username='user', + password='pass') + + # OperationFailure means that mongoengine attempted authentication + # w/ the provided username/password and failed - that's the desired + # behavior. If the MongoDB URI would override the credentials + self.assertRaises(OperationFailure, get_db) def test_connect_uri_with_authsource(self): """Ensure that the connect() method works well with @@ -137,13 +243,14 @@ class ConnectionTest(unittest.TestCase): self.assertRaises(OperationFailure, test_conn.server_info) else: self.assertRaises( - ConnectionError, connect, 'mongoenginetest', alias='test1', + MongoEngineConnectionError, connect, 'mongoenginetest', + alias='test1', host='mongodb://username2:password@localhost/mongoenginetest' ) - self.assertRaises(ConnectionError, get_db, 'test1') + self.assertRaises(MongoEngineConnectionError, get_db, 'test1') # Authentication succeeds with "authSource" - test_conn2 = connect( + connect( 'mongoenginetest', alias='test2', host=('mongodb://username2:password@localhost/' 'mongoenginetest?authSource=admin') @@ -161,7 +268,7 @@ class ConnectionTest(unittest.TestCase): """ register_connection('testdb', 'mongoenginetest2') - self.assertRaises(ConnectionError, get_connection) + self.assertRaises(MongoEngineConnectionError, get_connection) conn = get_connection('testdb') self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) @@ -178,8 +285,7 @@ class ConnectionTest(unittest.TestCase): self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) def test_connection_kwargs(self): - """Ensure that connection kwargs get passed to pymongo. - """ + """Ensure that connection kwargs get passed to pymongo.""" connect('mongoenginetest', alias='t1', tz_aware=True) conn = get_connection('t1') @@ -189,6 +295,45 @@ class ConnectionTest(unittest.TestCase): conn = get_connection('t2') self.assertFalse(get_tz_awareness(conn)) + def test_connection_pool_via_kwarg(self): + """Ensure we can specify a max connection pool size using + a connection kwarg. + """ + # Use "max_pool_size" or "maxpoolsize" depending on PyMongo version + # (former was changed to the latter as described in + # https://jira.mongodb.org/browse/PYTHON-854). + # TODO remove once PyMongo < 3.0 support is dropped + if pymongo.version_tuple[0] >= 3: + pool_size_kwargs = {'maxpoolsize': 100} + else: + pool_size_kwargs = {'max_pool_size': 100} + + conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs) + self.assertEqual(conn.max_pool_size, 100) + + def test_connection_pool_via_uri(self): + """Ensure we can specify a max connection pool size using + an option in a connection URI. + """ + if pymongo.version_tuple[0] == 2 and pymongo.version_tuple[1] < 9: + raise SkipTest('maxpoolsize as a URI option is only supported in PyMongo v2.9+') + + conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri') + self.assertEqual(conn.max_pool_size, 100) + + def test_write_concern(self): + """Ensure write concern can be specified in connect() via + a kwarg or as part of the connection URI. 
+ """ + conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true') + conn2 = connect('testing', alias='conn2', w=1, j=True) + if IS_PYMONGO_3: + self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True}) + self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True}) + else: + self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True}) + self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True}) + def test_datetime(self): connect('mongoenginetest', tz_aware=True) d = datetime.datetime(2010, 5, 5, tzinfo=utc) diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index c201a5fc..0f6bf815 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -1,5 +1,3 @@ -import sys -sys.path[0:0] = [""] import unittest from mongoengine import * @@ -79,7 +77,7 @@ class ContextManagersTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - for i in xrange(1, 51): + for i in range(1, 51): User(name='user %s' % i).save() user = User.objects.first() @@ -117,7 +115,7 @@ class ContextManagersTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - for i in xrange(1, 51): + for i in range(1, 51): User(name='user %s' % i).save() user = User.objects.first() @@ -195,7 +193,7 @@ class ContextManagersTest(unittest.TestCase): with query_counter() as q: self.assertEqual(0, q) - for i in xrange(1, 51): + for i in range(1, 51): db.test.find({}).count() self.assertEqual(50, q) diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 716df651..6830a188 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,5 +1,6 @@ import unittest -from mongoengine.base.datastructures import StrictDict, SemiStrictDict + +from mongoengine.base.datastructures import StrictDict, SemiStrictDict class TestStrictDict(unittest.TestCase): @@ -13,9 +14,18 @@ class TestStrictDict(unittest.TestCase): d = self.dtype(a=1, b=1, c=1) self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) + def test_repr(self): + d = self.dtype(a=1, b=2, c=3) + self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}') + + # make sure quotes are escaped properly + d = self.dtype(a='"', b="'", c="") + self.assertEqual(repr(d), '{"a": \'"\', "b": "\'", "c": \'\'}') + def test_init_fails_on_nonexisting_attrs(self): - self.assertRaises(AttributeError, lambda: self.dtype(a=1, b=2, d=3)) - + with self.assertRaises(AttributeError): + self.dtype(a=1, b=2, d=3) + def test_eq(self): d = self.dtype(a=1, b=1, c=1) dd = self.dtype(a=1, b=1, c=1) @@ -24,7 +34,7 @@ class TestStrictDict(unittest.TestCase): g = self.strict_dict_class(("a", "b", "c", "d"))(a=1, b=1, c=1, d=1) h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1) i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2) - + self.assertEqual(d, dd) self.assertNotEqual(d, e) self.assertNotEqual(d, f) @@ -37,20 +47,18 @@ class TestStrictDict(unittest.TestCase): d = self.dtype() d.a = 1 self.assertEqual(d.a, 1) - self.assertRaises(AttributeError, lambda: d.b) - + self.assertRaises(AttributeError, getattr, d, 'b') + def test_setattr_raises_on_nonexisting_attr(self): d = self.dtype() - - def _f(): + with self.assertRaises(AttributeError): d.x = 1 - self.assertRaises(AttributeError, _f) - + def test_setattr_getattr_special(self): d = self.strict_dict_class(["items"]) d.items = 1 self.assertEqual(d.items, 1) - + def test_get(self): d = self.dtype(a=1) self.assertEqual(d.get('a'), 1) @@ -88,7 +96,7 @@ class TestSemiSrictDict(TestStrictDict): def 
test_init_succeeds_with_nonexisting_attrs(self): d = self.dtype(a=1, b=1, c=1, x=2) self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2)) - + def test_iter_with_nonexisting_attrs(self): d = self.dtype(a=1, b=1, c=1, x=2) self.assertEqual(list(d), ['a', 'b', 'c', 'x']) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index e1ae3740..7f58a85b 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] import unittest from bson import DBRef, ObjectId @@ -12,9 +10,13 @@ from mongoengine.context_managers import query_counter class FieldTest(unittest.TestCase): - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() + @classmethod + def setUpClass(cls): + cls.db = connect(db='mongoenginetest') + + @classmethod + def tearDownClass(cls): + cls.db.drop_database('mongoenginetest') def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. @@ -28,7 +30,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - for i in xrange(1, 51): + for i in range(1, 51): user = User(name='user %s' % i) user.save() @@ -86,7 +88,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - for i in xrange(1, 51): + for i in range(1, 51): user = User(name='user %s' % i) user.save() @@ -158,7 +160,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - for i in xrange(1, 26): + for i in range(1, 26): user = User(name='user %s' % i) user.save() @@ -304,6 +306,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() Post.drop_collection() + SimpleList.drop_collection() u1 = User.objects.create(name='u1') u2 = User.objects.create(name='u2') @@ -435,7 +438,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() members = [] - for i in xrange(1, 51): + for i in range(1, 51): a = UserA(name='User A %s' % i) a.save() @@ -526,7 +529,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() members = [] - for i in xrange(1, 51): + for i in range(1, 51): a = UserA(name='User A %s' % i) a.save() @@ -609,15 +612,15 @@ class FieldTest(unittest.TestCase): Group.drop_collection() members = [] - for i in xrange(1, 51): + for i in range(1, 51): user = User(name='user %s' % i) user.save() members.append(user) - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) group.save() - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) group.save() with query_counter() as q: @@ -682,7 +685,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() members = [] - for i in xrange(1, 51): + for i in range(1, 51): a = UserA(name='User A %s' % i) a.save() @@ -694,9 +697,9 @@ class FieldTest(unittest.TestCase): members += [a, b, c] - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) group.save() - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) group.save() with query_counter() as q: @@ -778,16 +781,16 @@ class FieldTest(unittest.TestCase): Group.drop_collection() members = [] - for i in xrange(1, 51): + for i in range(1, 51): a = UserA(name='User A %s' % i) a.save() members += [a] - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) 
group.save() - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) group.save() with query_counter() as q: @@ -861,7 +864,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() members = [] - for i in xrange(1, 51): + for i in range(1, 51): a = UserA(name='User A %s' % i) a.save() @@ -873,9 +876,9 @@ class FieldTest(unittest.TestCase): members += [a, b, c] - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) group.save() - group = Group(members=dict([(str(u.id), u) for u in members])) + group = Group(members={str(u.id): u for u in members}) group.save() with query_counter() as q: @@ -1098,7 +1101,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - for i in xrange(1, 51): + for i in range(1, 51): User(name='user %s' % i).save() Group(name="Test", members=User.objects).save() @@ -1127,7 +1130,7 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - for i in xrange(1, 51): + for i in range(1, 51): User(name='user %s' % i).save() Group(name="Test", members=User.objects).save() @@ -1164,7 +1167,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() members = [] - for i in xrange(1, 51): + for i in range(1, 51): a = UserA(name='User A %s' % i).save() b = UserB(name='User B %s' % i).save() c = UserC(name='User C %s' % i).save() diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py index 361cff41..a53f5903 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -1,6 +1,3 @@ -import sys - -sys.path[0:0] = [""] import unittest from pymongo import ReadPreference @@ -18,7 +15,7 @@ else: import mongoengine from mongoengine import * -from mongoengine.connection import ConnectionError +from mongoengine.connection import MongoEngineConnectionError class ConnectionTest(unittest.TestCase): @@ -41,7 +38,7 @@ class ConnectionTest(unittest.TestCase): conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=READ_PREF) - except ConnectionError, e: + except MongoEngineConnectionError as e: return if not isinstance(conn, CONN_CLASS): diff --git a/tests/test_signals.py b/tests/test_signals.py index 8672925c..df687d0e 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -import sys -sys.path[0:0] = [""] import unittest from mongoengine import * @@ -25,6 +23,8 @@ class SignalTests(unittest.TestCase): connect(db='mongoenginetest') class Author(Document): + # Make the id deterministic for easier testing + id = SequenceField(primary_key=True) name = StringField() def __unicode__(self): @@ -33,7 +33,7 @@ class SignalTests(unittest.TestCase): @classmethod def pre_init(cls, sender, document, *args, **kwargs): signal_output.append('pre_init signal, %s' % cls.__name__) - signal_output.append(str(kwargs['values'])) + signal_output.append(kwargs['values']) @classmethod def post_init(cls, sender, document, **kwargs): @@ -43,48 +43,55 @@ class SignalTests(unittest.TestCase): @classmethod def pre_save(cls, sender, document, **kwargs): signal_output.append('pre_save signal, %s' % document) + signal_output.append(kwargs) @classmethod def pre_save_post_validation(cls, sender, document, **kwargs): signal_output.append('pre_save_post_validation signal, %s' % document) - if 'created' in kwargs: - if kwargs['created']: - 
signal_output.append('Is created') - else: - signal_output.append('Is updated') + if kwargs.pop('created', False): + signal_output.append('Is created') + else: + signal_output.append('Is updated') + signal_output.append(kwargs) @classmethod def post_save(cls, sender, document, **kwargs): dirty_keys = document._delta()[0].keys() + document._delta()[1].keys() signal_output.append('post_save signal, %s' % document) signal_output.append('post_save dirty keys, %s' % dirty_keys) - if 'created' in kwargs: - if kwargs['created']: - signal_output.append('Is created') - else: - signal_output.append('Is updated') + if kwargs.pop('created', False): + signal_output.append('Is created') + else: + signal_output.append('Is updated') + signal_output.append(kwargs) @classmethod def pre_delete(cls, sender, document, **kwargs): signal_output.append('pre_delete signal, %s' % document) + signal_output.append(kwargs) @classmethod def post_delete(cls, sender, document, **kwargs): signal_output.append('post_delete signal, %s' % document) + signal_output.append(kwargs) @classmethod def pre_bulk_insert(cls, sender, documents, **kwargs): signal_output.append('pre_bulk_insert signal, %s' % documents) + signal_output.append(kwargs) @classmethod def post_bulk_insert(cls, sender, documents, **kwargs): signal_output.append('post_bulk_insert signal, %s' % documents) - if kwargs.get('loaded', False): + if kwargs.pop('loaded', False): signal_output.append('Is loaded') else: signal_output.append('Not loaded') + signal_output.append(kwargs) + self.Author = Author Author.drop_collection() + Author.id.set_next_value(0) class Another(Document): @@ -96,10 +103,12 @@ class SignalTests(unittest.TestCase): @classmethod def pre_delete(cls, sender, document, **kwargs): signal_output.append('pre_delete signal, %s' % document) + signal_output.append(kwargs) @classmethod def post_delete(cls, sender, document, **kwargs): signal_output.append('post_delete signal, %s' % document) + signal_output.append(kwargs) self.Another = Another Another.drop_collection() @@ -118,6 +127,41 @@ class SignalTests(unittest.TestCase): self.ExplicitId = ExplicitId ExplicitId.drop_collection() + class Post(Document): + title = StringField() + content = StringField() + active = BooleanField(default=False) + + def __unicode__(self): + return self.title + + @classmethod + def pre_bulk_insert(cls, sender, documents, **kwargs): + signal_output.append('pre_bulk_insert signal, %s' % + [(doc, {'active': documents[n].active}) + for n, doc in enumerate(documents)]) + + # make changes here, this is just an example - + # it could be anything that needs pre-validation or looks-ups before bulk bulk inserting + for document in documents: + if not document.active: + document.active = True + signal_output.append(kwargs) + + @classmethod + def post_bulk_insert(cls, sender, documents, **kwargs): + signal_output.append('post_bulk_insert signal, %s' % + [(doc, {'active': documents[n].active}) + for n, doc in enumerate(documents)]) + if kwargs.pop('loaded', False): + signal_output.append('Is loaded') + else: + signal_output.append('Not loaded') + signal_output.append(kwargs) + + self.Post = Post + Post.drop_collection() + # Save up the number of connected signals so that we can check at the # end that all the signals we register get properly unregistered self.pre_signals = ( @@ -147,6 +191,9 @@ class SignalTests(unittest.TestCase): signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId) + signals.pre_bulk_insert.connect(Post.pre_bulk_insert, sender=Post) + 
signals.post_bulk_insert.connect(Post.post_bulk_insert, sender=Post) + def tearDown(self): signals.pre_init.disconnect(self.Author.pre_init) signals.post_init.disconnect(self.Author.post_init) @@ -163,6 +210,9 @@ class SignalTests(unittest.TestCase): signals.post_save.disconnect(self.ExplicitId.post_save) + signals.pre_bulk_insert.disconnect(self.Post.pre_bulk_insert) + signals.post_bulk_insert.disconnect(self.Post.post_bulk_insert) + # Check that all our signals got disconnected properly. post_signals = ( len(signals.pre_init.receivers), @@ -199,66 +249,121 @@ class SignalTests(unittest.TestCase): a.save() self.get_signal_output(lambda: None) # eliminate signal output a1 = self.Author.objects(name='Bill Shakespeare')[0] - + self.assertEqual(self.get_signal_output(create_author), [ "pre_init signal, Author", - "{'name': 'Bill Shakespeare'}", + {'name': 'Bill Shakespeare'}, "post_init signal, Bill Shakespeare, document._created = True", ]) a1 = self.Author(name='Bill Shakespeare') self.assertEqual(self.get_signal_output(a1.save), [ "pre_save signal, Bill Shakespeare", + {}, "pre_save_post_validation signal, Bill Shakespeare", "Is created", + {}, "post_save signal, Bill Shakespeare", "post_save dirty keys, ['name']", - "Is created" + "Is created", + {} ]) a1.reload() a1.name = 'William Shakespeare' self.assertEqual(self.get_signal_output(a1.save), [ "pre_save signal, William Shakespeare", + {}, "pre_save_post_validation signal, William Shakespeare", "Is updated", + {}, "post_save signal, William Shakespeare", "post_save dirty keys, ['name']", - "Is updated" + "Is updated", + {} ]) self.assertEqual(self.get_signal_output(a1.delete), [ 'pre_delete signal, William Shakespeare', + {}, 'post_delete signal, William Shakespeare', + {} ]) - signal_output = self.get_signal_output(load_existing_author) - # test signal_output lines separately, because of random ObjectID after object load - self.assertEqual(signal_output[0], + self.assertEqual(self.get_signal_output(load_existing_author), [ "pre_init signal, Author", - ) - self.assertEqual(signal_output[2], - "post_init signal, Bill Shakespeare, document._created = False", - ) + {'id': 2, 'name': 'Bill Shakespeare'}, + "post_init signal, Bill Shakespeare, document._created = False" + ]) - - signal_output = self.get_signal_output(bulk_create_author_with_load) - - # The output of this signal is not entirely deterministic. The reloaded - # object will have an object ID. Hence, we only check part of the output - self.assertEqual(signal_output[3], "pre_bulk_insert signal, []" - ) - self.assertEqual(signal_output[-2:], - ["post_bulk_insert signal, []", - "Is loaded",]) + self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [ + 'pre_init signal, Author', + {'name': 'Bill Shakespeare'}, + 'post_init signal, Bill Shakespeare, document._created = True', + 'pre_bulk_insert signal, []', + {}, + 'pre_init signal, Author', + {'id': 3, 'name': 'Bill Shakespeare'}, + 'post_init signal, Bill Shakespeare, document._created = False', + 'post_bulk_insert signal, []', + 'Is loaded', + {} + ]) self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [ "pre_init signal, Author", - "{'name': 'Bill Shakespeare'}", + {'name': 'Bill Shakespeare'}, "post_init signal, Bill Shakespeare, document._created = True", "pre_bulk_insert signal, []", + {}, "post_bulk_insert signal, []", "Not loaded", + {} + ]) + + def test_signal_kwargs(self): + """ Make sure signal_kwargs is passed to signals calls. 
""" + + def live_and_let_die(): + a = self.Author(name='Bill Shakespeare') + a.save(signal_kwargs={'live': True, 'die': False}) + a.delete(signal_kwargs={'live': False, 'die': True}) + + self.assertEqual(self.get_signal_output(live_and_let_die), [ + "pre_init signal, Author", + {'name': 'Bill Shakespeare'}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_save signal, Bill Shakespeare", + {'die': False, 'live': True}, + "pre_save_post_validation signal, Bill Shakespeare", + "Is created", + {'die': False, 'live': True}, + "post_save signal, Bill Shakespeare", + "post_save dirty keys, ['name']", + "Is created", + {'die': False, 'live': True}, + 'pre_delete signal, Bill Shakespeare', + {'die': True, 'live': False}, + 'post_delete signal, Bill Shakespeare', + {'die': True, 'live': False} + ]) + + def bulk_create_author(): + a1 = self.Author(name='Bill Shakespeare') + self.Author.objects.insert([a1], signal_kwargs={'key': True}) + + self.assertEqual(self.get_signal_output(bulk_create_author), [ + 'pre_init signal, Author', + {'name': 'Bill Shakespeare'}, + 'post_init signal, Bill Shakespeare, document._created = True', + 'pre_bulk_insert signal, []', + {'key': True}, + 'pre_init signal, Author', + {'id': 2, 'name': 'Bill Shakespeare'}, + 'post_init signal, Bill Shakespeare, document._created = False', + 'post_bulk_insert signal, []', + 'Is loaded', + {'key': True} ]) def test_queryset_delete_signals(self): @@ -267,7 +372,9 @@ class SignalTests(unittest.TestCase): self.Another(name='Bill Shakespeare').save() self.assertEqual(self.get_signal_output(self.Another.objects.delete), [ 'pre_delete signal, Bill Shakespeare', + {}, 'post_delete signal, Bill Shakespeare', + {} ]) def test_signals_with_explicit_doc_ids(self): @@ -306,6 +413,23 @@ class SignalTests(unittest.TestCase): ei.switch_db("testdb-1", keep_created=False) self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + def test_signals_bulk_insert(self): + def bulk_set_active_post(): + posts = [ + self.Post(title='Post 1'), + self.Post(title='Post 2'), + self.Post(title='Post 3') + ] + self.Post.objects.insert(posts) + + results = self.get_signal_output(bulk_set_active_post) + self.assertEqual(results, [ + "pre_bulk_insert signal, [(, {'active': False}), (, {'active': False}), (, {'active': False})]", + {}, + "post_bulk_insert signal, [(, {'active': True}), (, {'active': True}), (, {'active': True})]", + 'Is loaded', + {} + ]) if __name__ == '__main__': unittest.main() diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000..128bbff0 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,22 @@ +import unittest + +from mongoengine import connect +from mongoengine.connection import get_db + +MONGO_TEST_DB = 'mongoenginetest' + + +class MongoDBTestCase(unittest.TestCase): + """Base class for tests that need a mongodb connection + db is being dropped automatically + """ + + @classmethod + def setUpClass(cls): + cls._connection = connect(db=MONGO_TEST_DB) + cls._connection.drop_database(MONGO_TEST_DB) + cls.db = get_db() + + @classmethod + def tearDownClass(cls): + cls._connection.drop_database(MONGO_TEST_DB) diff --git a/tox.ini b/tox.ini index 3f69b5e8..8278ee75 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,20 @@ [tox] -envlist = {py26,py27,py32,py33,py34,pypy,pypy3}-{mg27,mg28} -#envlist = {py26,py27,py32,py33,py34,pypy,pypy3}-{mg27,mg28,mg30,mgdev} +envlist = {py27,py33,py34,py35,pypy,pypy3}-{mg27,mg28,mg30},flake8 [testenv] commands = python setup.py nosetests {posargs} deps = + nose mg27: 
PyMongo<2.8
-    mg28: PyMongo>=2.8,<3.0
+    mg28: PyMongo>=2.8,<2.9
     mg30: PyMongo>=3.0
-    mgdev: https://github.com/mongodb/mongo-python-driver/tarball/master
+setenv =
+    PYTHON_EGG_CACHE = {envdir}/python-eggs
+
+[testenv:flake8]
+deps =
+    flake8
+    flake8-import-order
+commands =
+    flake8