Compare commits

28 Commits

fix-db-fie ... fix-old-py
| SHA1 |
|---|
| cedb5bda22 |
| 88a951ba4f |
| 403ceb19dc |
| 835d3c3d18 |
| 3135b456be |
| 0be6d3661a |
| 6f5f5b4711 |
| c6c5f85abb |
| 7b860f7739 |
| e28804c03a |
| 1b9432824b |
| 3b71a6b5c5 |
| 7ce8768c19 |
| 25e0f12976 |
| f168682a68 |
| d25058a46d |
| 4d0c092d9f |
| 15714ef855 |
| eb743beaa3 |
| 0007535a46 |
| 8391af026c |
| 800f656dcf |
| 088c5f49d9 |
| d8d98b6143 |
| 02fb3b9315 |
| 4f87db784e |
| 7e6287b925 |
| 8d6cb087c6 |
.gitignore (4 changes, vendored)

@@ -14,4 +14,6 @@ env/
 .project
 .pydevproject
 tests/test_bugfix.py
 htmlcov/
+venv
+venv3

.landscape.yml (22 changes, new file)

@@ -0,0 +1,22 @@
+pylint:
+    disable:
+        # We use this a lot (e.g. via document._meta)
+        - protected-access
+
+    options:
+        additional-builtins:
+            # add xrange and long as valid built-ins. In Python 3, xrange is
+            # translated into range and long is translated into int via 2to3 (see
+            # "use_2to3" in setup.py). This should be removed when we drop Python
+            # 2 support (which probably won't happen any time soon).
+            - xrange
+            - long
+
+pyflakes:
+    disable:
+        # undefined variables are already covered by pylint (and exclude
+        # xrange & long)
+        - F821
+
+ignore-paths:
+    - benchmark.py

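The comment in this config refers to setuptools' `use_2to3` flag. As an illustrative aside (the actual setup.py is not part of this diff, and the flag was removed in setuptools 58), this is roughly how it would appear:

```python
# Illustrative only: the `use_2to3` setting the .landscape.yml comment
# refers to. With this flag, setuptools runs 2to3 at build time, turning
# xrange into range and long into int in the generated Python 3 code.
from setuptools import setup

setup(
    name='mongoengine',
    use_2to3=True,
)
```
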
.travis.yml

@@ -1,7 +1,6 @@
 language: python
 
 python:
-- '2.6'
 - '2.7'
 - '3.3'
 - '3.4'
@@ -43,7 +42,11 @@ before_script:
 script:
 - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
 
-after_script: coveralls --verbose
+# For now only submit coveralls for Python v2.7. Python v3.x currently shows
+# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
+# code in a separate dir and runs tests on that.
+after_script:
+- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi
 
 notifications:
   irc: irc.freenode.org#mongoengine

AUTHORS (1 change)

@@ -242,3 +242,4 @@ that much better:
  * xiaost7 (https://github.com/xiaost7)
  * Victor Varvaryuk
  * Stanislav Kaledin (https://github.com/sallyruthstruik)
+ * Dmitry Yantsen (https://github.com/mrTable)

README.rst

@@ -4,7 +4,7 @@ MongoEngine
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
 :Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
-:Maintainer: Ross Lawley (http://github.com/rozza)
+:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan)
 
 .. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
   :target: https://travis-ci.org/MongoEngine/mongoengine
@@ -52,10 +52,14 @@ Some simple examples of what MongoEngine code looks like:
 
 .. code :: python
 
+    from mongoengine import *
+    connect('mydb')
+
     class BlogPost(Document):
         title = StringField(required=True, max_length=200)
         posted = DateTimeField(default=datetime.datetime.now)
         tags = ListField(StringField(max_length=50))
+        meta = {'allow_inheritance': True}
 
     class TextPost(BlogPost):
         content = StringField(required=True)

benchmark.py (152 changes)

@@ -1,118 +1,41 @@
 #!/usr/bin/env python
 
+"""
+Simple benchmark comparing PyMongo and MongoEngine.
+
+Sample run on a mid 2015 MacBook Pro (commit b282511):
+
+Benchmarking...
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo
+2.58979988098
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
+1.26657605171
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine
+8.4351580143
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries without continual assign - MongoEngine
+7.20191693306
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
+6.31104588509
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
+6.07083487511
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
+5.97704291344
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
+5.9111430645
+"""
+
 import timeit
 
 
-def cprofile_main():
-    from pymongo import Connection
-    connection = Connection()
-    connection.drop_database('timeit_test')
-    connection.disconnect()
-
-    from mongoengine import Document, DictField, connect
-    connect("timeit_test")
-
-    class Noddy(Document):
-        fields = DictField()
-
-    for i in range(1):
-        noddy = Noddy()
-        for j in range(20):
-            noddy.fields["key" + str(j)] = "value " + str(j)
-        noddy.save()
-
-
 def main():
-    """
-    0.4 Performance Figures ...
-
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.86744189262
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    6.23374891281
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    5.33027005196
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    pass - No Cascade
-
-    0.5.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.89597702026
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    21.7735359669
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    19.8670389652
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    pass - No Cascade
-
-    0.6.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.81559205055
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    10.0446798801
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    9.51354718208
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    9.02567505836
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force=True
-    8.44933390617
-
-    0.7.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.78801012039
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    9.73050498962
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    8.33456707001
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    8.37778115273
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force=True
-    8.36906409264
-    0.8.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.69964408875
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
-    3.5526599884
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    7.00959801674
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries without continual assign - MongoEngine
-    5.60943293571
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True
-    6.715102911
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
-    5.50644683838
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
-    4.69851183891
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
-    4.68946313858
-    ----------------------------------------------------------------------------------------------------
-    """
     print("Benchmarking...")
 
     setup = """
@@ -131,7 +54,7 @@ noddy = db.noddy
 for i in range(10000):
     example = {'fields': {}}
     for j in range(20):
-        example['fields']["key"+str(j)] = "value "+str(j)
+        example['fields']['key' + str(j)] = 'value ' + str(j)
 
     noddy.save(example)
 
@@ -146,9 +69,10 @@ myNoddys = noddy.find()
 
     stmt = """
 from pymongo import MongoClient
+from pymongo.write_concern import WriteConcern
 connection = MongoClient()
 
-db = connection.timeit_test
+db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
 noddy = db.noddy
 
 for i in range(10000):
@@ -156,7 +80,7 @@ for i in range(10000):
     example = {'fields': {}}
     for j in range(20):
         example['fields']["key"+str(j)] = "value "+str(j)
 
-    noddy.save(example, write_concern={"w": 0})
+    noddy.save(example)
 
 myNoddys = noddy.find()
 [n for n in myNoddys] # iterate
@@ -171,10 +95,10 @@ myNoddys = noddy.find()
 from pymongo import MongoClient
 connection = MongoClient()
 connection.drop_database('timeit_test')
-connection.disconnect()
+connection.close()
 
 from mongoengine import Document, DictField, connect
-connect("timeit_test")
+connect('timeit_test')
 
 class Noddy(Document):
     fields = DictField()

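For reference, a minimal sketch of the timeit pattern benchmark.py relies on: connection setup lives in the `setup` string and is excluded from timing, while only the insert loop in `stmt` is measured. It assumes a local mongod and uses PyMongo's legacy `Collection.save`, as the file above does (removed in PyMongo 4; `insert_one` is the modern equivalent).

```python
import timeit

# Setup runs once and is excluded from the measured time.
setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
db = connection.timeit_test
noddy = db.noddy
"""

# Only this statement is timed.
stmt = """
for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']['key' + str(j)] = 'value ' + str(j)
    noddy.save(example)
"""

t = timeit.Timer(stmt=stmt, setup=setup)
print(t.timeit(1))  # seconds for a single run, as in the sample output above
```
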
docs/changelog.rst

@@ -2,16 +2,38 @@
 Changelog
 =========
 
+Development
+===========
+- (Fill this out as you fix issues and develop your features).
+
+Changes in 0.11.0
+=================
+- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
+- BREAKING CHANGE: Dropped Python 2.6 support. #1428
+- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass` instead. #1428
+- Fixed absent rounding for DecimalField when `force_string` is set. #1103
+
 Changes in 0.10.8
 =================
-- Fill this in as PRs for v0.10.8 are merged
+- Added support for QuerySet.batch_size (#1426)
+- Fixed query set iteration within iteration #1427
+- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421
+- Added ability to filter the generic reference field by ObjectId and DBRef #1425
+- Fixed delete cascade for models with a custom primary key field #1247
+- Added ability to specify an authentication mechanism (e.g. X.509) #1333
+- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
+- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417
+- Fixed filtering by embedded_doc=None #1422
+- Added support for cursor.comment #1420
+- Fixed doc.get_<field>_display #1419
+- Fixed __repr__ method of the StrictDict #1424
+- Added a deprecation warning for Python 2.6
 
 Changes in 0.10.7
 =================
 - Dropped Python 3.2 support #1390
 - Fixed the bug where dynamic doc has index inside a dict field #1278
 - Fixed: ListField minus index assignment does not work #1128
 - Fixed not being able to specify `use_db_field=False` on `ListField(EmbeddedDocumentField)` instances
 - Fixed cascade delete mixing among collections #1224
 - Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
 - Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
@@ -28,7 +50,7 @@ Changes in 0.10.7
 - Fixed connecting to a list of hosts #1389
 - Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
 - Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
-- Improvements to the dictionary fields docs # 1383
+- Improvements to the dictionary fields docs #1383
 
 Changes in 0.10.6
 =================

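Two of the 0.10.8 entries are easiest to see in code. A hedged sketch (the model and database names are illustrative, not from the diff; assumes a local mongod):

```python
from bson import DBRef, ObjectId
from mongoengine import Document, GenericReferenceField, StringField, connect

connect('changelog_demo')

class Post(Document):
    title = StringField()
    ref = GenericReferenceField()

# QuerySet.batch_size (#1426): fetch results from the server in batches
# of 100 instead of the driver default.
for post in Post.objects.batch_size(100):
    print(post.title)

# Filtering a generic reference by a raw ObjectId or DBRef (#1425),
# instead of having to pass a full document instance.
Post.objects(ref=ObjectId('582176fe6e32e437199acfbc'))
Post.objects(ref=DBRef('post', ObjectId('582176fe6e32e437199acfbc')))
```
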
docs/upgrade.rst

@@ -2,6 +2,39 @@
 Upgrading
 #########
 
+0.11.0
+******
+This release includes a major overhaul of MongoEngine's code quality and
+introduces a few breaking changes. It also touches many different parts of
+the package and although all the changes have been tested and scrutinized,
+you're encouraged to thoroughly test the upgrade.
+
+The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
+If you import or catch this exception, you'll need to rename it in your code.
+
+The second breaking change drops Python v2.6 support. If you run MongoEngine on
+that Python version, you'll need to upgrade it first.
+
+The third breaking change drops an old backward compatibility measure where
+`from mongoengine.base import ErrorClass` would work on top of
+`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g.
+`ValidationError`). If you import any exceptions from `mongoengine.base`,
+change it to `mongoengine.errors`.
+
+0.10.8
+******
+This version fixed an issue where specifying a MongoDB URI host would override
+more information than it should. These changes are minor, but they still
+subtly modify the connection logic and thus you're encouraged to test your
+MongoDB connection before shipping v0.10.8 in production.
+
+0.10.7
+******
+
+`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use
+`QuerySet.sum` and `QuerySet.average` instead, which use the aggregation
+framework by default from now on.
+
 0.9.0
 *****
 

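A minimal before/after sketch of the import changes the upgrade notes describe (only the two imports are executable; the rest is shown as comments):

```python
# Before 0.11.0 (old names):
#     from mongoengine import ConnectionError
#     from mongoengine.base import ValidationError

# From 0.11.0 on:
from mongoengine import MongoEngineConnectionError
from mongoengine.errors import ValidationError

# And per the 0.10.7 note, assuming a BlogPost document with a `hits` field:
#     total = BlogPost.objects.sum('hits')        # replaces aggregate_sum
#     average = BlogPost.objects.average('hits')  # replaces aggregate_average
```
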
mongoengine/__init__.py

@@ -1,25 +1,35 @@
-import connection
-from connection import *
-import document
-from document import *
-import errors
-from errors import *
-import fields
-from fields import *
-import queryset
-from queryset import *
-import signals
-from signals import *
+# Import submodules so that we can expose their __all__
+from mongoengine import connection
+from mongoengine import document
+from mongoengine import errors
+from mongoengine import fields
+from mongoengine import queryset
+from mongoengine import signals
 
-__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
-           list(queryset.__all__) + signals.__all__ + list(errors.__all__))
+# Import everything from each submodule so that it can be accessed via
+# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
+# users can simply use `from mongoengine import connect`, or even
+# `from mongoengine import *` and then `connect('testdb')`.
+from mongoengine.connection import *
+from mongoengine.document import *
+from mongoengine.errors import *
+from mongoengine.fields import *
+from mongoengine.queryset import *
+from mongoengine.signals import *
 
-VERSION = (0, 10, 7)
+__all__ = (list(document.__all__) + list(fields.__all__) +
+           list(connection.__all__) + list(queryset.__all__) +
+           list(signals.__all__) + list(errors.__all__))
+
+
+VERSION = (0, 11, 0)
 
 
 def get_version():
-    if isinstance(VERSION[-1], basestring):
-        return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
+    """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7),
+    return '0.10.7'.
+    """
    return '.'.join(map(str, VERSION))

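A short usage sketch of what these re-exports enable (the document class and database name are illustrative; assumes a local mongod):

```python
import mongoengine
from mongoengine import Document, StringField, connect  # re-exported names

print(mongoengine.get_version())  # '0.11.0' for VERSION = (0, 11, 0)

connect('testdb')

class Person(Document):
    name = StringField(required=True)

Person(name='Ross').save()
```
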
mongoengine/base/__init__.py

@@ -1,8 +1,28 @@
+# Base module is split into several files for convenience. Files inside of
+# this module should import from a specific submodule (e.g.
+# `from mongoengine.base.document import BaseDocument`), but all of the
+# other modules should import directly from the top-level module (e.g.
+# `from mongoengine.base import BaseDocument`). This approach is cleaner and
+# also helps with cyclical import errors.
 from mongoengine.base.common import *
 from mongoengine.base.datastructures import *
 from mongoengine.base.document import *
 from mongoengine.base.fields import *
 from mongoengine.base.metaclasses import *
 
-# Help with backwards compatibility
-from mongoengine.errors import *
+__all__ = (
+    # common
+    'UPDATE_OPERATORS', '_document_registry', 'get_document',
+
+    # datastructures
+    'BaseDict', 'BaseList', 'EmbeddedDocumentList',
+
+    # document
+    'BaseDocument',
+
+    # fields
+    'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',
+
+    # metaclasses
+    'DocumentMetaclass', 'TopLevelDocumentMetaclass'
+)

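The comment at the top of this hunk prescribes an import direction; in code it amounts to:

```python
# Outside mongoengine/base, import from the package's top-level module:
from mongoengine.base import BaseDocument, get_document

# Inside mongoengine/base/*.py, the convention is the specific submodule,
# which avoids cyclical imports, e.g.:
#     from mongoengine.base.document import BaseDocument
```
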
mongoengine/base/common.py

@@ -1,13 +1,18 @@
 from mongoengine.errors import NotRegistered
 
-__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')
+__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
+
+
+UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
+                        'push_all', 'pull', 'pull_all', 'add_to_set',
+                        'set_on_insert', 'min', 'max'])
 
-ALLOW_INHERITANCE = False
 
 _document_registry = {}
 
 
 def get_document(name):
+    """Get a document class by name."""
     doc = _document_registry.get(name, None)
     if not doc:
         # Possible old style name

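`UPDATE_OPERATORS` enumerates the atomic-update prefixes that QuerySet update keywords are parsed against. A hedged usage sketch (model and database names are illustrative; assumes a local mongod):

```python
from mongoengine import Document, IntField, ListField, StringField, connect

connect('ops_demo')

class Counter(Document):
    count = IntField(default=0)
    tags = ListField(StringField())

doc = Counter().save()

# Each keyword is split on '__'; the first part must be one of
# UPDATE_OPERATORS ('inc' and 'push' here), the rest names the field.
Counter.objects(id=doc.id).update_one(inc__count=1, push__tags='first')
```
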
mongoengine/base/datastructures.py

@@ -1,14 +1,16 @@
 import itertools
 import weakref
 
+import six
+
 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
 
-__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList")
+__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')
 
 
 class BaseDict(dict):
-    """A special dict so we can watch any changes"""
+    """A special dict so we can watch any changes."""
 
     _dereferenced = False
     _instance = None
@@ -93,8 +95,7 @@ class BaseDict(dict):
 
 
 class BaseList(list):
-    """A special list so we can watch any changes
-    """
+    """A special list so we can watch any changes."""
 
     _dereferenced = False
     _instance = None
@@ -209,17 +210,22 @@ class BaseList(list):
 class EmbeddedDocumentList(BaseList):
 
     @classmethod
-    def __match_all(cls, i, kwargs):
-        items = kwargs.items()
-        return all([
-            getattr(i, k) == v or unicode(getattr(i, k)) == v for k, v in items
-        ])
+    def __match_all(cls, embedded_doc, kwargs):
+        """Return True if a given embedded doc matches all the filter
+        kwargs. If it doesn't, return False.
+        """
+        for key, expected_value in kwargs.items():
+            doc_val = getattr(embedded_doc, key)
+            if doc_val != expected_value and six.text_type(doc_val) != expected_value:
+                return False
+        return True
 
     @classmethod
-    def __only_matches(cls, obj, kwargs):
+    def __only_matches(cls, embedded_docs, kwargs):
+        """Return embedded docs that match the filter kwargs."""
         if not kwargs:
-            return obj
-        return filter(lambda i: cls.__match_all(i, kwargs), obj)
+            return embedded_docs
+        return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]
 
     def __init__(self, list_items, instance, name):
         super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
@@ -285,18 +291,18 @@ class EmbeddedDocumentList(BaseList):
         values = self.__only_matches(self, kwargs)
         if len(values) == 0:
             raise DoesNotExist(
-                "%s matching query does not exist." % self._name
+                '%s matching query does not exist.' % self._name
             )
         elif len(values) > 1:
             raise MultipleObjectsReturned(
-                "%d items returned, instead of 1" % len(values)
+                '%d items returned, instead of 1' % len(values)
             )
 
         return values[0]
 
     def first(self):
-        """
-        Returns the first embedded document in the list, or ``None`` if empty.
+        """Return the first embedded document in the list, or ``None``
+        if empty.
         """
         if len(self) > 0:
             return self[0]
@@ -438,7 +444,7 @@ class StrictDict(object):
                 __slots__ = allowed_keys_tuple
 
                 def __repr__(self):
-                    return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k) for k in self.iterkeys())
+                    return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
 
             cls._classes[allowed_keys] = SpecificStrictDict
         return cls._classes[allowed_keys]

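The rewritten `__match_all`/`__only_matches` helpers back EmbeddedDocumentList's public filtering API. A hedged sketch of that API (model and database names are illustrative; assumes a local mongod):

```python
from mongoengine import (Document, EmbeddedDocument,
                         EmbeddedDocumentListField, StringField, connect)

connect('edl_demo')

class Comment(EmbeddedDocument):
    author = StringField()

class Page(Document):
    comments = EmbeddedDocumentListField(Comment)

page = Page(comments=[Comment(author='Ross'), Comment(author='Harry')]).save()

# filter() funnels through __only_matches/__match_all above.
ross = page.comments.filter(author='Ross')

# get() raises DoesNotExist / MultipleObjectsReturned, as in the hunk above.
harry = page.comments.get(author='Harry')

# first() returns the first element, or None if the list is empty.
first = page.comments.first()
```
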
mongoengine/base/document.py

@@ -1,6 +1,5 @@
 import copy
 import numbers
-import operator
 from collections import Hashable
 from functools import partial
 
@@ -8,30 +7,27 @@ from bson import ObjectId, json_util
 from bson.dbref import DBRef
 from bson.son import SON
 import pymongo
+import six
 
 from mongoengine import signals
-from mongoengine.base.common import ALLOW_INHERITANCE, get_document
-from mongoengine.base.datastructures import (
-    BaseDict,
-    BaseList,
-    EmbeddedDocumentList,
-    SemiStrictDict,
-    StrictDict
-)
+from mongoengine.base.common import get_document
+from mongoengine.base.datastructures import (BaseDict, BaseList,
+                                             EmbeddedDocumentList,
+                                             SemiStrictDict, StrictDict)
 from mongoengine.base.fields import ComplexBaseField
 from mongoengine.common import _import_class
 from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
-                                LookUpError, ValidationError)
-from mongoengine.python_support import PY3, txt_type
+                                LookUpError, OperationError, ValidationError)
 
-__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
+__all__ = ('BaseDocument',)
 
 NON_FIELD_ERRORS = '__all__'
 
 
 class BaseDocument(object):
     __slots__ = ('_changed_fields', '_initialised', '_created', '_data',
-                 '_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')
+                 '_dynamic_fields', '_auto_id_field', '_db_field_map',
+                 '__weakref__')
 
     _dynamic = False
     _dynamic_lock = True
@@ -57,15 +53,15 @@ class BaseDocument(object):
                 name = next(field)
                 if name in values:
                     raise TypeError(
-                        "Multiple values for keyword argument '" + name + "'")
+                        'Multiple values for keyword argument "%s"' % name)
                 values[name] = value
 
-        __auto_convert = values.pop("__auto_convert", True)
+        __auto_convert = values.pop('__auto_convert', True)
 
         # 399: set default values only to fields loaded from DB
-        __only_fields = set(values.pop("__only_fields", values))
+        __only_fields = set(values.pop('__only_fields', values))
 
-        _created = values.pop("_created", True)
+        _created = values.pop('_created', True)
 
         signals.pre_init.send(self.__class__, document=self, values=values)
 
@@ -76,7 +72,7 @@ class BaseDocument(object):
                 self._fields.keys() + ['id', 'pk', '_cls', '_text_score'])
             if _undefined_fields:
                 msg = (
-                    "The fields '{0}' do not exist on the document '{1}'"
+                    'The fields "{0}" do not exist on the document "{1}"'
                 ).format(_undefined_fields, self._class_name)
                 raise FieldDoesNotExist(msg)
 
@@ -95,7 +91,7 @@ class BaseDocument(object):
             value = getattr(self, key, None)
             setattr(self, key, value)
 
-        if "_cls" not in values:
+        if '_cls' not in values:
             self._cls = self._class_name
 
         # Set passed values after initialisation
@@ -121,7 +117,7 @@ class BaseDocument(object):
                 else:
                     self._data[key] = value
 
-        # Set any get_fieldname_display methods
+        # Set any get_<field>_display methods
         self.__set_field_display()
 
         if self._dynamic:
@@ -150,7 +146,7 @@ class BaseDocument(object):
         if self._dynamic and not self._dynamic_lock:
 
             if not hasattr(self, name) and not name.startswith('_'):
-                DynamicField = _import_class("DynamicField")
+                DynamicField = _import_class('DynamicField')
                 field = DynamicField(db_field=name)
                 field.name = name
                 self._dynamic_fields[name] = field
@@ -169,11 +165,13 @@ class BaseDocument(object):
         except AttributeError:
             self__created = True
 
-        if (self._is_document and not self__created and
-                name in self._meta.get('shard_key', tuple()) and
-                self._data.get(name) != value):
-            OperationError = _import_class('OperationError')
-            msg = "Shard Keys are immutable. Tried to update %s" % name
+        if (
+            self._is_document and
+            not self__created and
+            name in self._meta.get('shard_key', tuple()) and
+            self._data.get(name) != value
+        ):
+            msg = 'Shard Keys are immutable. Tried to update %s' % name
             raise OperationError(msg)
 
         try:
@@ -197,8 +195,8 @@ class BaseDocument(object):
         return data
 
     def __setstate__(self, data):
-        if isinstance(data["_data"], SON):
-            data["_data"] = self.__class__._from_son(data["_data"])._data
+        if isinstance(data['_data'], SON):
+            data['_data'] = self.__class__._from_son(data['_data'])._data
         for k in ('_changed_fields', '_initialised', '_created', '_data',
                   '_dynamic_fields'):
             if k in data:
@@ -212,7 +210,7 @@ class BaseDocument(object):
 
         dynamic_fields = data.get('_dynamic_fields') or SON()
         for k in dynamic_fields.keys():
-            setattr(self, k, data["_data"].get(k))
+            setattr(self, k, data['_data'].get(k))
 
     def __iter__(self):
         return iter(self._fields_ordered)
@@ -254,12 +252,13 @@ class BaseDocument(object):
         return repr_type('<%s: %s>' % (self.__class__.__name__, u))
 
     def __str__(self):
+        # TODO this could be simpler?
         if hasattr(self, '__unicode__'):
-            if PY3:
+            if six.PY3:
                 return self.__unicode__()
             else:
-                return unicode(self).encode('utf-8')
-        return txt_type('%s object' % self.__class__.__name__)
+                return six.text_type(self).encode('utf-8')
+        return six.text_type('%s object' % self.__class__.__name__)
 
     def __eq__(self, other):
         if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None:
@@ -308,7 +307,7 @@ class BaseDocument(object):
             fields = []
 
         data = SON()
-        data["_id"] = None
+        data['_id'] = None
         data['_cls'] = self._class_name
 
         # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
@@ -351,18 +350,8 @@ class BaseDocument(object):
                 else:
                     data[field.name] = value
 
-        # If "_id" has not been set, then try and set it
-        Document = _import_class("Document")
-        if isinstance(self, Document):
-            if data["_id"] is None:
-                data["_id"] = self._data.get("id", None)
-
         if data['_id'] is None:
             data.pop('_id')
 
-        # Only add _cls if allow_inheritance is True
-        if (not hasattr(self, '_meta') or
-                not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
+        if not self._meta.get('allow_inheritance'):
             data.pop('_cls')
 
         return data
@@ -376,16 +365,16 @@ class BaseDocument(object):
         if clean:
             try:
                 self.clean()
-            except ValidationError, error:
+            except ValidationError as error:
                 errors[NON_FIELD_ERRORS] = error
 
         # Get a list of tuples of field names and their current values
         fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
                    self._data.get(name)) for name in self._fields_ordered]
 
-        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
+        EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
         GenericEmbeddedDocumentField = _import_class(
-            "GenericEmbeddedDocumentField")
+            'GenericEmbeddedDocumentField')
 
         for field, value in fields:
             if value is not None:
@@ -395,21 +384,21 @@ class BaseDocument(object):
                         field._validate(value, clean=clean)
                     else:
                         field._validate(value)
-                except ValidationError, error:
+                except ValidationError as error:
                     errors[field.name] = error.errors or error
-                except (ValueError, AttributeError, AssertionError), error:
+                except (ValueError, AttributeError, AssertionError) as error:
                     errors[field.name] = error
             elif field.required and not getattr(field, '_auto_gen', False):
                 errors[field.name] = ValidationError('Field is required',
                                                      field_name=field.name)
 
         if errors:
-            pk = "None"
+            pk = 'None'
             if hasattr(self, 'pk'):
                 pk = self.pk
             elif self._instance and hasattr(self._instance, 'pk'):
                 pk = self._instance.pk
-            message = "ValidationError (%s:%s) " % (self._class_name, pk)
+            message = 'ValidationError (%s:%s) ' % (self._class_name, pk)
             raise ValidationError(message, errors=errors)
 
     def to_json(self, *args, **kwargs):
@@ -426,33 +415,26 @@ class BaseDocument(object):
         return cls._from_son(json_util.loads(json_data), created=created)
 
     def __expand_dynamic_values(self, name, value):
-        """expand any dynamic values to their correct types / values"""
+        """Expand any dynamic values to their correct types / values."""
         if not isinstance(value, (dict, list, tuple)):
             return value
 
-        EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
-
-        is_list = False
-        if not hasattr(value, 'items'):
-            is_list = True
-            value = dict([(k, v) for k, v in enumerate(value)])
-
-        if not is_list and '_cls' in value:
+        # If the value is a dict with '_cls' in it, turn it into a document
+        is_dict = isinstance(value, dict)
+        if is_dict and '_cls' in value:
             cls = get_document(value['_cls'])
             return cls(**value)
 
-        data = {}
-        for k, v in value.items():
-            key = name if is_list else k
-            data[k] = self.__expand_dynamic_values(key, v)
-
-        if is_list:  # Convert back to a list
-            data_items = sorted(data.items(), key=operator.itemgetter(0))
-            value = [v for k, v in data_items]
+        if is_dict:
+            value = {
+                k: self.__expand_dynamic_values(k, v)
+                for k, v in value.items()
+            }
         else:
-            value = data
+            value = [self.__expand_dynamic_values(name, v) for v in value]
 
         # Convert lists / values so we can watch for any changes on them
+        EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
         if (isinstance(value, (list, tuple)) and
                 not isinstance(value, BaseList)):
             if issubclass(type(self), EmbeddedDocumentListField):
@@ -465,8 +447,7 @@ class BaseDocument(object):
         return value
 
     def _mark_as_changed(self, key):
-        """Marks a key as explicitly changed by the user
-        """
+        """Mark a key as explicitly changed by the user."""
         if not key:
             return
 
@@ -496,10 +477,11 @@ class BaseDocument(object):
                         remove(field)
 
     def _clear_changed_fields(self):
-        """Using get_changed_fields iterate and remove any fields that are
-        marked as changed"""
+        """Using _get_changed_fields iterate and remove any fields that
+        are marked as changed.
+        """
         for changed in self._get_changed_fields():
-            parts = changed.split(".")
+            parts = changed.split('.')
             data = self
             for part in parts:
                 if isinstance(data, list):
@@ -511,10 +493,13 @@ class BaseDocument(object):
                     data = data.get(part, None)
                 else:
                     data = getattr(data, part, None)
-                if hasattr(data, "_changed_fields"):
-                    if hasattr(data, "_is_document") and data._is_document:
+
+                if hasattr(data, '_changed_fields'):
+                    if getattr(data, '_is_document', False):
                         continue
+
                     data._changed_fields = []
+
         self._changed_fields = []
 
     def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
@@ -526,26 +511,27 @@ class BaseDocument(object):
             iterator = data.iteritems()
 
         for index, value in iterator:
-            list_key = "%s%s." % (key, index)
+            list_key = '%s%s.' % (key, index)
             # don't check anything lower if this key is already marked
             # as changed.
             if list_key[:-1] in changed_fields:
                 continue
+
             if hasattr(value, '_get_changed_fields'):
                 changed = value._get_changed_fields(inspected)
-                changed_fields += ["%s%s" % (list_key, k)
+                changed_fields += ['%s%s' % (list_key, k)
                                    for k in changed if k]
             elif isinstance(value, (list, tuple, dict)):
                 self._nestable_types_changed_fields(
                     changed_fields, list_key, value, inspected)
 
     def _get_changed_fields(self, inspected=None):
-        """Returns a list of all fields that have explicitly been changed.
+        """Return a list of all fields that have explicitly been changed.
         """
-        EmbeddedDocument = _import_class("EmbeddedDocument")
-        DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
-        ReferenceField = _import_class("ReferenceField")
-        SortedListField = _import_class("SortedListField")
+        EmbeddedDocument = _import_class('EmbeddedDocument')
+        DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument')
+        ReferenceField = _import_class('ReferenceField')
+        SortedListField = _import_class('SortedListField')
+
         changed_fields = []
         changed_fields += getattr(self, '_changed_fields', [])
 
@@ -572,7 +558,7 @@ class BaseDocument(object):
             ):
                 # Find all embedded fields that have been changed
                 changed = data._get_changed_fields(inspected)
-                changed_fields += ["%s%s" % (key, k) for k in changed if k]
+                changed_fields += ['%s%s' % (key, k) for k in changed if k]
             elif (isinstance(data, (list, tuple, dict)) and
                     db_field_name not in changed_fields):
                 if (hasattr(field, 'field') and
@@ -676,21 +662,25 @@ class BaseDocument(object):
 
     @classmethod
     def _get_collection_name(cls):
-        """Returns the collection name for this class. None for abstract class
+        """Return the collection name for this class. None for abstract
+        class.
         """
         return cls._meta.get('collection', None)
 
     @classmethod
     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
-        """Create an instance of a Document (subclass) from a PyMongo SON.
+        """Create an instance of a Document (subclass) from a PyMongo
+        SON.
         """
         if not only_fields:
             only_fields = []
 
-        # get the class name from the document, falling back to the given
+        # Get the class name from the document, falling back to the given
         # class if unavailable
         class_name = son.get('_cls', cls._class_name)
-        data = dict(("%s" % key, value) for key, value in son.iteritems())
+
+        # Convert SON to a dict, making sure each key is a string
+        data = {str(key): value for key, value in son.iteritems()}
 
         # Return correct subclass for document type
         if class_name != cls._class_name:
@@ -712,19 +702,20 @@ class BaseDocument(object):
                                         else field.to_python(value))
                     if field_name != field.db_field:
                         del data[field.db_field]
-                except (AttributeError, ValueError), e:
+                except (AttributeError, ValueError) as e:
                     errors_dict[field_name] = e
 
         if errors_dict:
-            errors = "\n".join(["%s - %s" % (k, v)
+            errors = '\n'.join(['%s - %s' % (k, v)
                                 for k, v in errors_dict.items()])
-            msg = ("Invalid data to create a `%s` instance.\n%s"
+            msg = ('Invalid data to create a `%s` instance.\n%s'
                    % (cls._class_name, errors))
             raise InvalidDocumentError(msg)
 
+        # In STRICT documents, remove any keys that aren't in cls._fields
         if cls.STRICT:
-            data = dict((k, v)
-                        for k, v in data.iteritems() if k in cls._fields)
+            data = {k: v for k, v in data.iteritems() if k in cls._fields}
 
         obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
         obj._changed_fields = changed_fields
         if not _auto_dereference:
@@ -734,37 +725,43 @@ class BaseDocument(object):
 
     @classmethod
     def _build_index_specs(cls, meta_indexes):
-        """Generate and merge the full index specs
-        """
-
+        """Generate and merge the full index specs."""
         geo_indices = cls._geo_indices()
         unique_indices = cls._unique_with_indexes()
-        index_specs = [cls._build_index_spec(spec)
-                       for spec in meta_indexes]
+        index_specs = [cls._build_index_spec(spec) for spec in meta_indexes]
 
         def merge_index_specs(index_specs, indices):
+            """Helper method for merging index specs."""
             if not indices:
                 return index_specs
 
-            spec_fields = [v['fields']
-                           for k, v in enumerate(index_specs)]
-            # Merge unique_indexes with existing specs
-            for k, v in enumerate(indices):
-                if v['fields'] in spec_fields:
-                    index_specs[spec_fields.index(v['fields'])].update(v)
+            # Create a map of index fields to index spec. We're converting
+            # the fields from a list to a tuple so that it's hashable.
+            spec_fields = {
+                tuple(index['fields']): index for index in index_specs
+            }
+
+            # For each new index, if there's an existing index with the same
+            # fields list, update the existing spec with all data from the
+            # new spec.
+            for new_index in indices:
+                candidate = spec_fields.get(tuple(new_index['fields']))
+                if candidate is None:
+                    index_specs.append(new_index)
                 else:
-                    index_specs.append(v)
+                    candidate.update(new_index)
 
             return index_specs
 
+        # Merge geo indexes and unique_with indexes into the meta index specs.
         index_specs = merge_index_specs(index_specs, geo_indices)
         index_specs = merge_index_specs(index_specs, unique_indices)
         return index_specs
 
     @classmethod
     def _build_index_spec(cls, spec):
-        """Build a PyMongo index spec from a MongoEngine index spec.
-        """
-        if isinstance(spec, basestring):
+        """Build a PyMongo index spec from a MongoEngine index spec."""
+        if isinstance(spec, six.string_types):
             spec = {'fields': [spec]}
         elif isinstance(spec, (list, tuple)):
             spec = {'fields': list(spec)}

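An isolated, runnable sketch of the merge strategy introduced in `_build_index_specs` above: existing specs are keyed by `tuple(fields)` (lists aren't hashable, tuples are), so each incoming spec either updates its match or is appended. The spec values here are made up for illustration.

```python
existing = [{'fields': [('title', 1)], 'unique': True}]
incoming = [{'fields': [('title', 1)], 'sparse': True},
            {'fields': [('author', 1)]}]

# Key the existing specs by an immutable version of their fields list.
by_fields = {tuple(spec['fields']): spec for spec in existing}

for new_spec in incoming:
    match = by_fields.get(tuple(new_spec['fields']))
    if match is None:
        existing.append(new_spec)
    else:
        match.update(new_spec)

assert existing == [
    {'fields': [('title', 1)], 'unique': True, 'sparse': True},
    {'fields': [('author', 1)]},
]
```
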
| @@ -775,8 +772,7 @@ class BaseDocument(object): | ||||
|         direction = None | ||||
|  | ||||
|         # Check to see if we need to include _cls | ||||
|         allow_inheritance = cls._meta.get('allow_inheritance', | ||||
|                                           ALLOW_INHERITANCE) | ||||
|         allow_inheritance = cls._meta.get('allow_inheritance') | ||||
|         include_cls = ( | ||||
|             allow_inheritance and | ||||
|             not spec.get('sparse', False) and | ||||
| @@ -786,7 +782,7 @@ class BaseDocument(object): | ||||
|  | ||||
|         # 733: don't include cls if index_cls is False unless there is an explicit cls with the index | ||||
|         include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True)) | ||||
|         if "cls" in spec: | ||||
|         if 'cls' in spec: | ||||
|             spec.pop('cls') | ||||
|         for key in spec['fields']: | ||||
|             # If inherited spec continue | ||||
| @@ -801,19 +797,19 @@ class BaseDocument(object): | ||||
|             # GEOHAYSTACK from ) | ||||
|             # GEO2D from * | ||||
|             direction = pymongo.ASCENDING | ||||
|             if key.startswith("-"): | ||||
|             if key.startswith('-'): | ||||
|                 direction = pymongo.DESCENDING | ||||
|             elif key.startswith("$"): | ||||
|             elif key.startswith('$'): | ||||
|                 direction = pymongo.TEXT | ||||
|             elif key.startswith("#"): | ||||
|             elif key.startswith('#'): | ||||
|                 direction = pymongo.HASHED | ||||
|             elif key.startswith("("): | ||||
|             elif key.startswith('('): | ||||
|                 direction = pymongo.GEOSPHERE | ||||
|             elif key.startswith(")"): | ||||
|             elif key.startswith(')'): | ||||
|                 direction = pymongo.GEOHAYSTACK | ||||
|             elif key.startswith("*"): | ||||
|             elif key.startswith('*'): | ||||
|                 direction = pymongo.GEO2D | ||||
|             if key.startswith(("+", "-", "*", "$", "#", "(", ")")): | ||||
|             if key.startswith(('+', '-', '*', '$', '#', '(', ')')): | ||||
|                 key = key[1:] | ||||
|  | ||||
|             # Use real field name, do it manually because we need field | ||||
| @@ -826,7 +822,7 @@ class BaseDocument(object): | ||||
|                 parts = [] | ||||
|                 for field in fields: | ||||
|                     try: | ||||
|                         if field != "_id": | ||||
|                         if field != '_id': | ||||
|                             field = field.db_field | ||||
|                     except AttributeError: | ||||
|                         pass | ||||
| @@ -845,49 +841,53 @@ class BaseDocument(object): | ||||
|         return spec | ||||
|  | ||||
|     @classmethod | ||||
|     def _unique_with_indexes(cls, namespace=""): | ||||
|         """ | ||||
|         Find and set unique indexes | ||||
|         """ | ||||
|     def _unique_with_indexes(cls, namespace=''): | ||||
|         """Find unique indexes in the document schema and return them.""" | ||||
|         unique_indexes = [] | ||||
|         for field_name, field in cls._fields.items(): | ||||
|             sparse = field.sparse | ||||
|  | ||||
|             # Generate a list of indexes needed by uniqueness constraints | ||||
|             if field.unique: | ||||
|                 unique_fields = [field.db_field] | ||||
|  | ||||
|                 # Add any unique_with fields to the back of the index spec | ||||
|                 if field.unique_with: | ||||
|                     if isinstance(field.unique_with, basestring): | ||||
|                     if isinstance(field.unique_with, six.string_types): | ||||
|                         field.unique_with = [field.unique_with] | ||||
|  | ||||
|                     # Convert unique_with field names to real field names | ||||
|                     unique_with = [] | ||||
|                     for other_name in field.unique_with: | ||||
|                         parts = other_name.split('.') | ||||
|  | ||||
|                         # Lookup real name | ||||
|                         parts = cls._lookup_field(parts) | ||||
|                         name_parts = [part.db_field for part in parts] | ||||
|                         unique_with.append('.'.join(name_parts)) | ||||
|  | ||||
|                         # Unique field should be required | ||||
|                         parts[-1].required = True | ||||
|                         sparse = (not sparse and | ||||
|                                   parts[-1].name not in cls.__dict__) | ||||
|  | ||||
|                     unique_fields += unique_with | ||||
|  | ||||
|                 # Add the new index to the list | ||||
|                 fields = [("%s%s" % (namespace, f), pymongo.ASCENDING) | ||||
|                           for f in unique_fields] | ||||
|                 fields = [ | ||||
|                     ('%s%s' % (namespace, f), pymongo.ASCENDING) | ||||
|                     for f in unique_fields | ||||
|                 ] | ||||
|                 index = {'fields': fields, 'unique': True, 'sparse': sparse} | ||||
|                 unique_indexes.append(index) | ||||
|  | ||||
|             if field.__class__.__name__ == "ListField": | ||||
|             if field.__class__.__name__ == 'ListField': | ||||
|                 field = field.field | ||||
|  | ||||
|             # Grab any embedded document field unique indexes | ||||
|             if (field.__class__.__name__ == "EmbeddedDocumentField" and | ||||
|             if (field.__class__.__name__ == 'EmbeddedDocumentField' and | ||||
|                     field.document_type != cls): | ||||
|                 field_namespace = "%s." % field_name | ||||
|                 field_namespace = '%s.' % field_name | ||||
|                 doc_cls = field.document_type | ||||
|                 unique_indexes += doc_cls._unique_with_indexes(field_namespace) | ||||
|  | ||||
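| # Hedged illustration of what the unique-index discovery above yields for | ||||
| # a simple schema; the User class is invented for this example. | ||||
| from mongoengine import Document, StringField | ||||
|  | ||||
| class User(Document): | ||||
|     username = StringField(unique=True) | ||||
|     first_name = StringField() | ||||
|     last_name = StringField(unique_with='first_name') | ||||
|  | ||||
| # User._unique_with_indexes() should return something like: | ||||
| # [{'fields': [('username', 1)], 'unique': True, 'sparse': False}, | ||||
| #  {'fields': [('last_name', 1), ('first_name', 1)], 'unique': True, | ||||
| #   'sparse': False}] | ||||
|  | ||||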
| @@ -899,8 +899,9 @@ class BaseDocument(object): | ||||
|         geo_indices = [] | ||||
|         inspected.append(cls) | ||||
|  | ||||
|         geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField", | ||||
|                                 "PointField", "LineStringField", "PolygonField"] | ||||
|         geo_field_type_names = ('EmbeddedDocumentField', 'GeoPointField', | ||||
|                                 'PointField', 'LineStringField', | ||||
|                                 'PolygonField') | ||||
|  | ||||
|         geo_field_types = tuple([_import_class(field) | ||||
|                                  for field in geo_field_type_names]) | ||||
| @@ -908,32 +909,68 @@ class BaseDocument(object): | ||||
|         for field in cls._fields.values(): | ||||
|             if not isinstance(field, geo_field_types): | ||||
|                 continue | ||||
|  | ||||
|             if hasattr(field, 'document_type'): | ||||
|                 field_cls = field.document_type | ||||
|                 if field_cls in inspected: | ||||
|                     continue | ||||
|  | ||||
|                 if hasattr(field_cls, '_geo_indices'): | ||||
|                     geo_indices += field_cls._geo_indices( | ||||
|                         inspected, parent_field=field.db_field) | ||||
|             elif field._geo_index: | ||||
|                 field_name = field.db_field | ||||
|                 if parent_field: | ||||
|                     field_name = "%s.%s" % (parent_field, field_name) | ||||
|                 geo_indices.append({'fields': | ||||
|                                     [(field_name, field._geo_index)]}) | ||||
|                     field_name = '%s.%s' % (parent_field, field_name) | ||||
|                 geo_indices.append({ | ||||
|                     'fields': [(field_name, field._geo_index)] | ||||
|                 }) | ||||
|  | ||||
|         return geo_indices | ||||
|  | ||||
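| # Sketch of the geo-index discovery above. GeoPointField sets _geo_index | ||||
| # to pymongo.GEO2D, so it is picked up here; Place is an invented example. | ||||
| from mongoengine import Document, GeoPointField | ||||
|  | ||||
| class Place(Document): | ||||
|     location = GeoPointField() | ||||
|  | ||||
| # Place._geo_indices() should return something like: | ||||
| # [{'fields': [('location', '2d')]}] | ||||
|  | ||||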
|     @classmethod | ||||
|     def _lookup_field(cls, parts): | ||||
|         """Lookup a field based on its attribute and return a list containing | ||||
|         the field's parents and the field. | ||||
|         """ | ||||
|         """Given the path to a given field, return a list containing | ||||
|         the Field object associated with that field and all of its parent | ||||
|         Field objects. | ||||
|  | ||||
|         Args: | ||||
|             parts (str, list, or tuple) - path to the field. Should be a | ||||
|             string for simple fields existing on this document or a list | ||||
|             of strings for a field that exists deeper in embedded documents. | ||||
|  | ||||
|         Returns: | ||||
|             A list of Field instances for fields that were found or | ||||
|             strings for sub-fields that weren't. | ||||
|  | ||||
|         Example: | ||||
|             >>> user._lookup_field('name') | ||||
|             [<mongoengine.fields.StringField at 0x1119bff50>] | ||||
|  | ||||
|             >>> user._lookup_field('roles') | ||||
|             [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>] | ||||
|  | ||||
|             >>> user._lookup_field(['roles', 'role']) | ||||
|             [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>, | ||||
|              <mongoengine.fields.StringField at 0x1119ec050>] | ||||
|  | ||||
|             >>> user._lookup_field('doesnt_exist') | ||||
|             raises LookUpError | ||||
|  | ||||
|             >>> user._lookup_field(['roles', 'doesnt_exist']) | ||||
|             [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>, | ||||
|              'doesnt_exist'] | ||||
|  | ||||
|         """ | ||||
|         # TODO this method is WAY too complicated. Simplify it. | ||||
|         # TODO don't think returning a string for embedded non-existent fields is desired | ||||
|  | ||||
|         ListField = _import_class("ListField") | ||||
|         ListField = _import_class('ListField') | ||||
|         DynamicField = _import_class('DynamicField') | ||||
|  | ||||
|         if not isinstance(parts, (list, tuple)): | ||||
|             parts = [parts] | ||||
|  | ||||
|         fields = [] | ||||
|         field = None | ||||
|  | ||||
| @@ -943,16 +980,17 @@ class BaseDocument(object): | ||||
|                 fields.append(field_name) | ||||
|                 continue | ||||
|  | ||||
|             # Look up first field from the document | ||||
|             if field is None: | ||||
|                 # Look up first field from the document | ||||
|                 if field_name == 'pk': | ||||
|                     # Deal with "primary key" alias | ||||
|                     field_name = cls._meta['id_field'] | ||||
|  | ||||
|                 if field_name in cls._fields: | ||||
|                     field = cls._fields[field_name] | ||||
|                 elif cls._dynamic: | ||||
|                     field = DynamicField(db_field=field_name) | ||||
|                 elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False): | ||||
|                 elif cls._meta.get('allow_inheritance') or cls._meta.get('abstract', False): | ||||
|                     # 744: in case the field is defined in a subclass | ||||
|                     for subcls in cls.__subclasses__(): | ||||
|                         try: | ||||
| @@ -965,35 +1003,55 @@ class BaseDocument(object): | ||||
|                     else: | ||||
|                         raise LookUpError('Cannot resolve field "%s"' % field_name) | ||||
|                 else: | ||||
|                     raise LookUpError('Cannot resolve field "%s"' | ||||
|                                       % field_name) | ||||
|                     raise LookUpError('Cannot resolve field "%s"' % field_name) | ||||
|             else: | ||||
|                 ReferenceField = _import_class('ReferenceField') | ||||
|                 GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|                 # If previous field was a reference, throw an error (we | ||||
|                 # cannot look up fields that are on references). | ||||
|                 if isinstance(field, (ReferenceField, GenericReferenceField)): | ||||
|                     raise LookUpError('Cannot perform join in mongoDB: %s' % | ||||
|                                       '__'.join(parts)) | ||||
|  | ||||
|                 # If the parent field has a "field" attribute which has a | ||||
|                 # lookup_member method, call it to find the field | ||||
|                 # corresponding to this iteration. | ||||
|                 if hasattr(getattr(field, 'field', None), 'lookup_member'): | ||||
|                     new_field = field.field.lookup_member(field_name) | ||||
|  | ||||
|                 # If the parent field is a DynamicField or if it's part of | ||||
|                 # a DynamicDocument, mark current field as a DynamicField | ||||
|                 # with db_field equal to the field name. | ||||
|                 elif cls._dynamic and (isinstance(field, DynamicField) or | ||||
|                                        getattr(getattr(field, 'document_type', None), '_dynamic', None)): | ||||
|                     new_field = DynamicField(db_field=field_name) | ||||
|  | ||||
|                 # Else, try to use the parent field's lookup_member method | ||||
|                 # to find the subfield. | ||||
|                 elif hasattr(field, 'lookup_member'): | ||||
|                     new_field = field.lookup_member(field_name) | ||||
|  | ||||
|                 # Raise a LookUpError if all the other conditions failed. | ||||
|                 else: | ||||
|                     # Look up subfield on the previous field or raise | ||||
|                     try: | ||||
|                         new_field = field.lookup_member(field_name) | ||||
|                     except AttributeError: | ||||
|                         raise LookUpError('Cannot resolve subfield or operator {} ' | ||||
|                                           'on the field {}'.format( | ||||
|                                               field_name, field.name)) | ||||
|                     raise LookUpError( | ||||
|                         'Cannot resolve subfield or operator {} ' | ||||
|                         'on the field {}'.format(field_name, field.name) | ||||
|                     ) | ||||
|  | ||||
|                 # If current field still wasn't found and the parent field | ||||
|                 # is a ComplexBaseField, add the current field name and | ||||
|                 # move on. | ||||
|                 if not new_field and isinstance(field, ComplexBaseField): | ||||
|                     fields.append(field_name) | ||||
|                     continue | ||||
|                 elif not new_field: | ||||
|                     raise LookUpError('Cannot resolve field "%s"' | ||||
|                                       % field_name) | ||||
|                     raise LookUpError('Cannot resolve field "%s"' % field_name) | ||||
|  | ||||
|                 field = new_field  # update field to the new field type | ||||
|  | ||||
|             fields.append(field) | ||||
|  | ||||
|         return fields | ||||
|  | ||||
|     @classmethod | ||||
| @@ -1005,19 +1063,18 @@ class BaseDocument(object): | ||||
|         return '.'.join(parts) | ||||
|  | ||||
|     def __set_field_display(self): | ||||
|         """Dynamically set the display value for a field with choices""" | ||||
|         for attr_name, field in self._fields.items(): | ||||
|             if field.choices: | ||||
|                 if self._dynamic: | ||||
|                     obj = self | ||||
|                 else: | ||||
|                     obj = type(self) | ||||
|                 setattr(obj, | ||||
|                         'get_%s_display' % attr_name, | ||||
|                         partial(self.__get_field_display, field=field)) | ||||
|         """For each field that specifies choices, create a | ||||
|         get_<field>_display method. | ||||
|         """ | ||||
|         fields_with_choices = [(n, f) for n, f in self._fields.items() | ||||
|                                if f.choices] | ||||
|         for attr_name, field in fields_with_choices: | ||||
|             setattr(self, | ||||
|                     'get_%s_display' % attr_name, | ||||
|                     partial(self.__get_field_display, field=field)) | ||||
|  | ||||
|     def __get_field_display(self, field): | ||||
|         """Returns the display value for a choice field""" | ||||
|         """Return the display value for a choice field""" | ||||
|         value = getattr(self, field.name) | ||||
|         if field.choices and isinstance(field.choices[0], (list, tuple)): | ||||
|             return dict(field.choices).get(value, value) | ||||
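|  | ||||
| # Usage sketch for the get_<field>_display helpers created above; the | ||||
| # Shirt model and its choices are invented for illustration. | ||||
| from mongoengine import Document, StringField | ||||
|  | ||||
| class Shirt(Document): | ||||
|     size = StringField(max_length=1, | ||||
|                        choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'))) | ||||
|  | ||||
| shirt = Shirt(size='M') | ||||
| print(shirt.get_size_display())  # prints 'Medium' | ||||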
|   | ||||
| @@ -4,21 +4,17 @@ import weakref | ||||
|  | ||||
| from bson import DBRef, ObjectId, SON | ||||
| import pymongo | ||||
| import six | ||||
|  | ||||
| from mongoengine.base.common import ALLOW_INHERITANCE | ||||
| from mongoengine.base.datastructures import ( | ||||
|     BaseDict, BaseList, EmbeddedDocumentList | ||||
| ) | ||||
| from mongoengine.base.common import UPDATE_OPERATORS | ||||
| from mongoengine.base.datastructures import (BaseDict, BaseList, | ||||
|                                              EmbeddedDocumentList) | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import ValidationError | ||||
|  | ||||
| __all__ = ("BaseField", "ComplexBaseField", | ||||
|            "ObjectIdField", "GeoJsonBaseField") | ||||
|  | ||||
|  | ||||
| UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', | ||||
|                         'push_all', 'pull', 'pull_all', 'add_to_set', | ||||
|                         'set_on_insert', 'min', 'max']) | ||||
| __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | ||||
|            'GeoJsonBaseField') | ||||
|  | ||||
|  | ||||
| class BaseField(object): | ||||
| @@ -27,7 +23,6 @@ class BaseField(object): | ||||
|  | ||||
|     .. versionchanged:: 0.5 - added verbose and help text | ||||
|     """ | ||||
|  | ||||
|     name = None | ||||
|     _geo_index = False | ||||
|     _auto_gen = False  # Call `generate` to generate a value | ||||
| @@ -73,7 +68,7 @@ class BaseField(object): | ||||
|         self.db_field = (db_field or name) if not primary_key else '_id' | ||||
|  | ||||
|         if name: | ||||
|             msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" | ||||
|             msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|         self.required = required or primary_key | ||||
|         self.default = default | ||||
| @@ -89,7 +84,7 @@ class BaseField(object): | ||||
|         # Detect and report conflicts between metadata and base properties. | ||||
|         conflicts = set(dir(self)) & set(kwargs) | ||||
|         if conflicts: | ||||
|             raise TypeError("%s already has attribute(s): %s" % ( | ||||
|             raise TypeError('%s already has attribute(s): %s' % ( | ||||
|                 self.__class__.__name__, ', '.join(conflicts))) | ||||
|  | ||||
|         # Assign metadata to the instance | ||||
| @@ -147,25 +142,21 @@ class BaseField(object): | ||||
|                     v._instance = weakref.proxy(instance) | ||||
|         instance._data[self.name] = value | ||||
|  | ||||
|     def error(self, message="", errors=None, field_name=None): | ||||
|         """Raises a ValidationError. | ||||
|         """ | ||||
|     def error(self, message='', errors=None, field_name=None): | ||||
|         """Raise a ValidationError.""" | ||||
|         field_name = field_name if field_name else self.name | ||||
|         raise ValidationError(message, errors=errors, field_name=field_name) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         """Convert a MongoDB-compatible type to a Python type. | ||||
|         """ | ||||
|         """Convert a MongoDB-compatible type to a Python type.""" | ||||
|         return value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         """Convert a Python type to a MongoDB-compatible type. | ||||
|         """ | ||||
|         """Convert a Python type to a MongoDB-compatible type.""" | ||||
|         return self.to_python(value) | ||||
|  | ||||
|     def _to_mongo_safe_call(self, value, use_db_field=True, fields=None): | ||||
|         """A helper method to call to_mongo with proper inputs | ||||
|         """ | ||||
|         """Helper method to call to_mongo with proper inputs.""" | ||||
|         f_inputs = self.to_mongo.__code__.co_varnames | ||||
|         ex_vars = {} | ||||
|         if 'fields' in f_inputs: | ||||
| @@ -177,15 +168,13 @@ class BaseField(object): | ||||
|         return self.to_mongo(value, **ex_vars) | ||||
|  | ||||
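| # A minimal custom-field sketch built on the to_python/to_mongo hooks | ||||
| # above. UpperCaseStringField is invented for illustration and assumes | ||||
| # BaseField is importable from mongoengine.base. | ||||
| from mongoengine.base import BaseField | ||||
|  | ||||
| class UpperCaseStringField(BaseField): | ||||
|     def to_python(self, value): | ||||
|         # MongoDB -> Python: pass strings through unchanged | ||||
|         return value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         # Python -> MongoDB: normalize to upper case on save | ||||
|         return value.upper() | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, str): | ||||
|             self.error('UpperCaseStringField only accepts strings') | ||||
|  | ||||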
|     def prepare_query_value(self, op, value): | ||||
|         """Prepare a value that is being used in a query for PyMongo. | ||||
|         """ | ||||
|         """Prepare a value that is being used in a query for PyMongo.""" | ||||
|         if op in UPDATE_OPERATORS: | ||||
|             self.validate(value) | ||||
|         return value | ||||
|  | ||||
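| # Sketch of the behavior above: values passed with update operators are | ||||
| # validated, while plain comparisons pass through. The Person model and | ||||
| # the database name are invented; a local mongod is assumed. | ||||
| from mongoengine import Document, StringField, connect | ||||
|  | ||||
| connect('example_db') | ||||
|  | ||||
| class Person(Document): | ||||
|     name = StringField(max_length=10) | ||||
|  | ||||
| Person.objects(name='old').update(set__name='new')  # 'new' is validated | ||||
| Person.objects(name='no-validation-on-lookups')  # comparisons pass through | ||||
|  | ||||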
|     def validate(self, value, clean=True): | ||||
|         """Perform validation on a value. | ||||
|         """ | ||||
|         """Perform validation on a value.""" | ||||
|         pass | ||||
|  | ||||
|     def _validate_choices(self, value): | ||||
| @@ -200,11 +189,13 @@ class BaseField(object): | ||||
|         if isinstance(value, (Document, EmbeddedDocument)): | ||||
|             if not any(isinstance(value, c) for c in choice_list): | ||||
|                 self.error( | ||||
|                     'Value must be instance of %s' % unicode(choice_list) | ||||
|                     'Value must be an instance of %s' % ( | ||||
|                         six.text_type(choice_list) | ||||
|                     ) | ||||
|                 ) | ||||
|         # Choices which are types other than Documents | ||||
|         elif value not in choice_list: | ||||
|             self.error('Value must be one of %s' % unicode(choice_list)) | ||||
|             self.error('Value must be one of %s' % six.text_type(choice_list)) | ||||
|  | ||||
|     def _validate(self, value, **kwargs): | ||||
|         # Check the Choices Constraint | ||||
| @@ -247,8 +238,7 @@ class ComplexBaseField(BaseField): | ||||
|     field = None | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         """Descriptor to automatically dereference references. | ||||
|         """ | ||||
|         """Descriptor to automatically dereference references.""" | ||||
|         if instance is None: | ||||
|             # Document class being used rather than a document object | ||||
|             return self | ||||
| @@ -260,7 +250,7 @@ class ComplexBaseField(BaseField): | ||||
|                        (self.field is None or isinstance(self.field, | ||||
|                                                          (GenericReferenceField, ReferenceField)))) | ||||
|  | ||||
|         _dereference = _import_class("DeReference")() | ||||
|         _dereference = _import_class('DeReference')() | ||||
|  | ||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference | ||||
|         if instance._initialised and dereference and instance._data.get(self.name): | ||||
| @@ -295,9 +285,8 @@ class ComplexBaseField(BaseField): | ||||
|         return value | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         """Convert a MongoDB-compatible type to a Python type. | ||||
|         """ | ||||
|         if isinstance(value, basestring): | ||||
|         """Convert a MongoDB-compatible type to a Python type.""" | ||||
|         if isinstance(value, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_python'): | ||||
| @@ -307,14 +296,14 @@ class ComplexBaseField(BaseField): | ||||
|         if not hasattr(value, 'items'): | ||||
|             try: | ||||
|                 is_list = True | ||||
|                 value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|                 value = {k: v for k, v in enumerate(value)} | ||||
|             except TypeError:  # Not iterable; return the value | ||||
|                 return value | ||||
|  | ||||
|         if self.field: | ||||
|             self.field._auto_dereference = self._auto_dereference | ||||
|             value_dict = dict([(key, self.field.to_python(item)) | ||||
|                                for key, item in value.items()]) | ||||
|             value_dict = {key: self.field.to_python(item) | ||||
|                           for key, item in value.items()} | ||||
|         else: | ||||
|             Document = _import_class('Document') | ||||
|             value_dict = {} | ||||
| @@ -337,13 +326,12 @@ class ComplexBaseField(BaseField): | ||||
|         return value_dict | ||||
|  | ||||
|     def to_mongo(self, value, use_db_field=True, fields=None): | ||||
|         """Convert a Python type to a MongoDB-compatible type. | ||||
|         """ | ||||
|         Document = _import_class("Document") | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         GenericReferenceField = _import_class("GenericReferenceField") | ||||
|         """Convert a Python type to a MongoDB-compatible type.""" | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|         if isinstance(value, basestring): | ||||
|         if isinstance(value, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_mongo'): | ||||
| @@ -360,13 +348,15 @@ class ComplexBaseField(BaseField): | ||||
|         if not hasattr(value, 'items'): | ||||
|             try: | ||||
|                 is_list = True | ||||
|                 value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|                 value = {k: v for k, v in enumerate(value)} | ||||
|             except TypeError:  # Not iterable; return the value | ||||
|                 return value | ||||
|  | ||||
|         if self.field: | ||||
|             value_dict = dict([(key, self.field._to_mongo_safe_call(item, use_db_field, fields)) | ||||
|                                for key, item in value.iteritems()]) | ||||
|             value_dict = { | ||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||
|                 for key, item in value.iteritems() | ||||
|             } | ||||
|         else: | ||||
|             value_dict = {} | ||||
|             for k, v in value.iteritems(): | ||||
| @@ -380,9 +370,7 @@ class ComplexBaseField(BaseField): | ||||
|                     # any _cls data, so making it a generic reference | ||||
|                     # allows us to dereference it | ||||
|                     meta = getattr(v, '_meta', {}) | ||||
|                     allow_inheritance = ( | ||||
|                         meta.get('allow_inheritance', ALLOW_INHERITANCE) | ||||
|                         is True) | ||||
|                     allow_inheritance = meta.get('allow_inheritance') | ||||
|                     if not allow_inheritance and not self.field: | ||||
|                         value_dict[k] = GenericReferenceField().to_mongo(v) | ||||
|                     else: | ||||
| @@ -404,8 +392,7 @@ class ComplexBaseField(BaseField): | ||||
|         return value_dict | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """If field is provided ensure the value is valid. | ||||
|         """ | ||||
|         """If field is provided ensure the value is valid.""" | ||||
|         errors = {} | ||||
|         if self.field: | ||||
|             if hasattr(value, 'iteritems') or hasattr(value, 'items'): | ||||
| @@ -415,9 +402,9 @@ class ComplexBaseField(BaseField): | ||||
|             for k, v in sequence: | ||||
|                 try: | ||||
|                     self.field._validate(v) | ||||
|                 except ValidationError, error: | ||||
|                 except ValidationError as error: | ||||
|                     errors[k] = error.errors or error | ||||
|                 except (ValueError, AssertionError), error: | ||||
|                 except (ValueError, AssertionError) as error: | ||||
|                     errors[k] = error | ||||
|  | ||||
|             if errors: | ||||
| @@ -443,8 +430,7 @@ class ComplexBaseField(BaseField): | ||||
|  | ||||
|  | ||||
| class ObjectIdField(BaseField): | ||||
|     """A field wrapper around MongoDB's ObjectIds. | ||||
|     """ | ||||
|     """A field wrapper around MongoDB's ObjectIds.""" | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
| @@ -457,10 +443,10 @@ class ObjectIdField(BaseField): | ||||
|     def to_mongo(self, value): | ||||
|         if not isinstance(value, ObjectId): | ||||
|             try: | ||||
|                 return ObjectId(unicode(value)) | ||||
|             except Exception, e: | ||||
|                 return ObjectId(six.text_type(value)) | ||||
|             except Exception as e: | ||||
|                 # e.message attribute has been deprecated since Python 2.6 | ||||
|                 self.error(unicode(e)) | ||||
|                 self.error(six.text_type(e)) | ||||
|         return value | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
| @@ -468,7 +454,7 @@ class ObjectIdField(BaseField): | ||||
|  | ||||
|     def validate(self, value): | ||||
|         try: | ||||
|             ObjectId(unicode(value)) | ||||
|             ObjectId(six.text_type(value)) | ||||
|         except Exception: | ||||
|             self.error('Invalid Object ID') | ||||
|  | ||||
| @@ -480,21 +466,20 @@ class GeoJsonBaseField(BaseField): | ||||
|     """ | ||||
|  | ||||
|     _geo_index = pymongo.GEOSPHERE | ||||
|     _type = "GeoBase" | ||||
|     _type = 'GeoBase' | ||||
|  | ||||
|     def __init__(self, auto_index=True, *args, **kwargs): | ||||
|         """ | ||||
|         :param bool auto_index: Automatically create a "2dsphere" index.\ | ||||
|         :param bool auto_index: Automatically create a '2dsphere' index.\ | ||||
|             Defaults to `True`. | ||||
|         """ | ||||
|         self._name = "%sField" % self._type | ||||
|         self._name = '%sField' % self._type | ||||
|         if not auto_index: | ||||
|             self._geo_index = False | ||||
|         super(GeoJsonBaseField, self).__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Validate the GeoJson object based on its type | ||||
|         """ | ||||
|         """Validate the GeoJson object based on its type.""" | ||||
|         if isinstance(value, dict): | ||||
|             if set(value.keys()) == set(['type', 'coordinates']): | ||||
|                 if value['type'] != self._type: | ||||
| @@ -509,7 +494,7 @@ class GeoJsonBaseField(BaseField): | ||||
|             self.error('%s can only accept lists of [x, y]' % self._name) | ||||
|             return | ||||
|  | ||||
|         validate = getattr(self, "_validate_%s" % self._type.lower()) | ||||
|         validate = getattr(self, '_validate_%s' % self._type.lower()) | ||||
|         error = validate(value) | ||||
|         if error: | ||||
|             self.error(error) | ||||
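|  | ||||
| # Hedged sketch of the validation flow above using PointField, which | ||||
| # subclasses GeoJsonBaseField with _type = 'Point'; Venue is invented. | ||||
| from mongoengine import Document, PointField, ValidationError | ||||
|  | ||||
| class Venue(Document): | ||||
|     location = PointField() | ||||
|  | ||||
| Venue(location=[-73.97, 40.77]).validate()  # OK: [x, y] pair of numbers | ||||
| try: | ||||
|     Venue(location=[-73.97]).validate()     # not a two-dimensional point | ||||
| except ValidationError as e: | ||||
|     print(e) | ||||
|  | ||||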
| @@ -522,7 +507,7 @@ class GeoJsonBaseField(BaseField): | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid Polygon must contain at least one valid linestring" | ||||
|             return 'Invalid Polygon must contain at least one valid linestring' | ||||
|  | ||||
|         errors = [] | ||||
|         for val in value: | ||||
| @@ -533,12 +518,12 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return "Invalid Polygon:\n%s" % ", ".join(errors) | ||||
|                 return 'Invalid Polygon:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return "%s" % ", ".join(errors) | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_linestring(self, value, top_level=True): | ||||
|         """Validates a linestring""" | ||||
|         """Validate a linestring.""" | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'LineStrings must contain list of coordinate pairs' | ||||
|  | ||||
| @@ -546,7 +531,7 @@ class GeoJsonBaseField(BaseField): | ||||
|         try: | ||||
|             value[0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid LineString must contain at least one valid point" | ||||
|             return 'Invalid LineString must contain at least one valid point' | ||||
|  | ||||
|         errors = [] | ||||
|         for val in value: | ||||
| @@ -555,19 +540,19 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return "Invalid LineString:\n%s" % ", ".join(errors) | ||||
|                 return 'Invalid LineString:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return "%s" % ", ".join(errors) | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_point(self, value): | ||||
|         """Validate each set of coords""" | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return 'Points must be a list of coordinate pairs' | ||||
|         elif not len(value) == 2: | ||||
|             return "Value (%s) must be a two-dimensional point" % repr(value) | ||||
|             return 'Value (%s) must be a two-dimensional point' % repr(value) | ||||
|         elif (not isinstance(value[0], (float, int)) or | ||||
|               not isinstance(value[1], (float, int))): | ||||
|             return "Both values (%s) in point must be float or int" % repr(value) | ||||
|             return 'Both values (%s) in point must be float or int' % repr(value) | ||||
|  | ||||
|     def _validate_multipoint(self, value): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
| @@ -577,7 +562,7 @@ class GeoJsonBaseField(BaseField): | ||||
|         try: | ||||
|             value[0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid MultiPoint must contain at least one valid point" | ||||
|             return 'Invalid MultiPoint must contain at least one valid point' | ||||
|  | ||||
|         errors = [] | ||||
|         for point in value: | ||||
| @@ -586,7 +571,7 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|  | ||||
|         if errors: | ||||
|             return "%s" % ", ".join(errors) | ||||
|             return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_multilinestring(self, value, top_level=True): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
| @@ -596,7 +581,7 @@ class GeoJsonBaseField(BaseField): | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid MultiLineString must contain at least one valid linestring" | ||||
|             return 'Invalid MultiLineString must contain at least one valid linestring' | ||||
|  | ||||
|         errors = [] | ||||
|         for linestring in value: | ||||
| @@ -606,9 +591,9 @@ class GeoJsonBaseField(BaseField): | ||||
|  | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return "Invalid MultiLineString:\n%s" % ", ".join(errors) | ||||
|                 return 'Invalid MultiLineString:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return "%s" % ", ".join(errors) | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_multipolygon(self, value): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
| @@ -618,7 +603,7 @@ class GeoJsonBaseField(BaseField): | ||||
|         try: | ||||
|             value[0][0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid MultiPolygon must contain at least one valid Polygon" | ||||
|             return 'Invalid MultiPolygon must contain at least one valid Polygon' | ||||
|  | ||||
|         errors = [] | ||||
|         for polygon in value: | ||||
| @@ -627,9 +612,9 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|  | ||||
|         if errors: | ||||
|             return "Invalid MultiPolygon:\n%s" % ", ".join(errors) | ||||
|             return 'Invalid MultiPolygon:\n%s' % ', '.join(errors) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if isinstance(value, dict): | ||||
|             return value | ||||
|         return SON([("type", self._type), ("coordinates", value)]) | ||||
|         return SON([('type', self._type), ('coordinates', value)]) | ||||
|   | ||||
| @@ -1,10 +1,11 @@ | ||||
| import warnings | ||||
|  | ||||
| from mongoengine.base.common import ALLOW_INHERITANCE, _document_registry | ||||
| import six | ||||
|  | ||||
| from mongoengine.base.common import _document_registry | ||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import InvalidDocumentError | ||||
| from mongoengine.python_support import PY3 | ||||
| from mongoengine.queryset import (DO_NOTHING, DoesNotExist, | ||||
|                                   MultipleObjectsReturned, | ||||
|                                   QuerySetManager) | ||||
| @@ -45,7 +46,8 @@ class DocumentMetaclass(type): | ||||
|             attrs['_meta'] = meta | ||||
|             attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract | ||||
|  | ||||
|         if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE): | ||||
|         # If allow_inheritance is True, add a "_cls" string field to the attrs | ||||
|         if attrs['_meta'].get('allow_inheritance'): | ||||
|             StringField = _import_class('StringField') | ||||
|             attrs['_cls'] = StringField() | ||||
|  | ||||
| @@ -87,16 +89,17 @@ class DocumentMetaclass(type): | ||||
|         # Ensure no duplicate db_fields | ||||
|         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] | ||||
|         if duplicate_db_fields: | ||||
|             msg = ("Multiple db_fields defined for: %s " % | ||||
|                    ", ".join(duplicate_db_fields)) | ||||
|             msg = ('Multiple db_fields defined for: %s ' % | ||||
|                    ', '.join(duplicate_db_fields)) | ||||
|             raise InvalidDocumentError(msg) | ||||
|  | ||||
|         # Set _fields and db_field maps | ||||
|         attrs['_fields'] = doc_fields | ||||
|         attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) | ||||
|                                        for k, v in doc_fields.iteritems()]) | ||||
|         attrs['_reverse_db_field_map'] = dict( | ||||
|             (v, k) for k, v in attrs['_db_field_map'].iteritems()) | ||||
|         attrs['_db_field_map'] = {k: getattr(v, 'db_field', k) | ||||
|                                   for k, v in doc_fields.items()} | ||||
|         attrs['_reverse_db_field_map'] = { | ||||
|             v: k for k, v in attrs['_db_field_map'].items() | ||||
|         } | ||||
|  | ||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( | ||||
|                                          (v.creation_counter, v.name) | ||||
| @@ -116,10 +119,8 @@ class DocumentMetaclass(type): | ||||
|             if hasattr(base, '_meta'): | ||||
|                 # Prevent inheritance of classes where allow_inheritance | ||||
|                 # isn't set or is set to False | ||||
|                 allow_inheritance = base._meta.get('allow_inheritance', | ||||
|                                                    ALLOW_INHERITANCE) | ||||
|                 if (allow_inheritance is not True and | ||||
|                         not base._meta.get('abstract')): | ||||
|                 allow_inheritance = base._meta.get('allow_inheritance') | ||||
|                 if not allow_inheritance and not base._meta.get('abstract'): | ||||
|                     raise ValueError('Document %s may not be subclassed' % | ||||
|                                      base.__name__) | ||||
|  | ||||
| @@ -161,7 +162,7 @@ class DocumentMetaclass(type): | ||||
|         # module continues to use im_func and im_self, so the code below | ||||
|         # copies __func__ into im_func and __self__ into im_self for | ||||
|         # classmethod objects in Document derived classes. | ||||
|         if PY3: | ||||
|         if six.PY3: | ||||
|             for val in new_class.__dict__.values(): | ||||
|                 if isinstance(val, classmethod): | ||||
|                     f = val.__get__(new_class) | ||||
| @@ -179,11 +180,11 @@ class DocumentMetaclass(type): | ||||
|             if isinstance(f, CachedReferenceField): | ||||
|  | ||||
|                 if issubclass(new_class, EmbeddedDocument): | ||||
|                     raise InvalidDocumentError( | ||||
|                         "CachedReferenceFields is not allowed in EmbeddedDocuments") | ||||
|                     raise InvalidDocumentError('CachedReferenceFields is not ' | ||||
|                                                'allowed in EmbeddedDocuments') | ||||
|                 if not f.document_type: | ||||
|                     raise InvalidDocumentError( | ||||
|                         "Document is not available to sync") | ||||
|                         'Document is not available to sync') | ||||
|  | ||||
|                 if f.auto_sync: | ||||
|                     f.start_listener() | ||||
| @@ -195,8 +196,8 @@ class DocumentMetaclass(type): | ||||
|                                       'reverse_delete_rule', | ||||
|                                       DO_NOTHING) | ||||
|                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: | ||||
|                     msg = ("Reverse delete rules are not supported " | ||||
|                            "for %s (field: %s)" % | ||||
|                     msg = ('Reverse delete rules are not supported ' | ||||
|                            'for %s (field: %s)' % | ||||
|                            (field.__class__.__name__, field.name)) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|  | ||||
| @@ -204,16 +205,16 @@ class DocumentMetaclass(type): | ||||
|  | ||||
|             if delete_rule != DO_NOTHING: | ||||
|                 if issubclass(new_class, EmbeddedDocument): | ||||
|                     msg = ("Reverse delete rules are not supported for " | ||||
|                            "EmbeddedDocuments (field: %s)" % field.name) | ||||
|                     msg = ('Reverse delete rules are not supported for ' | ||||
|                            'EmbeddedDocuments (field: %s)' % field.name) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|                 f.document_type.register_delete_rule(new_class, | ||||
|                                                      field.name, delete_rule) | ||||
|  | ||||
|             if (field.name and hasattr(Document, field.name) and | ||||
|                     EmbeddedDocument not in new_class.mro()): | ||||
|                 msg = ("%s is a document method and not a valid " | ||||
|                        "field name" % field.name) | ||||
|                 msg = ('%s is a document method and not a valid ' | ||||
|                        'field name' % field.name) | ||||
|                 raise InvalidDocumentError(msg) | ||||
|  | ||||
|         return new_class | ||||
| @@ -271,6 +272,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|                 'index_drop_dups': False, | ||||
|                 'index_opts': None, | ||||
|                 'delete_rules': None, | ||||
|  | ||||
|                 # allow_inheritance can be True, False, and None. True means | ||||
|                 # "allow inheritance", False means "don't allow inheritance", | ||||
|                 # None means "do whatever your parent does, or don't allow | ||||
|                 # inheritance if you're a top-level class". | ||||
|                 'allow_inheritance': None, | ||||
|             } | ||||
|             attrs['_is_base_cls'] = True | ||||
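|  | ||||
| # Illustration of the three-valued allow_inheritance described above; | ||||
| # the class names are invented for this example. | ||||
| from mongoengine import Document, StringField | ||||
|  | ||||
| class Animal(Document): | ||||
|     name = StringField() | ||||
|     meta = {'allow_inheritance': True}  # subclasses allowed, _cls stored | ||||
|  | ||||
| class Dog(Animal):  # OK: the parent explicitly allows inheritance | ||||
|     pass | ||||
|  | ||||
| class Standalone(Document):  # allow_inheritance defaults to None | ||||
|     pass | ||||
|  | ||||
| # class Sub(Standalone): ...  # would raise ValueError at class creation | ||||
|  | ||||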
| @@ -303,7 +309,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|         # If parent wasn't an abstract class | ||||
|         if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and | ||||
|                 not parent_doc_cls._meta.get('abstract', True)): | ||||
|             msg = "Trying to set a collection on a subclass (%s)" % name | ||||
|             msg = 'Trying to set a collection on a subclass (%s)' % name | ||||
|             warnings.warn(msg, SyntaxWarning) | ||||
|             del attrs['_meta']['collection'] | ||||
|  | ||||
| @@ -311,7 +317,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|         if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): | ||||
|             if (parent_doc_cls and | ||||
|                     not parent_doc_cls._meta.get('abstract', False)): | ||||
|                 msg = "Abstract document cannot have non-abstract base" | ||||
|                 msg = 'Abstract document cannot have non-abstract base' | ||||
|                 raise ValueError(msg) | ||||
|             return super_new(cls, name, bases, attrs) | ||||
|  | ||||
| @@ -334,12 +340,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|         meta.merge(attrs.get('_meta', {}))  # Top level meta | ||||
|  | ||||
|         # Only simple classes (direct subclasses of Document) | ||||
|         # may set allow_inheritance to False | ||||
|         # Only simple classes (i.e. direct subclasses of Document) may set | ||||
|         # allow_inheritance to False. If the base Document allows inheritance, | ||||
|         # none of its subclasses can override allow_inheritance to False. | ||||
|         simple_class = all([b._meta.get('abstract') | ||||
|                             for b in flattened_bases if hasattr(b, '_meta')]) | ||||
|         if (not simple_class and meta['allow_inheritance'] is False and | ||||
|                 not meta['abstract']): | ||||
|         if ( | ||||
|             not simple_class and | ||||
|             meta['allow_inheritance'] is False and | ||||
|             not meta['abstract'] | ||||
|         ): | ||||
|             raise ValueError('Only direct subclasses of Document may set ' | ||||
|                              '"allow_inheritance" to False') | ||||
|  | ||||
|   | ||||
| @@ -1,11 +1,14 @@ | ||||
| from pymongo import MongoClient, ReadPreference, uri_parser | ||||
| from mongoengine.python_support import (IS_PYMONGO_3, str_types) | ||||
| import six | ||||
|  | ||||
| __all__ = ['ConnectionError', 'connect', 'register_connection', | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
|  | ||||
| __all__ = ['MongoEngineConnectionError', 'connect', 'register_connection', | ||||
|            'DEFAULT_CONNECTION_NAME'] | ||||
|  | ||||
|  | ||||
| DEFAULT_CONNECTION_NAME = 'default' | ||||
|  | ||||
| if IS_PYMONGO_3: | ||||
|     READ_PREFERENCE = ReadPreference.PRIMARY | ||||
| else: | ||||
| @@ -13,7 +16,10 @@ else: | ||||
|     READ_PREFERENCE = False | ||||
|  | ||||
|  | ||||
| class ConnectionError(Exception): | ||||
| class MongoEngineConnectionError(Exception): | ||||
|     """Error raised when the database connection can't be established or | ||||
|     when a connection with a requested alias can't be retrieved. | ||||
|     """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
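| # Sketch of the renamed exception in use; the alias below is deliberately | ||||
| # left unregistered so that get_connection() fails. | ||||
| from mongoengine.connection import MongoEngineConnectionError, get_connection | ||||
|  | ||||
| try: | ||||
|     get_connection('no-such-alias') | ||||
| except MongoEngineConnectionError as exc: | ||||
|     print('Cannot get a connection: %s' % exc) | ||||
|  | ||||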
| @@ -24,7 +30,9 @@ _dbs = {} | ||||
|  | ||||
| def register_connection(alias, name=None, host=None, port=None, | ||||
|                         read_preference=READ_PREFERENCE, | ||||
|                         username=None, password=None, authentication_source=None, | ||||
|                         username=None, password=None, | ||||
|                         authentication_source=None, | ||||
|                         authentication_mechanism=None, | ||||
|                         **kwargs): | ||||
|     """Add a connection. | ||||
|  | ||||
| @@ -38,14 +46,15 @@ def register_connection(alias, name=None, host=None, port=None, | ||||
|     :param username: username to authenticate with | ||||
|     :param password: password to authenticate with | ||||
|     :param authentication_source: database to authenticate against | ||||
|     :param authentication_mechanism: database authentication mechanism. | ||||
|         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, and | ||||
|         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||
|     :param is_mock: explicitly use mongomock for this connection | ||||
|         (can also be done by using `mongomock://` as db host prefix) | ||||
|     :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver | ||||
|  | ||||
|     .. versionchanged:: 0.10.6 - added mongomock support | ||||
|     """ | ||||
|     global _connection_settings | ||||
|  | ||||
|     conn_settings = { | ||||
|         'name': name or 'test', | ||||
|         'host': host or 'localhost', | ||||
| @@ -53,35 +62,45 @@ def register_connection(alias, name=None, host=None, port=None, | ||||
|         'read_preference': read_preference, | ||||
|         'username': username, | ||||
|         'password': password, | ||||
|         'authentication_source': authentication_source | ||||
|         'authentication_source': authentication_source, | ||||
|         'authentication_mechanism': authentication_mechanism | ||||
|     } | ||||
|  | ||||
|     conn_host = conn_settings['host'] | ||||
|     # host can be a list or a string, so if string, force to a list | ||||
|     if isinstance(conn_host, str_types): | ||||
|  | ||||
|     # Host can be a list or a string, so if string, force to a list. | ||||
|     if isinstance(conn_host, six.string_types): | ||||
|         conn_host = [conn_host] | ||||
|  | ||||
|     resolved_hosts = [] | ||||
|     for entity in conn_host: | ||||
|         # Handle uri style connections | ||||
|  | ||||
|         # Handle Mongomock | ||||
|         if entity.startswith('mongomock://'): | ||||
|             conn_settings['is_mock'] = True | ||||
|             # `mongomock://` is not a valid URL prefix and must be replaced by `mongodb://` | ||||
|             resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1)) | ||||
|  | ||||
|         # Handle URI style connections, only updating connection params which | ||||
|         # were explicitly specified in the URI. | ||||
|         elif '://' in entity: | ||||
|             uri_dict = uri_parser.parse_uri(entity) | ||||
|             resolved_hosts.append(entity) | ||||
|             conn_settings.update({ | ||||
|                 'name': uri_dict.get('database') or name, | ||||
|                 'username': uri_dict.get('username'), | ||||
|                 'password': uri_dict.get('password'), | ||||
|                 'read_preference': read_preference, | ||||
|             }) | ||||
|  | ||||
|             if uri_dict.get('database'): | ||||
|                 conn_settings['name'] = uri_dict.get('database') | ||||
|  | ||||
|             for param in ('read_preference', 'username', 'password'): | ||||
|                 if uri_dict.get(param): | ||||
|                     conn_settings[param] = uri_dict[param] | ||||
|  | ||||
|             uri_options = uri_dict['options'] | ||||
|             if 'replicaset' in uri_options: | ||||
|                 conn_settings['replicaSet'] = True | ||||
|             if 'authsource' in uri_options: | ||||
|                 conn_settings['authentication_source'] = uri_options['authsource'] | ||||
|             if 'authmechanism' in uri_options: | ||||
|                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] | ||||
|         else: | ||||
|             resolved_hosts.append(entity) | ||||
|     conn_settings['host'] = resolved_hosts | ||||
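|  | ||||
| # Usage sketch for the host handling above; the database names and | ||||
| # credentials are invented placeholders. | ||||
| from mongoengine import connect | ||||
|  | ||||
| # 'mongomock://' switches this alias to mongomock's MongoClient (the | ||||
| # mongomock package must be installed): | ||||
| connect('testdb', host='mongomock://localhost') | ||||
|  | ||||
| # In a 'mongodb://' URI, only the parameters actually present in the URI | ||||
| # (db name, credentials, authSource, ...) override the keyword defaults: | ||||
| connect(host='mongodb://ro_user:secret@localhost:27017/reports?authSource=admin') | ||||
|  | ||||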
| @@ -95,9 +114,7 @@ def register_connection(alias, name=None, host=None, port=None, | ||||
|  | ||||
|  | ||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||
|     global _connections | ||||
|     global _dbs | ||||
|  | ||||
|     """Close the connection with a given alias.""" | ||||
|     if alias in _connections: | ||||
|         get_connection(alias=alias).close() | ||||
|         del _connections[alias] | ||||
| @@ -106,69 +123,99 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||
|  | ||||
|  | ||||
| def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|     global _connections | ||||
|     """Return a connection with a given alias.""" | ||||
|  | ||||
|     # Connect to the database if not already connected | ||||
|     if reconnect: | ||||
|         disconnect(alias) | ||||
|  | ||||
|     if alias not in _connections: | ||||
|         if alias not in _connection_settings: | ||||
|             msg = 'Connection with alias "%s" has not been defined' % alias | ||||
|             if alias == DEFAULT_CONNECTION_NAME: | ||||
|                 msg = 'You have not defined a default connection' | ||||
|             raise ConnectionError(msg) | ||||
|         conn_settings = _connection_settings[alias].copy() | ||||
|     # If the requested alias already exists in the _connections list, return | ||||
|     # it immediately. | ||||
|     if alias in _connections: | ||||
|         return _connections[alias] | ||||
|  | ||||
|         conn_settings.pop('name', None) | ||||
|         conn_settings.pop('username', None) | ||||
|         conn_settings.pop('password', None) | ||||
|         conn_settings.pop('authentication_source', None) | ||||
|  | ||||
|         is_mock = conn_settings.pop('is_mock', None) | ||||
|         if is_mock: | ||||
|             # Use MongoClient from mongomock | ||||
|             try: | ||||
|                 import mongomock | ||||
|             except ImportError: | ||||
|                 raise RuntimeError('You need mongomock installed ' | ||||
|                                    'to mock MongoEngine.') | ||||
|             connection_class = mongomock.MongoClient | ||||
|     # Validate that the requested alias exists in the _connection_settings. | ||||
|     # Raise MongoEngineConnectionError if it doesn't. | ||||
|     if alias not in _connection_settings: | ||||
|         if alias == DEFAULT_CONNECTION_NAME: | ||||
|             msg = 'You have not defined a default connection' | ||||
|         else: | ||||
|             # Use MongoClient from pymongo | ||||
|             connection_class = MongoClient | ||||
|             msg = 'Connection with alias "%s" has not been defined' % alias | ||||
|         raise MongoEngineConnectionError(msg) | ||||
|  | ||||
|     def _clean_settings(settings_dict): | ||||
|         irrelevant_fields = set([ | ||||
|             'name', 'username', 'password', 'authentication_source', | ||||
|             'authentication_mechanism' | ||||
|         ]) | ||||
|         return { | ||||
|             k: v for k, v in settings_dict.items() | ||||
|             if k not in irrelevant_fields | ||||
|         } | ||||
|  | ||||
|     # Retrieve a copy of the connection settings associated with the requested | ||||
|     # alias and remove the database name and authentication info (we don't | ||||
|     # care about them at this point). | ||||
|     conn_settings = _clean_settings(_connection_settings[alias].copy()) | ||||
|  | ||||
|     # Determine if we should use PyMongo's or mongomock's MongoClient. | ||||
|     is_mock = conn_settings.pop('is_mock', False) | ||||
|     if is_mock: | ||||
|         try: | ||||
|             import mongomock | ||||
|         except ImportError: | ||||
|             raise RuntimeError('You need mongomock installed to mock ' | ||||
|                                'MongoEngine.') | ||||
|         connection_class = mongomock.MongoClient | ||||
|     else: | ||||
|         connection_class = MongoClient | ||||
|  | ||||
|         # For replica set connections with PyMongo 2.x, use | ||||
|         # MongoReplicaSetClient. | ||||
|         # TODO remove this once we stop supporting PyMongo 2.x. | ||||
|         if 'replicaSet' in conn_settings and not IS_PYMONGO_3: | ||||
|             connection_class = MongoReplicaSetClient | ||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||
|  | ||||
|             # hosts_or_uri has to be a string, so if 'host' was provided | ||||
|             # as a list, join its parts and separate them by ',' | ||||
|             if isinstance(conn_settings['hosts_or_uri'], list): | ||||
|                 conn_settings['hosts_or_uri'] = ','.join( | ||||
|                     conn_settings['hosts_or_uri']) | ||||
|  | ||||
|         if 'replicaSet' in conn_settings: | ||||
|             # Discard port since it can't be used on MongoReplicaSetClient | ||||
|             conn_settings.pop('port', None) | ||||
|             # Discard replicaSet if it's not a string | ||||
|             if not isinstance(conn_settings['replicaSet'], basestring): | ||||
|                 conn_settings.pop('replicaSet', None) | ||||
|             if not IS_PYMONGO_3: | ||||
|                 connection_class = MongoReplicaSetClient | ||||
|                 conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||
|  | ||||
|     # Iterate over all of the connection settings and if a connection with | ||||
|     # the same parameters is already established, use it instead of creating | ||||
|     # a new one. | ||||
|     existing_connection = None | ||||
|     connection_settings_iterator = ( | ||||
|         (db_alias, settings.copy()) | ||||
|         for db_alias, settings in _connection_settings.items() | ||||
|     ) | ||||
|     for db_alias, connection_settings in connection_settings_iterator: | ||||
|         connection_settings = _clean_settings(connection_settings) | ||||
|         if conn_settings == connection_settings and _connections.get(db_alias): | ||||
|             existing_connection = _connections[db_alias] | ||||
|             break | ||||
|  | ||||
|     # If an existing connection was found, assign it to the new alias | ||||
|     if existing_connection: | ||||
|         _connections[alias] = existing_connection | ||||
|     else: | ||||
|         # Otherwise, create the new connection for this alias. Raise | ||||
|         # MongoEngineConnectionError if it can't be established. | ||||
|         try: | ||||
|             connection = None | ||||
|             # check for shared connections | ||||
|             connection_settings_iterator = ( | ||||
|                 (db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems()) | ||||
|             for db_alias, connection_settings in connection_settings_iterator: | ||||
|                 connection_settings.pop('name', None) | ||||
|                 connection_settings.pop('username', None) | ||||
|                 connection_settings.pop('password', None) | ||||
|                 connection_settings.pop('authentication_source', None) | ||||
|                 if conn_settings == connection_settings and _connections.get(db_alias, None): | ||||
|                     connection = _connections[db_alias] | ||||
|                     break | ||||
|             _connections[alias] = connection_class(**conn_settings) | ||||
|         except Exception as e: | ||||
|             raise MongoEngineConnectionError( | ||||
|                 'Cannot connect to database %s :\n%s' % (alias, e)) | ||||
|  | ||||
|             _connections[alias] = connection if connection else connection_class(**conn_settings) | ||||
|         except Exception, e: | ||||
|             raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) | ||||
|     return _connections[alias] | ||||
|  | ||||
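| # Sketch of the connection re-use implemented above: two aliases whose | ||||
| # cleaned settings match share one underlying client. Assumes a local | ||||
| # mongod; the database and alias names are invented. | ||||
| from mongoengine import connect | ||||
| from mongoengine.connection import get_connection | ||||
|  | ||||
| connect('db_one', alias='one', host='localhost', port=27017) | ||||
| connect('db_two', alias='two', host='localhost', port=27017) | ||||
|  | ||||
| # The db name is stripped by _clean_settings(), so the settings compare | ||||
| # equal and the second alias re-uses the first client: | ||||
| assert get_connection('one') is get_connection('two') | ||||
|  | ||||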
|  | ||||
| def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|     global _dbs | ||||
|     if reconnect: | ||||
|         disconnect(alias) | ||||
|  | ||||
| @@ -176,11 +223,13 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|         conn = get_connection(alias) | ||||
|         conn_settings = _connection_settings[alias] | ||||
|         db = conn[conn_settings['name']] | ||||
|         auth_kwargs = {'source': conn_settings['authentication_source']} | ||||
|         if conn_settings['authentication_mechanism'] is not None: | ||||
|             auth_kwargs['mechanism'] = conn_settings['authentication_mechanism'] | ||||
|         # Authenticate if necessary | ||||
|         if conn_settings['username'] and conn_settings['password']: | ||||
|             db.authenticate(conn_settings['username'], | ||||
|                             conn_settings['password'], | ||||
|                             source=conn_settings['authentication_source']) | ||||
|         if conn_settings['username'] and (conn_settings['password'] or | ||||
|                                           conn_settings['authentication_mechanism'] == 'MONGODB-X509'): | ||||
|             db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs) | ||||
|         _dbs[alias] = db | ||||
|     return _dbs[alias] | ||||
|  | ||||
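With the authentication change above, a password is no longer required when the mechanism is MONGODB-X509; a hedged sketch (the host and certificate subject below are placeholders, not values from this changeset):

    from mongoengine import connect

    connect(
        'testdb',
        host='mongodb://db.example.com',
        username='CN=client,OU=clients,O=Example',  # X509 subject, no password
        authentication_mechanism='MONGODB-X509',
    )
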
| @@ -197,7 +246,6 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||
|  | ||||
|     .. versionchanged:: 0.6 - added multiple database support. | ||||
|     """ | ||||
|     global _connections | ||||
|     if alias not in _connections: | ||||
|         register_connection(alias, db, **kwargs) | ||||
|  | ||||
|   | ||||
| @@ -2,12 +2,12 @@ from mongoengine.common import _import_class | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
|  | ||||
|  | ||||
| __all__ = ("switch_db", "switch_collection", "no_dereference", | ||||
|            "no_sub_classes", "query_counter") | ||||
| __all__ = ('switch_db', 'switch_collection', 'no_dereference', | ||||
|            'no_sub_classes', 'query_counter') | ||||
|  | ||||
|  | ||||
| class switch_db(object): | ||||
|     """ switch_db alias context manager. | ||||
|     """switch_db alias context manager. | ||||
|  | ||||
|     Example :: | ||||
|  | ||||
| @@ -18,15 +18,14 @@ class switch_db(object): | ||||
|         class Group(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Group(name="test").save()  # Saves in the default db | ||||
|         Group(name='test').save()  # Saves in the default db | ||||
|  | ||||
|         with switch_db(Group, 'testdb-1') as Group: | ||||
|             Group(name="hello testdb!").save()  # Saves in testdb-1 | ||||
|  | ||||
|             Group(name='hello testdb!').save()  # Saves in testdb-1 | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls, db_alias): | ||||
|         """ Construct the switch_db context manager | ||||
|         """Construct the switch_db context manager | ||||
|  | ||||
|         :param cls: the class to change the registered db | ||||
|         :param db_alias: the name of the specific database to use | ||||
| @@ -34,37 +33,36 @@ class switch_db(object): | ||||
|         self.cls = cls | ||||
|         self.collection = cls._get_collection() | ||||
|         self.db_alias = db_alias | ||||
|         self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) | ||||
|         self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME) | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the db_alias and clear the cached collection """ | ||||
|         self.cls._meta["db_alias"] = self.db_alias | ||||
|         """Change the db_alias and clear the cached collection.""" | ||||
|         self.cls._meta['db_alias'] = self.db_alias | ||||
|         self.cls._collection = None | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the db_alias and collection """ | ||||
|         self.cls._meta["db_alias"] = self.ori_db_alias | ||||
|         """Reset the db_alias and collection.""" | ||||
|         self.cls._meta['db_alias'] = self.ori_db_alias | ||||
|         self.cls._collection = self.collection | ||||
|  | ||||
|  | ||||
| class switch_collection(object): | ||||
|     """ switch_collection alias context manager. | ||||
|     """switch_collection alias context manager. | ||||
|  | ||||
|     Example :: | ||||
|  | ||||
|         class Group(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Group(name="test").save()  # Saves in the default db | ||||
|         Group(name='test').save()  # Saves in the default db | ||||
|  | ||||
|         with switch_collection(Group, 'group1') as Group: | ||||
|             Group(name="hello testdb!").save()  # Saves in group1 collection | ||||
|  | ||||
|             Group(name='hello testdb!').save()  # Saves in group1 collection | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls, collection_name): | ||||
|         """ Construct the switch_collection context manager | ||||
|         """Construct the switch_collection context manager. | ||||
|  | ||||
|         :param cls: the class to change the registered db | ||||
|         :param collection_name: the name of the collection to use | ||||
| @@ -75,7 +73,7 @@ class switch_collection(object): | ||||
|         self.collection_name = collection_name | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the _get_collection_name and clear the cached collection """ | ||||
|         """Change the _get_collection_name and clear the cached collection.""" | ||||
|  | ||||
|         @classmethod | ||||
|         def _get_collection_name(cls): | ||||
| @@ -86,24 +84,23 @@ class switch_collection(object): | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the collection """ | ||||
|         """Reset the collection.""" | ||||
|         self.cls._collection = self.ori_collection | ||||
|         self.cls._get_collection_name = self.ori_get_collection_name | ||||
|  | ||||
|  | ||||
| class no_dereference(object): | ||||
|     """ no_dereference context manager. | ||||
|     """no_dereference context manager. | ||||
|  | ||||
|     Turns off all dereferencing in Documents for the duration of the context | ||||
|     manager:: | ||||
|  | ||||
|         with no_dereference(Group) as Group: | ||||
|             Group.objects.find() | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls): | ||||
|         """ Construct the no_dereference context manager. | ||||
|         """Construct the no_dereference context manager. | ||||
|  | ||||
|         :param cls: the class to turn dereferencing off on | ||||
|         """ | ||||
| @@ -119,103 +116,102 @@ class no_dereference(object): | ||||
|                                                ComplexBaseField))] | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the objects default and _auto_dereference values""" | ||||
|         """Change the objects default and _auto_dereference values.""" | ||||
|         for field in self.deref_fields: | ||||
|             self.cls._fields[field]._auto_dereference = False | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the default and _auto_dereference values""" | ||||
|         """Reset the default and _auto_dereference values.""" | ||||
|         for field in self.deref_fields: | ||||
|             self.cls._fields[field]._auto_dereference = True | ||||
|         return self.cls | ||||
|  | ||||
|  | ||||
| class no_sub_classes(object): | ||||
|     """ no_sub_classes context manager. | ||||
|     """no_sub_classes context manager. | ||||
|  | ||||
|     Only returns instances of this class and no sub (inherited) classes:: | ||||
|  | ||||
|         with no_sub_classes(Group) as Group: | ||||
|             Group.objects.find() | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, cls): | ||||
|         """ Construct the no_sub_classes context manager. | ||||
|         """Construct the no_sub_classes context manager. | ||||
|  | ||||
|         :param cls: the class for which querying subclasses is turned off | ||||
|         """ | ||||
|         self.cls = cls | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ change the objects default and _auto_dereference values""" | ||||
|         """Change the objects default and _auto_dereference values.""" | ||||
|         self.cls._all_subclasses = self.cls._subclasses | ||||
|         self.cls._subclasses = (self.cls,) | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the default and _auto_dereference values""" | ||||
|         """Reset the default and _auto_dereference values.""" | ||||
|         self.cls._subclasses = self.cls._all_subclasses | ||||
|         delattr(self.cls, '_all_subclasses') | ||||
|         return self.cls | ||||
|  | ||||
|  | ||||
| class query_counter(object): | ||||
|     """ Query_counter context manager to get the number of queries. """ | ||||
|     """Query_counter context manager to get the number of queries.""" | ||||
|  | ||||
|     def __init__(self): | ||||
|         """ Construct the query_counter. """ | ||||
|         """Construct the query_counter.""" | ||||
|         self.counter = 0 | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """ On every with block we need to drop the profile collection. """ | ||||
|         """On every with block we need to drop the profile collection.""" | ||||
|         self.db.set_profiling_level(0) | ||||
|         self.db.system.profile.drop() | ||||
|         self.db.set_profiling_level(2) | ||||
|         return self | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """ Reset the profiling level. """ | ||||
|         """Reset the profiling level.""" | ||||
|         self.db.set_profiling_level(0) | ||||
|  | ||||
|     def __eq__(self, value): | ||||
|         """ == Compare querycounter. """ | ||||
|         """== Compare querycounter.""" | ||||
|         counter = self._get_count() | ||||
|         return value == counter | ||||
|  | ||||
|     def __ne__(self, value): | ||||
|         """ != Compare querycounter. """ | ||||
|         """!= Compare querycounter.""" | ||||
|         return not self.__eq__(value) | ||||
|  | ||||
|     def __lt__(self, value): | ||||
|         """ < Compare querycounter. """ | ||||
|         """< Compare querycounter.""" | ||||
|         return self._get_count() < value | ||||
|  | ||||
|     def __le__(self, value): | ||||
|         """ <= Compare querycounter. """ | ||||
|         """<= Compare querycounter.""" | ||||
|         return self._get_count() <= value | ||||
|  | ||||
|     def __gt__(self, value): | ||||
|         """ > Compare querycounter. """ | ||||
|         """> Compare querycounter.""" | ||||
|         return self._get_count() > value | ||||
|  | ||||
|     def __ge__(self, value): | ||||
|         """ >= Compare querycounter. """ | ||||
|         """>= Compare querycounter.""" | ||||
|         return self._get_count() >= value | ||||
|  | ||||
|     def __int__(self): | ||||
|         """ int representation. """ | ||||
|         """int representation.""" | ||||
|         return self._get_count() | ||||
|  | ||||
|     def __repr__(self): | ||||
|         """ repr query_counter as the number of queries. """ | ||||
|         """repr query_counter as the number of queries.""" | ||||
|         return u"%s" % self._get_count() | ||||
|  | ||||
|     def _get_count(self): | ||||
|         """ Get the number of queries. """ | ||||
|         ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} | ||||
|         """Get the number of queries.""" | ||||
|         ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}} | ||||
|         count = self.db.system.profile.find(ignore_query).count() - self.counter | ||||
|         self.counter += 1 | ||||
|         return count | ||||
|   | ||||
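query_counter ships without a usage example in its docstring; a minimal sketch, reusing the hypothetical Group document from the examples above and assuming a default connection (exact counts depend on MongoDB's profiling behaviour):

    from mongoengine.context_managers import query_counter

    with query_counter() as counter:
        assert counter == 0    # profile collection starts empty
        Group.objects.first()  # issues one query
        assert counter == 1
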
| @@ -1,14 +1,12 @@ | ||||
| from bson import DBRef, SON | ||||
| import six | ||||
|  | ||||
| from .base import ( | ||||
|     BaseDict, BaseList, EmbeddedDocumentList, | ||||
|     TopLevelDocumentMetaclass, get_document | ||||
| ) | ||||
| from .connection import get_db | ||||
| from .document import Document, EmbeddedDocument | ||||
| from .fields import DictField, ListField, MapField, ReferenceField | ||||
| from .python_support import txt_type | ||||
| from .queryset import QuerySet | ||||
| from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | ||||
|                               TopLevelDocumentMetaclass, get_document) | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.document import Document, EmbeddedDocument | ||||
| from mongoengine.fields import DictField, ListField, MapField, ReferenceField | ||||
| from mongoengine.queryset import QuerySet | ||||
|  | ||||
|  | ||||
| class DeReference(object): | ||||
| @@ -25,7 +23,7 @@ class DeReference(object): | ||||
|             :class:`~mongoengine.base.ComplexBaseField` | ||||
|         :param get: a boolean indicating whether this is called by __get__ | ||||
|         """ | ||||
|         if items is None or isinstance(items, basestring): | ||||
|         if items is None or isinstance(items, six.string_types): | ||||
|             return items | ||||
|  | ||||
|         # cheapest way to convert a queryset to a list | ||||
| @@ -68,11 +66,11 @@ class DeReference(object): | ||||
|  | ||||
|                         items = _get_items(items) | ||||
|                     else: | ||||
|                         items = dict([ | ||||
|                             (k, field.to_python(v)) | ||||
|                             if not isinstance(v, (DBRef, Document)) else (k, v) | ||||
|                             for k, v in items.iteritems()] | ||||
|                         ) | ||||
|                         items = { | ||||
|                             k: (v if isinstance(v, (DBRef, Document)) | ||||
|                                 else field.to_python(v)) | ||||
|                             for k, v in items.iteritems() | ||||
|                         } | ||||
|  | ||||
|         self.reference_map = self._find_references(items) | ||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||
| @@ -90,14 +88,14 @@ class DeReference(object): | ||||
|             return reference_map | ||||
|  | ||||
|         # Determine the iterator to use | ||||
|         if not hasattr(items, 'items'): | ||||
|             iterator = enumerate(items) | ||||
|         if isinstance(items, dict): | ||||
|             iterator = items.values() | ||||
|         else: | ||||
|             iterator = items.iteritems() | ||||
|             iterator = items | ||||
|  | ||||
|         # Recursively find dbreferences | ||||
|         depth += 1 | ||||
|         for k, item in iterator: | ||||
|         for item in iterator: | ||||
|             if isinstance(item, (Document, EmbeddedDocument)): | ||||
|                 for field_name, field in item._fields.iteritems(): | ||||
|                     v = item._data.get(field_name, None) | ||||
| @@ -151,7 +149,7 @@ class DeReference(object): | ||||
|                     references = get_db()[collection].find({'_id': {'$in': refs}}) | ||||
|                     for ref in references: | ||||
|                         if '_cls' in ref: | ||||
|                             doc = get_document(ref["_cls"])._from_son(ref) | ||||
|                             doc = get_document(ref['_cls'])._from_son(ref) | ||||
|                         elif doc_type is None: | ||||
|                             doc = get_document( | ||||
|                                 ''.join(x.capitalize() | ||||
| @@ -218,7 +216,7 @@ class DeReference(object): | ||||
|             if k in self.object_map and not is_list: | ||||
|                 data[k] = self.object_map[k] | ||||
|             elif isinstance(v, (Document, EmbeddedDocument)): | ||||
|                 for field_name, field in v._fields.iteritems(): | ||||
|                 for field_name in v._fields: | ||||
|                     v = data[k]._data.get(field_name, None) | ||||
|                     if isinstance(v, DBRef): | ||||
|                         data[k]._data[field_name] = self.object_map.get( | ||||
| @@ -227,7 +225,7 @@ class DeReference(object): | ||||
|                         data[k]._data[field_name] = self.object_map.get( | ||||
|                             (v['_ref'].collection, v['_ref'].id), v) | ||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                         item_name = txt_type("{0}.{1}.{2}").format(name, k, field_name) | ||||
|                         item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name) | ||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) | ||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                 item_name = '%s.%s' % (name, k) if name else name | ||||
|   | ||||
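For context, DeReference is what runs under QuerySet.select_related(); a sketch with hypothetical documents showing what gets resolved:

    # The author DBRefs in the result set are swapped for Author instances
    # in bulk, instead of one lazy lookup per attribute access.
    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author)

    for book in Book.objects.select_related(max_depth=1):
        print(book.author.name)  # already dereferenced, no extra query
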
| @@ -4,18 +4,12 @@ import warnings | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| from pymongo.read_preferences import ReadPreference | ||||
| import six | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base import ( | ||||
|     ALLOW_INHERITANCE, | ||||
|     BaseDict, | ||||
|     BaseDocument, | ||||
|     BaseList, | ||||
|     DocumentMetaclass, | ||||
|     EmbeddedDocumentList, | ||||
|     TopLevelDocumentMetaclass, | ||||
|     get_document | ||||
| ) | ||||
| from mongoengine.base import (BaseDict, BaseDocument, BaseList, | ||||
|                               DocumentMetaclass, EmbeddedDocumentList, | ||||
|                               TopLevelDocumentMetaclass, get_document) | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| from mongoengine.context_managers import switch_collection, switch_db | ||||
| @@ -31,12 +25,10 @@ __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', | ||||
|  | ||||
|  | ||||
| def includes_cls(fields): | ||||
|     """ Helper function used for ensuring and comparing indexes | ||||
|     """ | ||||
|  | ||||
|     """Helper function used for ensuring and comparing indexes.""" | ||||
|     first_field = None | ||||
|     if len(fields): | ||||
|         if isinstance(fields[0], basestring): | ||||
|         if isinstance(fields[0], six.string_types): | ||||
|             first_field = fields[0] | ||||
|         elif isinstance(fields[0], (list, tuple)) and len(fields[0]): | ||||
|             first_field = fields[0][0] | ||||
| @@ -57,9 +49,8 @@ class EmbeddedDocument(BaseDocument): | ||||
|     to create a specialised version of the embedded document that will be | ||||
|     stored in the same collection. To facilitate this behaviour a `_cls` | ||||
|     field is added to documents (hidden through the MongoEngine interface). | ||||
|     To disable this behaviour and remove the dependence on the presence of | ||||
|     `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` | ||||
|     dictionary. | ||||
|     To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the | ||||
|     :attr:`meta` dictionary. | ||||
|     """ | ||||
|  | ||||
|     __slots__ = ('_instance', ) | ||||
| @@ -82,6 +73,15 @@ class EmbeddedDocument(BaseDocument): | ||||
|     def __ne__(self, other): | ||||
|         return not self.__eq__(other) | ||||
|  | ||||
|     def to_mongo(self, *args, **kwargs): | ||||
|         data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) | ||||
|  | ||||
|         # remove _id from the SON if it's in it and it's None | ||||
|         if '_id' in data and data['_id'] is None: | ||||
|             del data['_id'] | ||||
|  | ||||
|         return data | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|         self._instance.save(*args, **kwargs) | ||||
|  | ||||
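A sketch of the effect of the to_mongo override above (hypothetical Comment document with no explicit id field):

    # A '_id': None entry no longer leaks into the serialized SON of an
    # unsaved embedded document.
    class Comment(EmbeddedDocument):
        content = StringField()

    son = Comment(content='hi').to_mongo()
    assert '_id' not in son
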
| @@ -106,9 +106,8 @@ class Document(BaseDocument): | ||||
|     create a specialised version of the document that will be stored in the | ||||
|     same collection. To facilitate this behaviour a `_cls` | ||||
|     field is added to documents (hidden through the MongoEngine interface). | ||||
|     To disable this behaviour and remove the dependence on the presence of | ||||
|     `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` | ||||
|     dictionary. | ||||
|     To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the | ||||
|     :attr:`meta` dictionary. | ||||
|  | ||||
|     A :class:`~mongoengine.Document` may use a **Capped Collection** by | ||||
|     specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` | ||||
| @@ -149,26 +148,22 @@ class Document(BaseDocument): | ||||
|  | ||||
|     __slots__ = ('__objects',) | ||||
|  | ||||
|     def pk(): | ||||
|         """Primary key alias | ||||
|         """ | ||||
|     @property | ||||
|     def pk(self): | ||||
|         """Get the primary key.""" | ||||
|         if 'id_field' not in self._meta: | ||||
|             return None | ||||
|         return getattr(self, self._meta['id_field']) | ||||
|  | ||||
|         def fget(self): | ||||
|             if 'id_field' not in self._meta: | ||||
|                 return None | ||||
|             return getattr(self, self._meta['id_field']) | ||||
|  | ||||
|         def fset(self, value): | ||||
|             return setattr(self, self._meta['id_field'], value) | ||||
|  | ||||
|         return property(fget, fset) | ||||
|  | ||||
|     pk = pk() | ||||
|     @pk.setter | ||||
|     def pk(self, value): | ||||
|         """Set the primary key.""" | ||||
|         return setattr(self, self._meta['id_field'], value) | ||||
|  | ||||
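The property-based rewrite keeps the public behaviour identical; a quick sketch with the hypothetical Group document:

    from bson import ObjectId

    group = Group(name='test')
    group.pk = ObjectId()        # routed to the id field via the setter
    assert group.pk == group.id  # the getter proxies the same field
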
|     @classmethod | ||||
|     def _get_db(cls): | ||||
|         """Some Model using other db_alias""" | ||||
|         return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)) | ||||
|         return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) | ||||
|  | ||||
|     @classmethod | ||||
|     def _get_collection(cls): | ||||
| @@ -211,7 +206,20 @@ class Document(BaseDocument): | ||||
|                 cls.ensure_indexes() | ||||
|         return cls._collection | ||||
|  | ||||
|     def modify(self, query={}, **update): | ||||
|     def to_mongo(self, *args, **kwargs): | ||||
|         data = super(Document, self).to_mongo(*args, **kwargs) | ||||
|  | ||||
|         # If '_id' is None, try to set it from self._data. If that | ||||
|         # doesn't exist either, remove '_id' from the SON completely. | ||||
|         if data['_id'] is None: | ||||
|             if self._data.get('id') is None: | ||||
|                 del data['_id'] | ||||
|             else: | ||||
|                 data['_id'] = self._data['id'] | ||||
|  | ||||
|         return data | ||||
|  | ||||
|     def modify(self, query=None, **update): | ||||
|         """Perform an atomic update of the document in the database and reload | ||||
|         the document object using updated version. | ||||
|  | ||||
| @@ -225,17 +233,19 @@ class Document(BaseDocument): | ||||
|             database matches the query | ||||
|         :param update: Django-style update keyword arguments | ||||
|         """ | ||||
|         if query is None: | ||||
|             query = {} | ||||
|  | ||||
|         if self.pk is None: | ||||
|             raise InvalidDocumentError("The document does not have a primary key.") | ||||
|             raise InvalidDocumentError('The document does not have a primary key.') | ||||
|  | ||||
|         id_field = self._meta["id_field"] | ||||
|         id_field = self._meta['id_field'] | ||||
|         query = query.copy() if isinstance(query, dict) else query.to_query(self) | ||||
|  | ||||
|         if id_field not in query: | ||||
|             query[id_field] = self.pk | ||||
|         elif query[id_field] != self.pk: | ||||
|             raise InvalidQueryError("Invalid document modify query: it must modify only this document.") | ||||
|             raise InvalidQueryError('Invalid document modify query: it must modify only this document.') | ||||
|  | ||||
|         updated = self._qs(**query).modify(new=True, **update) | ||||
|         if updated is None: | ||||
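
The switch from query={} to query=None sidesteps Python's shared mutable default pitfall, sketched here in isolation:

    # Default argument objects are created once at definition time and
    # shared across calls, so in-place mutations leak between invocations.
    def risky(query={}):
        query.setdefault('calls', 0)
        query['calls'] += 1
        return query

    print(risky())  # {'calls': 1}
    print(risky())  # {'calls': 2} -- same dict object as the first call
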
| @@ -310,7 +320,7 @@ class Document(BaseDocument): | ||||
|             self.validate(clean=clean) | ||||
|  | ||||
|         if write_concern is None: | ||||
|             write_concern = {"w": 1} | ||||
|             write_concern = {'w': 1} | ||||
|  | ||||
|         doc = self.to_mongo() | ||||
|  | ||||
| @@ -347,7 +357,7 @@ class Document(BaseDocument): | ||||
|                 else: | ||||
|                     select_dict = {} | ||||
|                 select_dict['_id'] = object_id | ||||
|                 shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||
|                 shard_key = self._meta.get('shard_key', tuple()) | ||||
|                 for k in shard_key: | ||||
|                     path = self._lookup_field(k.split('.')) | ||||
|                     actual_key = [p.db_field for p in path] | ||||
| @@ -358,7 +368,7 @@ class Document(BaseDocument): | ||||
|  | ||||
|                 def is_new_object(last_error): | ||||
|                     if last_error is not None: | ||||
|                         updated = last_error.get("updatedExisting") | ||||
|                         updated = last_error.get('updatedExisting') | ||||
|                         if updated is not None: | ||||
|                             return not updated | ||||
|                     return created | ||||
| @@ -366,14 +376,14 @@ class Document(BaseDocument): | ||||
|                 update_query = {} | ||||
|  | ||||
|                 if updates: | ||||
|                     update_query["$set"] = updates | ||||
|                     update_query['$set'] = updates | ||||
|                 if removals: | ||||
|                     update_query["$unset"] = removals | ||||
|                     update_query['$unset'] = removals | ||||
|                 if updates or removals: | ||||
|                     upsert = save_condition is None | ||||
|                     last_error = collection.update(select_dict, update_query, | ||||
|                                                    upsert=upsert, **write_concern) | ||||
|                     if not upsert and last_error["n"] == 0: | ||||
|                     if not upsert and last_error['n'] == 0: | ||||
|                         raise SaveConditionError('Race condition preventing' | ||||
|                                                  ' document update detected') | ||||
|                     created = is_new_object(last_error) | ||||
| @@ -384,26 +394,27 @@ class Document(BaseDocument): | ||||
|  | ||||
|             if cascade: | ||||
|                 kwargs = { | ||||
|                     "force_insert": force_insert, | ||||
|                     "validate": validate, | ||||
|                     "write_concern": write_concern, | ||||
|                     "cascade": cascade | ||||
|                     'force_insert': force_insert, | ||||
|                     'validate': validate, | ||||
|                     'write_concern': write_concern, | ||||
|                     'cascade': cascade | ||||
|                 } | ||||
|                 if cascade_kwargs:  # Allow granular control over cascades | ||||
|                     kwargs.update(cascade_kwargs) | ||||
|                 kwargs['_refs'] = _refs | ||||
|                 self.cascade_save(**kwargs) | ||||
|         except pymongo.errors.DuplicateKeyError, err: | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             message = u'Tried to save duplicate unique keys (%s)' | ||||
|             raise NotUniqueError(message % unicode(err)) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             raise NotUniqueError(message % six.text_type(err)) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||
|             if re.match('^E1100[01] duplicate key', six.text_type(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u'Tried to save duplicate unique keys (%s)' | ||||
|                 raise NotUniqueError(message % unicode(err)) | ||||
|             raise OperationError(message % unicode(err)) | ||||
|                 raise NotUniqueError(message % six.text_type(err)) | ||||
|             raise OperationError(message % six.text_type(err)) | ||||
|  | ||||
|         id_field = self._meta['id_field'] | ||||
|         if created or id_field not in self._meta.get('shard_key', []): | ||||
|             self[id_field] = self._fields[id_field].to_python(object_id) | ||||
| @@ -414,10 +425,11 @@ class Document(BaseDocument): | ||||
|         self._created = False | ||||
|         return self | ||||
|  | ||||
|     def cascade_save(self, *args, **kwargs): | ||||
|         """Recursively saves any references / | ||||
|            generic references on the document""" | ||||
|         _refs = kwargs.get('_refs', []) or [] | ||||
|     def cascade_save(self, **kwargs): | ||||
|         """Recursively save any references and generic references on the | ||||
|         document. | ||||
|         """ | ||||
|         _refs = kwargs.get('_refs') or [] | ||||
|  | ||||
|         ReferenceField = _import_class('ReferenceField') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
| @@ -443,16 +455,17 @@ class Document(BaseDocument): | ||||
|  | ||||
|     @property | ||||
|     def _qs(self): | ||||
|         """ | ||||
|         Returns the queryset to use for updating / reloading / deletions | ||||
|         """ | ||||
|         """Return the queryset to use for updating / reloading / deletions.""" | ||||
|         if not hasattr(self, '__objects'): | ||||
|             self.__objects = QuerySet(self, self._get_collection()) | ||||
|         return self.__objects | ||||
|  | ||||
|     @property | ||||
|     def _object_key(self): | ||||
|         """Dict to identify object in collection | ||||
|         """Get the query dict that can be used to fetch this object from | ||||
|         the database. Most of the time it's a simple PK lookup, but in | ||||
|         case of a sharded collection with a compound shard key, it can | ||||
|         contain a more complex query. | ||||
|         """ | ||||
|         select_dict = {'pk': self.pk} | ||||
|         shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||
| @@ -472,11 +485,11 @@ class Document(BaseDocument): | ||||
|         Raises :class:`OperationError` if called on an object that has not yet | ||||
|         been saved. | ||||
|         """ | ||||
|         if not self.pk: | ||||
|         if self.pk is None: | ||||
|             if kwargs.get('upsert', False): | ||||
|                 query = self.to_mongo() | ||||
|                 if "_cls" in query: | ||||
|                     del query["_cls"] | ||||
|                 if '_cls' in query: | ||||
|                     del query['_cls'] | ||||
|                 return self._qs.filter(**query).update_one(**kwargs) | ||||
|             else: | ||||
|                 raise OperationError( | ||||
| @@ -513,7 +526,7 @@ class Document(BaseDocument): | ||||
|         try: | ||||
|             self._qs.filter( | ||||
|                 **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = u'Could not delete document (%s)' % err.message | ||||
|             raise OperationError(message) | ||||
|         signals.post_delete.send(self.__class__, document=self, **signal_kwargs) | ||||
| @@ -601,11 +614,12 @@ class Document(BaseDocument): | ||||
|         if fields and isinstance(fields[0], int): | ||||
|             max_depth = fields[0] | ||||
|             fields = fields[1:] | ||||
|         elif "max_depth" in kwargs: | ||||
|             max_depth = kwargs["max_depth"] | ||||
|         elif 'max_depth' in kwargs: | ||||
|             max_depth = kwargs['max_depth'] | ||||
|  | ||||
|         if self.pk is None: | ||||
|             raise self.DoesNotExist('Document does not exist') | ||||
|  | ||||
|         if not self.pk: | ||||
|             raise self.DoesNotExist("Document does not exist") | ||||
|         obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( | ||||
|             **self._object_key).only(*fields).limit( | ||||
|             1).select_related(max_depth=max_depth) | ||||
| @@ -613,7 +627,7 @@ class Document(BaseDocument): | ||||
|         if obj: | ||||
|             obj = obj[0] | ||||
|         else: | ||||
|             raise self.DoesNotExist("Document does not exist") | ||||
|             raise self.DoesNotExist('Document does not exist') | ||||
|  | ||||
|         for field in obj._data: | ||||
|             if not fields or field in fields: | ||||
| @@ -655,8 +669,8 @@ class Document(BaseDocument): | ||||
|     def to_dbref(self): | ||||
|         """Returns an instance of :class:`~bson.dbref.DBRef` useful in | ||||
|         `__raw__` queries.""" | ||||
|         if not self.pk: | ||||
|             msg = "Only saved documents can have a valid dbref" | ||||
|         if self.pk is None: | ||||
|             msg = 'Only saved documents can have a valid dbref' | ||||
|             raise OperationError(msg) | ||||
|         return DBRef(self.__class__._get_collection_name(), self.pk) | ||||
|  | ||||
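A short usage sketch for to_dbref() in a raw query (the Post document and its group field are hypothetical):

    group = Group.objects.first()
    posts = Post.objects(__raw__={'group': group.to_dbref()})
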
| @@ -711,7 +725,7 @@ class Document(BaseDocument): | ||||
|         fields = index_spec.pop('fields') | ||||
|         drop_dups = kwargs.get('drop_dups', False) | ||||
|         if IS_PYMONGO_3 and drop_dups: | ||||
|             msg = "drop_dups is deprecated and is removed when using PyMongo 3+." | ||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|         elif not IS_PYMONGO_3: | ||||
|             index_spec['drop_dups'] = drop_dups | ||||
| @@ -737,7 +751,7 @@ class Document(BaseDocument): | ||||
|             will be removed if PyMongo 3+ is used | ||||
|         """ | ||||
|         if IS_PYMONGO_3 and drop_dups: | ||||
|             msg = "drop_dups is deprecated and is removed when using PyMongo 3+." | ||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|         elif not IS_PYMONGO_3: | ||||
|             kwargs.update({'drop_dups': drop_dups}) | ||||
| @@ -757,7 +771,7 @@ class Document(BaseDocument): | ||||
|         index_opts = cls._meta.get('index_opts') or {} | ||||
|         index_cls = cls._meta.get('index_cls', True) | ||||
|         if IS_PYMONGO_3 and drop_dups: | ||||
|             msg = "drop_dups is deprecated and is removed when using PyMongo 3+." | ||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|  | ||||
|         collection = cls._get_collection() | ||||
| @@ -795,8 +809,7 @@ class Document(BaseDocument): | ||||
|  | ||||
|         # If _cls is being used (for polymorphism), it needs an index, | ||||
|         # only if another index doesn't begin with _cls | ||||
|         if (index_cls and not cls_indexed and | ||||
|                 cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): | ||||
|         if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'): | ||||
|  | ||||
|             # we shouldn't pass 'cls' to the collection.ensureIndex options | ||||
|             # because of https://jira.mongodb.org/browse/SERVER-769 | ||||
| @@ -866,16 +879,15 @@ class Document(BaseDocument): | ||||
|         # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed | ||||
|         if [(u'_id', 1)] not in indexes: | ||||
|             indexes.append([(u'_id', 1)]) | ||||
|         if (cls._meta.get('index_cls', True) and | ||||
|                 cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True): | ||||
|         if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'): | ||||
|             indexes.append([(u'_cls', 1)]) | ||||
|  | ||||
|         return indexes | ||||
|  | ||||
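The allow_inheritance checks above now rely on the meta value's plain truthiness; a sketch of opting in, with hypothetical documents:

    # _cls is only auto-indexed when inheritance is explicitly enabled.
    class Animal(Document):
        name = StringField()
        meta = {'allow_inheritance': True}  # opt in; the default is off

    class Dog(Animal):
        pass
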
|     @classmethod | ||||
|     def compare_indexes(cls): | ||||
|         """ Compares the indexes defined in MongoEngine with the ones existing | ||||
|         in the database. Returns any missing/extra indexes. | ||||
|         """ Compares the indexes defined in MongoEngine with the ones | ||||
|         existing in the database. Returns any missing/extra indexes. | ||||
|         """ | ||||
|  | ||||
|         required = cls.list_indexes() | ||||
| @@ -919,8 +931,9 @@ class DynamicDocument(Document): | ||||
|     _dynamic = True | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
|         """Deletes the attribute by setting to None and allowing _delta to unset | ||||
|         it""" | ||||
|         """Delete the attribute by setting to None and allowing _delta | ||||
|         to unset it. | ||||
|         """ | ||||
|         field_name = args[0] | ||||
|         if field_name in self._dynamic_fields: | ||||
|             setattr(self, field_name, None) | ||||
| @@ -942,8 +955,9 @@ class DynamicEmbeddedDocument(EmbeddedDocument): | ||||
|     _dynamic = True | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
|         """Deletes the attribute by setting to None and allowing _delta to unset | ||||
|         it""" | ||||
|         """Delete the attribute by setting to None and allowing _delta | ||||
|         to unset it. | ||||
|         """ | ||||
|         field_name = args[0] | ||||
|         if field_name in self._fields: | ||||
|             default = self._fields[field_name].default | ||||
| @@ -985,10 +999,10 @@ class MapReduceDocument(object): | ||||
|             try: | ||||
|                 self.key = id_field_type(self.key) | ||||
|             except Exception: | ||||
|                 raise Exception("Could not cast key as %s" % | ||||
|                 raise Exception('Could not cast key as %s' % | ||||
|                                 id_field_type.__name__) | ||||
|  | ||||
|         if not hasattr(self, '_key_object'): | ||||
|             self._key_object = self._document.objects.with_id(self.key) | ||||
|         return self._key_object | ||||
|         return self._key_object | ||||
|   | ||||
| @@ -1,7 +1,6 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| from mongoengine.python_support import txt_type | ||||
|  | ||||
| import six | ||||
|  | ||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | ||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', | ||||
| @@ -71,13 +70,13 @@ class ValidationError(AssertionError): | ||||
|     field_name = None | ||||
|     _message = None | ||||
|  | ||||
|     def __init__(self, message="", **kwargs): | ||||
|     def __init__(self, message='', **kwargs): | ||||
|         self.errors = kwargs.get('errors', {}) | ||||
|         self.field_name = kwargs.get('field_name') | ||||
|         self.message = message | ||||
|  | ||||
|     def __str__(self): | ||||
|         return txt_type(self.message) | ||||
|         return six.text_type(self.message) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return '%s(%s,)' % (self.__class__.__name__, self.message) | ||||
| @@ -111,17 +110,20 @@ class ValidationError(AssertionError): | ||||
|             errors_dict = {} | ||||
|             if not source: | ||||
|                 return errors_dict | ||||
|  | ||||
|             if isinstance(source, dict): | ||||
|                 for field_name, error in source.iteritems(): | ||||
|                     errors_dict[field_name] = build_dict(error) | ||||
|             elif isinstance(source, ValidationError) and source.errors: | ||||
|                 return build_dict(source.errors) | ||||
|             else: | ||||
|                 return unicode(source) | ||||
|                 return six.text_type(source) | ||||
|  | ||||
|             return errors_dict | ||||
|  | ||||
|         if not self.errors: | ||||
|             return {} | ||||
|  | ||||
|         return build_dict(self.errors) | ||||
|  | ||||
|     def _format_errors(self): | ||||
| @@ -134,10 +136,10 @@ class ValidationError(AssertionError): | ||||
|                 value = ' '.join( | ||||
|                     [generate_key(v, k) for k, v in value.iteritems()]) | ||||
|  | ||||
|             results = "%s.%s" % (prefix, value) if prefix else value | ||||
|             results = '%s.%s' % (prefix, value) if prefix else value | ||||
|             return results | ||||
|  | ||||
|         error_dict = defaultdict(list) | ||||
|         for k, v in self.to_dict().iteritems(): | ||||
|             error_dict[generate_key(v)].append(k) | ||||
|         return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()]) | ||||
|         return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()]) | ||||
|   | ||||
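For reference, a sketch of how the errors dict built above surfaces to callers (the field and message below are illustrative):

    from mongoengine import Document, StringField
    from mongoengine.errors import ValidationError

    class User(Document):
        name = StringField(required=True, max_length=3)

    try:
        User(name='too long').validate()
    except ValidationError as e:
        print(e.to_dict())  # e.g. {'name': u'String value is too long'}
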
| @@ -3,7 +3,6 @@ import decimal | ||||
| import itertools | ||||
| import re | ||||
| import time | ||||
| import urllib2 | ||||
| import uuid | ||||
| import warnings | ||||
| from operator import itemgetter | ||||
| @@ -25,13 +24,13 @@ try: | ||||
| except ImportError: | ||||
|     Int64 = long | ||||
|  | ||||
| from .base import (BaseDocument, BaseField, ComplexBaseField, GeoJsonBaseField, | ||||
|                    ObjectIdField, get_document) | ||||
| from .connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| from .document import Document, EmbeddedDocument | ||||
| from .errors import DoesNotExist, ValidationError | ||||
| from .python_support import PY3, StringIO, bin_type, str_types, txt_type | ||||
| from .queryset import DO_NOTHING, QuerySet | ||||
| from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, | ||||
|                               GeoJsonBaseField, ObjectIdField, get_document) | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| from mongoengine.document import Document, EmbeddedDocument | ||||
| from mongoengine.errors import DoesNotExist, ValidationError | ||||
| from mongoengine.python_support import StringIO | ||||
| from mongoengine.queryset import DO_NOTHING, QuerySet | ||||
|  | ||||
| try: | ||||
|     from PIL import Image, ImageOps | ||||
| @@ -39,7 +38,7 @@ except ImportError: | ||||
|     Image = None | ||||
|     ImageOps = None | ||||
|  | ||||
| __all__ = [ | ||||
| __all__ = ( | ||||
|     'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', | ||||
|     'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', | ||||
|     'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', | ||||
| @@ -50,14 +49,14 @@ __all__ = [ | ||||
|     'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField', | ||||
|     'GeoPointField', 'PointField', 'LineStringField', 'PolygonField', | ||||
|     'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField', | ||||
|     'MultiPolygonField', 'GeoJsonBaseField'] | ||||
|     'MultiPolygonField', 'GeoJsonBaseField' | ||||
| ) | ||||
|  | ||||
| RECURSIVE_REFERENCE_CONSTANT = 'self' | ||||
|  | ||||
|  | ||||
| class StringField(BaseField): | ||||
|     """A unicode string field. | ||||
|     """ | ||||
|     """A unicode string field.""" | ||||
|  | ||||
|     def __init__(self, regex=None, max_length=None, min_length=None, **kwargs): | ||||
|         self.regex = re.compile(regex) if regex else None | ||||
| @@ -66,7 +65,7 @@ class StringField(BaseField): | ||||
|         super(StringField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, unicode): | ||||
|         if isinstance(value, six.text_type): | ||||
|             return value | ||||
|         try: | ||||
|             value = value.decode('utf-8') | ||||
| @@ -75,7 +74,7 @@ class StringField(BaseField): | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, basestring): | ||||
|         if not isinstance(value, six.string_types): | ||||
|             self.error('StringField only accepts string values') | ||||
|  | ||||
|         if self.max_length is not None and len(value) > self.max_length: | ||||
| @@ -91,7 +90,7 @@ class StringField(BaseField): | ||||
|         return None | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if not isinstance(op, basestring): | ||||
|         if not isinstance(op, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if op.lstrip('i') in ('startswith', 'endswith', 'contains', 'exact'): | ||||
| @@ -148,17 +147,6 @@ class URLField(StringField): | ||||
|             self.error('Invalid URL: {}'.format(value)) | ||||
|             return | ||||
|  | ||||
|         if self.verify_exists: | ||||
|             warnings.warn( | ||||
|                 "The URLField verify_exists argument has intractable security " | ||||
|                 "and performance issues. Accordingly, it has been deprecated.", | ||||
|                 DeprecationWarning) | ||||
|             try: | ||||
|                 request = urllib2.Request(value) | ||||
|                 urllib2.urlopen(request) | ||||
|             except Exception, e: | ||||
|                 self.error('This URL appears to be a broken link: %s' % e) | ||||
|  | ||||
|  | ||||
| class EmailField(StringField): | ||||
|     """A field that validates input as an email address. | ||||
| @@ -182,8 +170,7 @@ class EmailField(StringField): | ||||
|  | ||||
|  | ||||
| class IntField(BaseField): | ||||
|     """An 32-bit integer field. | ||||
|     """ | ||||
|     """32-bit integer field.""" | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
| @@ -216,8 +203,7 @@ class IntField(BaseField): | ||||
|  | ||||
|  | ||||
| class LongField(BaseField): | ||||
|     """An 64-bit integer field. | ||||
|     """ | ||||
|     """64-bit integer field.""" | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
| @@ -253,8 +239,7 @@ class LongField(BaseField): | ||||
|  | ||||
|  | ||||
| class FloatField(BaseField): | ||||
|     """An floating point number field. | ||||
|     """ | ||||
|     """Floating point number field.""" | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
| @@ -291,7 +276,7 @@ class FloatField(BaseField): | ||||
|  | ||||
|  | ||||
| class DecimalField(BaseField): | ||||
|     """A fixed-point decimal number field. | ||||
|     """Fixed-point decimal number field. | ||||
|  | ||||
|     .. versionchanged:: 0.8 | ||||
|     .. versionadded:: 0.3 | ||||
| @@ -332,25 +317,25 @@ class DecimalField(BaseField): | ||||
|  | ||||
|         # Convert to string for python 2.6 before casting to Decimal | ||||
|         try: | ||||
|             value = decimal.Decimal("%s" % value) | ||||
|             value = decimal.Decimal('%s' % value) | ||||
|         except decimal.InvalidOperation: | ||||
|             return value | ||||
|         return value.quantize(decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding) | ||||
|         return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|         if self.force_string: | ||||
|             return unicode(value) | ||||
|             return six.text_type(self.to_python(value)) | ||||
|         return float(self.to_python(value)) | ||||
|  | ||||
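A sketch of the force_string fix above: the stored string now reflects the quantized value returned by to_python (the precision and input are illustrative):

    f = DecimalField(force_string=True, precision=2)
    f.to_mongo('0.509')  # u'0.51' with the default half-up rounding,
                         # instead of the raw u'0.509' stored previously
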
|     def validate(self, value): | ||||
|         if not isinstance(value, decimal.Decimal): | ||||
|             if not isinstance(value, basestring): | ||||
|                 value = unicode(value) | ||||
|             if not isinstance(value, six.string_types): | ||||
|                 value = six.text_type(value) | ||||
|             try: | ||||
|                 value = decimal.Decimal(value) | ||||
|             except Exception, exc: | ||||
|             except Exception as exc: | ||||
|                 self.error('Could not convert value to decimal: %s' % exc) | ||||
|  | ||||
|         if self.min_value is not None and value < self.min_value: | ||||
| @@ -364,7 +349,7 @@ class DecimalField(BaseField): | ||||
|  | ||||
|  | ||||
| class BooleanField(BaseField): | ||||
|     """A boolean field type. | ||||
|     """Boolean field type. | ||||
|  | ||||
|     .. versionadded:: 0.1.2 | ||||
|     """ | ||||
| @@ -382,7 +367,7 @@ class BooleanField(BaseField): | ||||
|  | ||||
|  | ||||
| class DateTimeField(BaseField): | ||||
|     """A datetime field. | ||||
|     """Datetime field. | ||||
|  | ||||
|     Uses the python-dateutil library if available, otherwise falls back to | ||||
|     time.strptime to parse the dates. Note: python-dateutil's parser is fully featured and when | ||||
| @@ -410,7 +395,7 @@ class DateTimeField(BaseField): | ||||
|         if callable(value): | ||||
|             return value() | ||||
|  | ||||
|         if not isinstance(value, basestring): | ||||
|         if not isinstance(value, six.string_types): | ||||
|             return None | ||||
|  | ||||
|         # Attempt to parse a datetime: | ||||
| @@ -537,16 +522,19 @@ class EmbeddedDocumentField(BaseField): | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, document_type, **kwargs): | ||||
|         if not isinstance(document_type, basestring): | ||||
|             if not issubclass(document_type, EmbeddedDocument): | ||||
|                 self.error('Invalid embedded document class provided to an ' | ||||
|                            'EmbeddedDocumentField') | ||||
|         if ( | ||||
|             not isinstance(document_type, six.string_types) and | ||||
|             not issubclass(document_type, EmbeddedDocument) | ||||
|         ): | ||||
|             self.error('Invalid embedded document class provided to an ' | ||||
|                        'EmbeddedDocumentField') | ||||
|  | ||||
|         self.document_type_obj = document_type | ||||
|         super(EmbeddedDocumentField, self).__init__(**kwargs) | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
|         if isinstance(self.document_type_obj, basestring): | ||||
|         if isinstance(self.document_type_obj, six.string_types): | ||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||
|                 self.document_type_obj = self.owner_document | ||||
|             else: | ||||
| @@ -577,7 +565,7 @@ class EmbeddedDocumentField(BaseField): | ||||
|         return self.document_type._fields.get(member_name) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if not isinstance(value, self.document_type): | ||||
|         if value is not None and not isinstance(value, self.document_type): | ||||
|             value = self.document_type._from_son(value) | ||||
|         super(EmbeddedDocumentField, self).prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
| @@ -631,7 +619,7 @@ class DynamicField(BaseField): | ||||
|         """Convert a Python type to a MongoDB compatible type. | ||||
|         """ | ||||
|  | ||||
|         if isinstance(value, basestring): | ||||
|         if isinstance(value, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_mongo'): | ||||
| @@ -639,7 +627,7 @@ class DynamicField(BaseField): | ||||
|             val = value.to_mongo(use_db_field, fields) | ||||
|             # If it's a document that's not inherited, add _cls | ||||
|             if isinstance(value, Document): | ||||
|                 val = {"_ref": value.to_dbref(), "_cls": cls.__name__} | ||||
|                 val = {'_ref': value.to_dbref(), '_cls': cls.__name__} | ||||
|             if isinstance(value, EmbeddedDocument): | ||||
|                 val['_cls'] = cls.__name__ | ||||
|             return val | ||||
| @@ -650,7 +638,7 @@ class DynamicField(BaseField): | ||||
|         is_list = False | ||||
|         if not hasattr(value, 'items'): | ||||
|             is_list = True | ||||
|             value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|             value = {k: v for k, v in enumerate(value)} | ||||
|  | ||||
|         data = {} | ||||
|         for k, v in value.iteritems(): | ||||
| @@ -674,12 +662,12 @@ class DynamicField(BaseField): | ||||
|         return member_name | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if isinstance(value, basestring): | ||||
|         if isinstance(value, six.string_types): | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|         return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|     def validate(self, value, clean=True): | ||||
|         if hasattr(value, "validate"): | ||||
|         if hasattr(value, 'validate'): | ||||
|             value.validate(clean=clean) | ||||
|  | ||||
|  | ||||
| @@ -699,21 +687,27 @@ class ListField(ComplexBaseField): | ||||
|         super(ListField, self).__init__(**kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a list of valid fields is being used. | ||||
|         """ | ||||
|         """Make sure that a list of valid fields is being used.""" | ||||
|         if (not isinstance(value, (list, tuple, QuerySet)) or | ||||
|                 isinstance(value, basestring)): | ||||
|                 isinstance(value, six.string_types)): | ||||
|             self.error('Only lists and tuples may be used in a list field') | ||||
|         super(ListField, self).validate(value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if self.field: | ||||
|             if op in ('set', 'unset', None) and ( | ||||
|                     not isinstance(value, basestring) and | ||||
|                     not isinstance(value, BaseDocument) and | ||||
|                     hasattr(value, '__iter__')): | ||||
|  | ||||
|             # If the value is iterable and it's not a string nor a | ||||
|             # BaseDocument, call prepare_query_value for each of its items. | ||||
|             if ( | ||||
|                 op in ('set', 'unset', None) and | ||||
|                 hasattr(value, '__iter__') and | ||||
|                 not isinstance(value, six.string_types) and | ||||
|                 not isinstance(value, BaseDocument) | ||||
|             ): | ||||
|                 return [self.field.prepare_query_value(op, v) for v in value] | ||||
|  | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|  | ||||
|         return super(ListField, self).prepare_query_value(op, value) | ||||
|  | ||||
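A sketch of the per-item preparation above (the Post document and post_id are hypothetical):

    class Post(Document):
        tags = ListField(StringField())

    # A 'set' on the whole list runs the inner StringField's
    # prepare_query_value on every element.
    Post.objects(pk=post_id).update(set__tags=['db', 'mongo'])
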
|  | ||||
| @@ -726,7 +720,6 @@ class EmbeddedDocumentListField(ListField): | ||||
|         :class:`~mongoengine.EmbeddedDocument`. | ||||
|  | ||||
|     .. versionadded:: 0.9 | ||||
|  | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, document_type, **kwargs): | ||||
| @@ -775,17 +768,17 @@ class SortedListField(ListField): | ||||
|  | ||||
|  | ||||
| def key_not_string(d): | ||||
|     """ Helper function to recursively determine if any key in a dictionary is | ||||
|     not a string. | ||||
|     """Helper function to recursively determine if any key in a | ||||
|     dictionary is not a string. | ||||
|     """ | ||||
|     for k, v in d.items(): | ||||
|         if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)): | ||||
|         if not isinstance(k, six.string_types) or (isinstance(v, dict) and key_not_string(v)): | ||||
|             return True | ||||
|  | ||||
|  | ||||
| def key_has_dot_or_dollar(d): | ||||
|     """ Helper function to recursively determine if any key in a dictionary | ||||
|     contains a dot or a dollar sign. | ||||
|     """Helper function to recursively determine if any key in a | ||||
|     dictionary contains a dot or a dollar sign. | ||||
|     """ | ||||
|     for k, v in d.items(): | ||||
|         if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): | ||||
| @@ -813,14 +806,13 @@ class DictField(ComplexBaseField): | ||||
|         super(DictField, self).__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a list of valid fields is being used. | ||||
|         """ | ||||
|         """Make sure that a list of valid fields is being used.""" | ||||
|         if not isinstance(value, dict): | ||||
|             self.error('Only dictionaries may be used in a DictField') | ||||
|  | ||||
|         if key_not_string(value): | ||||
|             msg = ("Invalid dictionary key - documents must " | ||||
|                    "have only string keys") | ||||
|             msg = ('Invalid dictionary key - documents must ' | ||||
|                    'have only string keys') | ||||
|             self.error(msg) | ||||
|         if key_has_dot_or_dollar(value): | ||||
|             self.error('Invalid dictionary key name - keys may not contain "."' | ||||
| @@ -835,14 +827,15 @@ class DictField(ComplexBaseField): | ||||
|                            'istartswith', 'endswith', 'iendswith', | ||||
|                            'exact', 'iexact'] | ||||
|  | ||||
|         if op in match_operators and isinstance(value, basestring): | ||||
|         if op in match_operators and isinstance(value, six.string_types): | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|  | ||||
|         if hasattr(self.field, 'field'): | ||||
|             if op in ('set', 'unset') and isinstance(value, dict): | ||||
|                 return dict( | ||||
|                     (k, self.field.prepare_query_value(op, v)) | ||||
|                     for k, v in value.items()) | ||||
|                 return { | ||||
|                     k: self.field.prepare_query_value(op, v) | ||||
|                     for k, v in value.items() | ||||
|                 } | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|  | ||||
|         return super(DictField, self).prepare_query_value(op, value) | ||||
| @@ -911,10 +904,12 @@ class ReferenceField(BaseField): | ||||
|             A reference to an abstract document type is always stored as a | ||||
|             :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. | ||||
|         """ | ||||
|         if not isinstance(document_type, basestring): | ||||
|             if not issubclass(document_type, (Document, basestring)): | ||||
|                 self.error('Argument to ReferenceField constructor must be a ' | ||||
|                            'document class or a string') | ||||
|         if ( | ||||
|             not isinstance(document_type, six.string_types) and | ||||
|             not issubclass(document_type, Document) | ||||
|         ): | ||||
|             self.error('Argument to ReferenceField constructor must be a ' | ||||
|                        'document class or a string') | ||||
|  | ||||
|         self.dbref = dbref | ||||
|         self.document_type_obj = document_type | ||||
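|  | ||||
| Both accepted spellings, sketched with hypothetical documents (a string | ||||
| target, including the special value 'self', is resolved lazily by the | ||||
| document_type property below): | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     from mongoengine import Document, ReferenceField, StringField | ||||
|  | ||||
|     class Author(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|     class Book(Document): | ||||
|         author = ReferenceField(Author)    # document class | ||||
|         sequel = ReferenceField('self')    # string, resolved lazily | ||||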
| @@ -923,7 +918,7 @@ class ReferenceField(BaseField): | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
|         if isinstance(self.document_type_obj, basestring): | ||||
|         if isinstance(self.document_type_obj, six.string_types): | ||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||
|                 self.document_type_obj = self.owner_document | ||||
|             else: | ||||
| @@ -931,8 +926,7 @@ class ReferenceField(BaseField): | ||||
|         return self.document_type_obj | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         """Descriptor to allow lazy dereferencing. | ||||
|         """ | ||||
|         """Descriptor to allow lazy dereferencing.""" | ||||
|         if instance is None: | ||||
|             # Document class being used rather than a document object | ||||
|             return self | ||||
| @@ -989,8 +983,7 @@ class ReferenceField(BaseField): | ||||
|         return id_ | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         """Convert a MongoDB-compatible type to a Python type. | ||||
|         """ | ||||
|         """Convert a MongoDB-compatible type to a Python type.""" | ||||
|         if (not self.dbref and | ||||
|                 not isinstance(value, (DBRef, Document, EmbeddedDocument))): | ||||
|             collection = self.document_type._get_collection_name() | ||||
| @@ -1006,7 +999,7 @@ class ReferenceField(BaseField): | ||||
|     def validate(self, value): | ||||
|  | ||||
|         if not isinstance(value, (self.document_type, DBRef)): | ||||
|             self.error("A ReferenceField only accepts DBRef or documents") | ||||
|             self.error('A ReferenceField only accepts DBRef or documents') | ||||
|  | ||||
|         if isinstance(value, Document) and value.id is None: | ||||
|             self.error('You can only reference documents once they have been ' | ||||
| @@ -1030,14 +1023,19 @@ class CachedReferenceField(BaseField): | ||||
|     .. versionadded:: 0.9 | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, document_type, fields=[], auto_sync=True, **kwargs): | ||||
|     def __init__(self, document_type, fields=None, auto_sync=True, **kwargs): | ||||
|         """Initialises the Cached Reference Field. | ||||
|  | ||||
|         :param fields: A list of fields to be cached in the document. | ||||
|         :param auto_sync: if True, documents are automatically updated. | ||||
|         """ | ||||
|         if not isinstance(document_type, basestring) and \ | ||||
|                 not issubclass(document_type, (Document, basestring)): | ||||
|         if fields is None: | ||||
|             fields = [] | ||||
|  | ||||
|         if ( | ||||
|             not isinstance(document_type, six.string_types) and | ||||
|             not issubclass(document_type, Document) | ||||
|         ): | ||||
|             self.error('Argument to CachedReferenceField constructor must be a' | ||||
|                        ' document class or a string') | ||||
|  | ||||
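| The switch from fields=[] to fields=None sidesteps Python's shared mutable | ||||
| default argument; a minimal sketch of the pitfall: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     def bad(fields=[]):       # one list shared by every call | ||||
|         fields.append('x') | ||||
|         return fields | ||||
|  | ||||
|     def good(fields=None):    # a fresh list per call | ||||
|         if fields is None: | ||||
|             fields = [] | ||||
|         fields.append('x') | ||||
|         return fields | ||||
|  | ||||
|     bad(); bad()      # the shared list now holds ['x', 'x'] | ||||
|     good(); good()    # each call returns a one-element list ['x'] | ||||
|  | ||||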
| @@ -1053,18 +1051,20 @@ class CachedReferenceField(BaseField): | ||||
|                                   sender=self.document_type) | ||||
|  | ||||
|     def on_document_pre_save(self, sender, document, created, **kwargs): | ||||
|         if not created: | ||||
|             update_kwargs = dict( | ||||
|                 ('set__%s__%s' % (self.name, k), v) | ||||
|                 for k, v in document._delta()[0].items() | ||||
|                 if k in self.fields) | ||||
|         if created: | ||||
|             return None | ||||
|  | ||||
|             if update_kwargs: | ||||
|                 filter_kwargs = {} | ||||
|                 filter_kwargs[self.name] = document | ||||
|         update_kwargs = { | ||||
|             'set__%s__%s' % (self.name, key): val | ||||
|             for key, val in document._delta()[0].items() | ||||
|             if key in self.fields | ||||
|         } | ||||
|         if update_kwargs: | ||||
|             filter_kwargs = {} | ||||
|             filter_kwargs[self.name] = document | ||||
|  | ||||
|                 self.owner_document.objects( | ||||
|                     **filter_kwargs).update(**update_kwargs) | ||||
|             self.owner_document.objects( | ||||
|                 **filter_kwargs).update(**update_kwargs) | ||||
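|  | ||||
| For illustration (hypothetical values): with self.name == 'author' and | ||||
| self.fields == ['name'], a delta of {'name': 'Ann', 'age': 41} produces | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     update_kwargs = {'set__author__name': 'Ann'}   # 'age' is filtered out | ||||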
|  | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, dict): | ||||
| @@ -1077,7 +1077,7 @@ class CachedReferenceField(BaseField): | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
|         if isinstance(self.document_type_obj, basestring): | ||||
|         if isinstance(self.document_type_obj, six.string_types): | ||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||
|                 self.document_type_obj = self.owner_document | ||||
|             else: | ||||
| @@ -1117,7 +1117,7 @@ class CachedReferenceField(BaseField): | ||||
|             # TODO: should raise here, otherwise the next statement will fail | ||||
|  | ||||
|         value = SON(( | ||||
|             ("_id", id_field.to_mongo(id_)), | ||||
|             ('_id', id_field.to_mongo(id_)), | ||||
|         )) | ||||
|  | ||||
|         if fields: | ||||
| @@ -1143,7 +1143,7 @@ class CachedReferenceField(BaseField): | ||||
|     def validate(self, value): | ||||
|  | ||||
|         if not isinstance(value, self.document_type): | ||||
|             self.error("A CachedReferenceField only accepts documents") | ||||
|             self.error('A CachedReferenceField only accepts documents') | ||||
|  | ||||
|         if isinstance(value, Document) and value.id is None: | ||||
|             self.error('You can only reference documents once they have been ' | ||||
| @@ -1191,13 +1191,13 @@ class GenericReferenceField(BaseField): | ||||
|         # Keep the choices as a list of allowed Document class names | ||||
|         if choices: | ||||
|             for choice in choices: | ||||
|                 if isinstance(choice, basestring): | ||||
|                 if isinstance(choice, six.string_types): | ||||
|                     self.choices.append(choice) | ||||
|                 elif isinstance(choice, type) and issubclass(choice, Document): | ||||
|                     self.choices.append(choice._class_name) | ||||
|                 else: | ||||
|                     self.error('Invalid choices provided: must be a list of ' | ||||
|                                'Document subclasses and/or basestrings') | ||||
|                                'Document subclasses and/or six.string_types') | ||||
|  | ||||
|     def _validate_choices(self, value): | ||||
|         if isinstance(value, dict): | ||||
| @@ -1249,7 +1249,7 @@ class GenericReferenceField(BaseField): | ||||
|         if document is None: | ||||
|             return None | ||||
|  | ||||
|         if isinstance(document, (dict, SON)): | ||||
|         if isinstance(document, (dict, SON, ObjectId, DBRef)): | ||||
|             return document | ||||
|  | ||||
|         id_field_name = document.__class__._meta['id_field'] | ||||
| @@ -1280,8 +1280,7 @@ class GenericReferenceField(BaseField): | ||||
|  | ||||
|  | ||||
| class BinaryField(BaseField): | ||||
|     """A binary data field. | ||||
|     """ | ||||
|     """A binary data field.""" | ||||
|  | ||||
|     def __init__(self, max_bytes=None, **kwargs): | ||||
|         self.max_bytes = max_bytes | ||||
| @@ -1289,18 +1288,18 @@ class BinaryField(BaseField): | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         """Handle bytearrays in python 3.1""" | ||||
|         if PY3 and isinstance(value, bytearray): | ||||
|             value = bin_type(value) | ||||
|         if six.PY3 and isinstance(value, bytearray): | ||||
|             value = six.binary_type(value) | ||||
|         return super(BinaryField, self).__set__(instance, value) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return Binary(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (bin_type, txt_type, Binary)): | ||||
|             self.error("BinaryField only accepts instances of " | ||||
|                        "(%s, %s, Binary)" % ( | ||||
|                            bin_type.__name__, txt_type.__name__)) | ||||
|         if not isinstance(value, (six.binary_type, six.text_type, Binary)): | ||||
|             self.error('BinaryField only accepts instances of ' | ||||
|                        '(%s, %s, Binary)' % ( | ||||
|                            six.binary_type.__name__, six.text_type.__name__)) | ||||
|  | ||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||
|             self.error('Binary value is too long') | ||||
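|  | ||||
| A usage sketch (hypothetical document class; max_bytes caps the payload): | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     from mongoengine import BinaryField, Document | ||||
|  | ||||
|     class Blob(Document): | ||||
|         data = BinaryField(max_bytes=2048) | ||||
|  | ||||
|     Blob(data=b'\x00' * 4096).validate()   # ValidationError: too long | ||||
|  | ||||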
| @@ -1384,11 +1383,13 @@ class GridFSProxy(object): | ||||
|                 get_db(self.db_alias), self.collection_name) | ||||
|         return self._fs | ||||
|  | ||||
|     def get(self, id=None): | ||||
|         if id: | ||||
|             self.grid_id = id | ||||
|     def get(self, grid_id=None): | ||||
|         if grid_id: | ||||
|             self.grid_id = grid_id | ||||
|  | ||||
|         if self.grid_id is None: | ||||
|             return None | ||||
|  | ||||
|         try: | ||||
|             if self.gridout is None: | ||||
|                 self.gridout = self.fs.get(self.grid_id) | ||||
| @@ -1432,7 +1433,7 @@ class GridFSProxy(object): | ||||
|             try: | ||||
|                 return gridout.read(size) | ||||
|             except Exception: | ||||
|                 return "" | ||||
|                 return '' | ||||
|  | ||||
|     def delete(self): | ||||
|         # Delete file from GridFS, FileField still remains | ||||
| @@ -1464,9 +1465,8 @@ class FileField(BaseField): | ||||
|     """ | ||||
|     proxy_class = GridFSProxy | ||||
|  | ||||
|     def __init__(self, | ||||
|                  db_alias=DEFAULT_CONNECTION_NAME, | ||||
|                  collection_name="fs", **kwargs): | ||||
|     def __init__(self, db_alias=DEFAULT_CONNECTION_NAME, collection_name='fs', | ||||
|                  **kwargs): | ||||
|         super(FileField, self).__init__(**kwargs) | ||||
|         self.collection_name = collection_name | ||||
|         self.db_alias = db_alias | ||||
| @@ -1488,8 +1488,10 @@ class FileField(BaseField): | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         key = self.name | ||||
|         if ((hasattr(value, 'read') and not | ||||
|                 isinstance(value, GridFSProxy)) or isinstance(value, str_types)): | ||||
|         if ( | ||||
|             (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or | ||||
|             isinstance(value, (six.binary_type, six.string_types)) | ||||
|         ): | ||||
|             # using "FileField() = file/string" notation | ||||
|             grid_file = instance._data.get(self.name) | ||||
|             # If a file already exists, delete it | ||||
| @@ -1558,7 +1560,7 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|         try: | ||||
|             img = Image.open(file_obj) | ||||
|             img_format = img.format | ||||
|         except Exception, e: | ||||
|         except Exception as e: | ||||
|             raise ValidationError('Invalid image: %s' % e) | ||||
|  | ||||
|         # Progressive JPEG | ||||
| @@ -1667,10 +1669,10 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|             return self.fs.get(out.thumbnail_id) | ||||
|  | ||||
|     def write(self, *args, **kwargs): | ||||
|         raise RuntimeError("Please use \"put\" method instead") | ||||
|         raise RuntimeError('Please use "put" method instead') | ||||
|  | ||||
|     def writelines(self, *args, **kwargs): | ||||
|         raise RuntimeError("Please use \"put\" method instead") | ||||
|         raise RuntimeError('Please use "put" method instead') | ||||
|  | ||||
|  | ||||
| class ImproperlyConfigured(Exception): | ||||
| @@ -1695,14 +1697,17 @@ class ImageField(FileField): | ||||
|     def __init__(self, size=None, thumbnail_size=None, | ||||
|                  collection_name='images', **kwargs): | ||||
|         if not Image: | ||||
|             raise ImproperlyConfigured("PIL library was not found") | ||||
|             raise ImproperlyConfigured('PIL library was not found') | ||||
|  | ||||
|         params_size = ('width', 'height', 'force') | ||||
|         extra_args = dict(size=size, thumbnail_size=thumbnail_size) | ||||
|         extra_args = { | ||||
|             'size': size, | ||||
|             'thumbnail_size': thumbnail_size | ||||
|         } | ||||
|         for att_name, att in extra_args.items(): | ||||
|             value = None | ||||
|             if isinstance(att, (tuple, list)): | ||||
|                 if PY3: | ||||
|                 if six.PY3: | ||||
|                     value = dict(itertools.zip_longest(params_size, att, | ||||
|                                                        fillvalue=None)) | ||||
|                 else: | ||||
| @@ -1763,10 +1768,10 @@ class SequenceField(BaseField): | ||||
|         Generate and Increment the counter | ||||
|         """ | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         sequence_id = '%s.%s' % (sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|         counter = collection.find_and_modify(query={"_id": sequence_id}, | ||||
|                                              update={"$inc": {"next": 1}}, | ||||
|         counter = collection.find_and_modify(query={'_id': sequence_id}, | ||||
|                                              update={'$inc': {'next': 1}}, | ||||
|                                              new=True, | ||||
|                                              upsert=True) | ||||
|         return self.value_decorator(counter['next']) | ||||
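|  | ||||
| The backing counter lives in a plain collection ('mongoengine.counters' by | ||||
| default) and looks roughly like this hypothetical document; each call above | ||||
| atomically bumps 'next' and returns the new value: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     {'_id': 'person.id', 'next': 42} | ||||
|  | ||||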
| @@ -1789,9 +1794,9 @@ class SequenceField(BaseField): | ||||
|         as it is only fixed on set. | ||||
|         """ | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         sequence_id = '%s.%s' % (sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|         data = collection.find_one({"_id": sequence_id}) | ||||
|         data = collection.find_one({'_id': sequence_id}) | ||||
|  | ||||
|         if data: | ||||
|             return self.value_decorator(data['next'] + 1) | ||||
| @@ -1861,8 +1866,8 @@ class UUIDField(BaseField): | ||||
|         if not self._binary: | ||||
|             original_value = value | ||||
|             try: | ||||
|                 if not isinstance(value, basestring): | ||||
|                     value = unicode(value) | ||||
|                 if not isinstance(value, six.string_types): | ||||
|                     value = six.text_type(value) | ||||
|                 return uuid.UUID(value) | ||||
|             except Exception: | ||||
|                 return original_value | ||||
| @@ -1870,8 +1875,8 @@ class UUIDField(BaseField): | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if not self._binary: | ||||
|             return unicode(value) | ||||
|         elif isinstance(value, basestring): | ||||
|             return six.text_type(value) | ||||
|         elif isinstance(value, six.string_types): | ||||
|             return uuid.UUID(value) | ||||
|         return value | ||||
|  | ||||
| @@ -1882,11 +1887,11 @@ class UUIDField(BaseField): | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, uuid.UUID): | ||||
|             if not isinstance(value, basestring): | ||||
|             if not isinstance(value, six.string_types): | ||||
|                 value = str(value) | ||||
|             try: | ||||
|                 uuid.UUID(value) | ||||
|             except Exception, exc: | ||||
|             except Exception as exc: | ||||
|                 self.error('Could not convert to UUID: %s' % exc) | ||||
|  | ||||
|  | ||||
| @@ -1904,19 +1909,18 @@ class GeoPointField(BaseField): | ||||
|     _geo_index = pymongo.GEO2D | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a geo-value is of type (x, y) | ||||
|         """ | ||||
|         """Make sure that a geo-value is of type (x, y)""" | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             self.error('GeoPointField can only accept tuples or lists ' | ||||
|                        'of (x, y)') | ||||
|  | ||||
|         if not len(value) == 2: | ||||
|             self.error("Value (%s) must be a two-dimensional point" % | ||||
|             self.error('Value (%s) must be a two-dimensional point' % | ||||
|                        repr(value)) | ||||
|         elif (not isinstance(value[0], (float, int)) or | ||||
|               not isinstance(value[1], (float, int))): | ||||
|             self.error( | ||||
|                 "Both values (%s) in point must be float or int" % repr(value)) | ||||
|                 'Both values (%s) in point must be float or int' % repr(value)) | ||||
|  | ||||
|  | ||||
| class PointField(GeoJsonBaseField): | ||||
| @@ -1926,8 +1930,8 @@ class PointField(GeoJsonBaseField): | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "Point" , | ||||
|           "coordinates" : [x, y]} | ||||
|         {'type': 'Point', | ||||
|          'coordinates': [x, y]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list | ||||
|     to set the value. | ||||
| @@ -1936,7 +1940,7 @@ class PointField(GeoJsonBaseField): | ||||
|  | ||||
|     .. versionadded:: 0.8 | ||||
|     """ | ||||
|     _type = "Point" | ||||
|     _type = 'Point' | ||||
|  | ||||
|  | ||||
| class LineStringField(GeoJsonBaseField): | ||||
| @@ -1946,8 +1950,8 @@ class LineStringField(GeoJsonBaseField): | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "LineString" , | ||||
|           "coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]} | ||||
|         {'type': 'LineString', | ||||
|          'coordinates': [[x1, y1], [x1, y1] ... [xn, yn]]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list of points. | ||||
|  | ||||
| @@ -1955,7 +1959,7 @@ class LineStringField(GeoJsonBaseField): | ||||
|  | ||||
|     .. versionadded:: 0.8 | ||||
|     """ | ||||
|     _type = "LineString" | ||||
|     _type = 'LineString' | ||||
|  | ||||
|  | ||||
| class PolygonField(GeoJsonBaseField): | ||||
| @@ -1965,9 +1969,9 @@ class PolygonField(GeoJsonBaseField): | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "Polygon" , | ||||
|           "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                            [[x1, y1], [x1, y1] ... [xn, yn]]} | ||||
|         {'type': 'Polygon', | ||||
|          'coordinates': [[[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                          [[x1, y1], [x1, y1] ... [xn, yn]]]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list | ||||
|     of LineStrings. The first LineString being the outside and the rest being | ||||
| @@ -1977,7 +1981,7 @@ class PolygonField(GeoJsonBaseField): | ||||
|  | ||||
|     .. versionadded:: 0.8 | ||||
|     """ | ||||
|     _type = "Polygon" | ||||
|     _type = 'Polygon' | ||||
|  | ||||
|  | ||||
| class MultiPointField(GeoJsonBaseField): | ||||
| @@ -1987,8 +1991,8 @@ class MultiPointField(GeoJsonBaseField): | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "MultiPoint" , | ||||
|           "coordinates" : [[x1, y1], [x2, y2]]} | ||||
|         {'type': 'MultiPoint', | ||||
|          'coordinates': [[x1, y1], [x2, y2]]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list | ||||
|     to set the value. | ||||
| @@ -1997,7 +2001,7 @@ class MultiPointField(GeoJsonBaseField): | ||||
|  | ||||
|     .. versionadded:: 0.9 | ||||
|     """ | ||||
|     _type = "MultiPoint" | ||||
|     _type = 'MultiPoint' | ||||
|  | ||||
|  | ||||
| class MultiLineStringField(GeoJsonBaseField): | ||||
| @@ -2007,9 +2011,9 @@ class MultiLineStringField(GeoJsonBaseField): | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "MultiLineString" , | ||||
|           "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                            [[x1, y1], [x1, y1] ... [xn, yn]]]} | ||||
|         {'type': 'MultiLineString', | ||||
|          'coordinates': [[[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                          [[x1, y1], [x1, y1] ... [xn, yn]]]} | ||||
|  | ||||
|     You can either pass a dict with the full information or a list of points. | ||||
|  | ||||
| @@ -2017,7 +2021,7 @@ class MultiLineStringField(GeoJsonBaseField): | ||||
|  | ||||
|     .. versionadded:: 0.9 | ||||
|     """ | ||||
|     _type = "MultiLineString" | ||||
|     _type = 'MultiLineString' | ||||
|  | ||||
|  | ||||
| class MultiPolygonField(GeoJsonBaseField): | ||||
| @@ -2027,14 +2031,14 @@ class MultiPolygonField(GeoJsonBaseField): | ||||
|  | ||||
|     .. code-block:: js | ||||
|  | ||||
|         { "type" : "MultiPolygon" , | ||||
|           "coordinates" : [[ | ||||
|                 [[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                 [[x1, y1], [x1, y1] ... [xn, yn]] | ||||
|             ], [ | ||||
|                 [[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                 [[x1, y1], [x1, y1] ... [xn, yn]] | ||||
|             ] | ||||
|         {'type': 'MultiPolygon', | ||||
|          'coordinates': [[ | ||||
|                [[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                [[x1, y1], [x1, y1] ... [xn, yn]] | ||||
|            ], [ | ||||
|                [[x1, y1], [x1, y1] ... [xn, yn]], | ||||
|                [[x1, y1], [x1, y1] ... [xn, yn]] | ||||
|            ] | ||||
|         } | ||||
|  | ||||
|     You can either pass a dict with the full information or a list | ||||
| @@ -2044,4 +2048,4 @@ class MultiPolygonField(GeoJsonBaseField): | ||||
|  | ||||
|     .. versionadded:: 0.9 | ||||
|     """ | ||||
|     _type = "MultiPolygon" | ||||
|     _type = 'MultiPolygon' | ||||
|   | ||||
| @@ -1,7 +1,9 @@ | ||||
| """Helper functions and types to aid with Python 2.5 - 3 support.""" | ||||
|  | ||||
| import sys | ||||
| """ | ||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x and | ||||
| PyMongo v2.7 - v3.x support. | ||||
| """ | ||||
| import pymongo | ||||
| import six | ||||
|  | ||||
|  | ||||
| if pymongo.version_tuple[0] < 3: | ||||
| @@ -9,29 +11,15 @@ if pymongo.version_tuple[0] < 3: | ||||
| else: | ||||
|     IS_PYMONGO_3 = True | ||||
|  | ||||
| PY3 = sys.version_info[0] == 3 | ||||
|  | ||||
| if PY3: | ||||
|     import codecs | ||||
|     from io import BytesIO as StringIO | ||||
| # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | ||||
| StringIO = six.BytesIO | ||||
|  | ||||
|     # return s converted to binary.  b('test') should be equivalent to b'test' | ||||
|     def b(s): | ||||
|         return codecs.latin_1_encode(s)[0] | ||||
|  | ||||
|     bin_type = bytes | ||||
|     txt_type = str | ||||
| else: | ||||
| # Additionally for Py2, try to use the faster cStringIO, if available | ||||
| if not six.PY3: | ||||
|     try: | ||||
|         from cStringIO import StringIO | ||||
|         import cStringIO | ||||
|     except ImportError: | ||||
|         from StringIO import StringIO | ||||
|  | ||||
|     # Conversion to binary only necessary in Python 3 | ||||
|     def b(s): | ||||
|         return s | ||||
|  | ||||
|     bin_type = str | ||||
|     txt_type = unicode | ||||
|  | ||||
| str_types = (bin_type, txt_type) | ||||
|         pass | ||||
|     else: | ||||
|         StringIO = cStringIO.StringIO | ||||
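|  | ||||
| A quick sketch of the six helpers that now replace the home-grown aliases | ||||
| (behaviour stated for both major Python versions): | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     import six | ||||
|  | ||||
|     six.PY3                               # True only on Python 3 | ||||
|     isinstance('abc', six.string_types)   # basestring on Py2, str on Py3 | ||||
|     six.text_type(123)                    # unicode on Py2, str on Py3 | ||||
|     six.binary_type                       # str on Py2, bytes on Py3 | ||||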
|   | ||||
| @@ -1,11 +1,17 @@ | ||||
| from mongoengine.errors import (DoesNotExist, InvalidQueryError, | ||||
|                                 MultipleObjectsReturned, NotUniqueError, | ||||
|                                 OperationError) | ||||
| from mongoengine.errors import * | ||||
| from mongoengine.queryset.field_list import * | ||||
| from mongoengine.queryset.manager import * | ||||
| from mongoengine.queryset.queryset import * | ||||
| from mongoengine.queryset.transform import * | ||||
| from mongoengine.queryset.visitor import * | ||||
|  | ||||
| __all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + | ||||
|            transform.__all__ + visitor.__all__) | ||||
| # Expose just the public subset of all imported objects and constants. | ||||
| __all__ = ( | ||||
|     'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager', | ||||
|     'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL', | ||||
|  | ||||
|     # Errors that might be related to a queryset, mostly here for backward | ||||
|     # compatibility | ||||
|     'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned', | ||||
|     'NotUniqueError', 'OperationError', | ||||
| ) | ||||
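|  | ||||
| With the explicit whitelist above, a star import binds only the listed | ||||
| names instead of whatever the submodules happen to export: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     from mongoengine.queryset import *   # QuerySet, Q, CASCADE, ... | ||||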
|   | ||||
| @@ -12,9 +12,10 @@ from bson.code import Code | ||||
| import pymongo | ||||
| import pymongo.errors | ||||
| from pymongo.common import validate_read_preference | ||||
| import six | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base.common import get_document | ||||
| from mongoengine.base import get_document | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.context_managers import switch_db | ||||
| @@ -73,15 +74,16 @@ class BaseQuerySet(object): | ||||
|         # subclasses of the class being used | ||||
|         if document._meta.get('allow_inheritance') is True: | ||||
|             if len(self._document._subclasses) == 1: | ||||
|                 self._initial_query = {"_cls": self._document._subclasses[0]} | ||||
|                 self._initial_query = {'_cls': self._document._subclasses[0]} | ||||
|             else: | ||||
|                 self._initial_query = { | ||||
|                     "_cls": {"$in": self._document._subclasses}} | ||||
|                     '_cls': {'$in': self._document._subclasses}} | ||||
|             self._loaded_fields = QueryFieldList(always_include=['_cls']) | ||||
|         self._cursor_obj = None | ||||
|         self._limit = None | ||||
|         self._skip = None | ||||
|         self._hint = -1  # Using -1 as None is a valid value for hint | ||||
|         self._batch_size = None | ||||
|         self.only_fields = [] | ||||
|         self._max_time_ms = None | ||||
|  | ||||
| @@ -104,8 +106,8 @@ class BaseQuerySet(object): | ||||
|         if q_obj: | ||||
|             # make sure proper query object is passed | ||||
|             if not isinstance(q_obj, QNode): | ||||
|                 msg = ("Not a query object: %s. " | ||||
|                        "Did you intend to use key=value?" % q_obj) | ||||
|                 msg = ('Not a query object: %s. ' | ||||
|                        'Did you intend to use key=value?' % q_obj) | ||||
|                 raise InvalidQueryError(msg) | ||||
|             query &= q_obj | ||||
|  | ||||
| @@ -132,10 +134,10 @@ class BaseQuerySet(object): | ||||
|         obj_dict = self.__dict__.copy() | ||||
|  | ||||
|         # don't pickle the collection, instead pickle the collection params | ||||
|         obj_dict.pop("_collection_obj") | ||||
|         obj_dict.pop('_collection_obj') | ||||
|  | ||||
|         # don't pickle cursor | ||||
|         obj_dict["_cursor_obj"] = None | ||||
|         obj_dict['_cursor_obj'] = None | ||||
|  | ||||
|         return obj_dict | ||||
|  | ||||
| @@ -146,7 +148,7 @@ class BaseQuerySet(object): | ||||
|         See https://github.com/MongoEngine/mongoengine/issues/442 | ||||
|         """ | ||||
|  | ||||
|         obj_dict["_collection_obj"] = obj_dict["_document"]._get_collection() | ||||
|         obj_dict['_collection_obj'] = obj_dict['_document']._get_collection() | ||||
|  | ||||
|         # update attributes | ||||
|         self.__dict__.update(obj_dict) | ||||
| @@ -165,7 +167,7 @@ class BaseQuerySet(object): | ||||
|                 queryset._skip, queryset._limit = key.start, key.stop | ||||
|                 if key.start and key.stop: | ||||
|                     queryset._limit = key.stop - key.start | ||||
|             except IndexError, err: | ||||
|             except IndexError as err: | ||||
|                 # PyMongo raises an error if key.start == key.stop, catch it, | ||||
|                 # bin it, kill it. | ||||
|                 start = key.start or 0 | ||||
| @@ -198,19 +200,16 @@ class BaseQuerySet(object): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def _has_data(self): | ||||
|         """ Retrieves whether cursor has any data. """ | ||||
|  | ||||
|         """Return True if cursor has any data.""" | ||||
|         queryset = self.order_by() | ||||
|         return False if queryset.first() is None else True | ||||
|  | ||||
|     def __nonzero__(self): | ||||
|         """ Avoid to open all records in an if stmt in Py2. """ | ||||
|  | ||||
|         """Avoid to open all records in an if stmt in Py2.""" | ||||
|         return self._has_data() | ||||
|  | ||||
|     def __bool__(self): | ||||
|         """ Avoid to open all records in an if stmt in Py3. """ | ||||
|  | ||||
|         """Avoid to open all records in an if stmt in Py3.""" | ||||
|         return self._has_data() | ||||
|  | ||||
|     # Core functions | ||||
| @@ -238,7 +237,7 @@ class BaseQuerySet(object): | ||||
|         queryset = self.clone() | ||||
|         if queryset._search_text: | ||||
|             raise OperationError( | ||||
|                 "It is not possible to use search_text two times.") | ||||
|                 'It is not possible to use search_text twice.') | ||||
|  | ||||
|         query_kwargs = SON({'$search': text}) | ||||
|         if language: | ||||
| @@ -267,7 +266,7 @@ class BaseQuerySet(object): | ||||
|         try: | ||||
|             result = queryset.next() | ||||
|         except StopIteration: | ||||
|             msg = ("%s matching query does not exist." | ||||
|             msg = ('%s matching query does not exist.' | ||||
|                    % queryset._document._class_name) | ||||
|             raise queryset._document.DoesNotExist(msg) | ||||
|         try: | ||||
| @@ -275,6 +274,8 @@ class BaseQuerySet(object): | ||||
|         except StopIteration: | ||||
|             return result | ||||
|  | ||||
|         # If we were able to retrieve the 2nd doc, rewind the cursor and | ||||
|         # raise the MultipleObjectsReturned exception. | ||||
|         queryset.rewind() | ||||
|         message = u'%d items returned, instead of 1' % queryset.count() | ||||
|         raise queryset._document.MultipleObjectsReturned(message) | ||||
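|  | ||||
| The resulting contract, sketched with a hypothetical User document: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     try: | ||||
|         user = User.objects.get(email='a@example.com') | ||||
|     except User.DoesNotExist: | ||||
|         pass   # zero matches | ||||
|     except User.MultipleObjectsReturned: | ||||
|         pass   # two or more matches | ||||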
| @@ -287,8 +288,7 @@ class BaseQuerySet(object): | ||||
|         return self._document(**kwargs).save() | ||||
|  | ||||
|     def first(self): | ||||
|         """Retrieve the first object matching the query. | ||||
|         """ | ||||
|         """Retrieve the first object matching the query.""" | ||||
|         queryset = self.clone() | ||||
|         try: | ||||
|             result = queryset[0] | ||||
| @@ -337,7 +337,7 @@ class BaseQuerySet(object): | ||||
|                        % str(self._document)) | ||||
|                 raise OperationError(msg) | ||||
|             if doc.pk and not doc._created: | ||||
|                 msg = "Some documents have ObjectIds use doc.update() instead" | ||||
|                 msg = 'Some documents have ObjectIds, use doc.update() instead' | ||||
|                 raise OperationError(msg) | ||||
|  | ||||
|         signal_kwargs = signal_kwargs or {} | ||||
| @@ -347,17 +347,17 @@ class BaseQuerySet(object): | ||||
|         raw = [doc.to_mongo() for doc in docs] | ||||
|         try: | ||||
|             ids = self._collection.insert(raw, **write_concern) | ||||
|         except pymongo.errors.DuplicateKeyError, err: | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             raise NotUniqueError(message % unicode(err)) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             raise NotUniqueError(message % six.text_type(err)) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||
|             if re.match('^E1100[01] duplicate key', six.text_type(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u'Tried to save duplicate unique keys (%s)' | ||||
|                 raise NotUniqueError(message % unicode(err)) | ||||
|             raise OperationError(message % unicode(err)) | ||||
|                 raise NotUniqueError(message % six.text_type(err)) | ||||
|             raise OperationError(message % six.text_type(err)) | ||||
|  | ||||
|         if not load_bulk: | ||||
|             signals.post_bulk_insert.send( | ||||
| @@ -383,7 +383,8 @@ class BaseQuerySet(object): | ||||
|             return 0 | ||||
|         return self._cursor.count(with_limit_and_skip=with_limit_and_skip) | ||||
|  | ||||
|     def delete(self, write_concern=None, _from_doc_delete=False, cascade_refs=None): | ||||
|     def delete(self, write_concern=None, _from_doc_delete=False, | ||||
|                cascade_refs=None): | ||||
|         """Delete the documents matched by the query. | ||||
|  | ||||
|         :param write_concern: Extra keyword arguments are passed down which | ||||
| @@ -406,8 +407,9 @@ class BaseQuerySet(object): | ||||
|         # Handle deletes where skips or limits have been applied or | ||||
|         # there is an untriggered delete signal | ||||
|         has_delete_signal = signals.signals_available and ( | ||||
|             signals.pre_delete.has_receivers_for(self._document) or | ||||
|             signals.post_delete.has_receivers_for(self._document)) | ||||
|             signals.pre_delete.has_receivers_for(doc) or | ||||
|             signals.post_delete.has_receivers_for(doc) | ||||
|         ) | ||||
|  | ||||
|         call_document_delete = (queryset._skip or queryset._limit or | ||||
|                                 has_delete_signal) and not _from_doc_delete | ||||
| @@ -420,37 +422,44 @@ class BaseQuerySet(object): | ||||
|             return cnt | ||||
|  | ||||
|         delete_rules = doc._meta.get('delete_rules') or {} | ||||
|         delete_rules = list(delete_rules.items()) | ||||
|  | ||||
|         # Check for DENY rules before actually deleting/nullifying any other | ||||
|         # references | ||||
|         for rule_entry in delete_rules: | ||||
|         for rule_entry, rule in delete_rules: | ||||
|             document_cls, field_name = rule_entry | ||||
|             if document_cls._meta.get('abstract'): | ||||
|                 continue | ||||
|             rule = doc._meta['delete_rules'][rule_entry] | ||||
|             if rule == DENY and document_cls.objects( | ||||
|                     **{field_name + '__in': self}).count() > 0: | ||||
|                 msg = ("Could not delete document (%s.%s refers to it)" | ||||
|                        % (document_cls.__name__, field_name)) | ||||
|                 raise OperationError(msg) | ||||
|  | ||||
|         for rule_entry in delete_rules: | ||||
|             if rule == DENY: | ||||
|                 refs = document_cls.objects(**{field_name + '__in': self}) | ||||
|                 if refs.limit(1).count() > 0: | ||||
|                     raise OperationError( | ||||
|                         'Could not delete document (%s.%s refers to it)' | ||||
|                         % (document_cls.__name__, field_name) | ||||
|                     ) | ||||
|  | ||||
|         # Check all the other rules | ||||
|         for rule_entry, rule in delete_rules: | ||||
|             document_cls, field_name = rule_entry | ||||
|             if document_cls._meta.get('abstract'): | ||||
|                 continue | ||||
|             rule = doc._meta['delete_rules'][rule_entry] | ||||
|  | ||||
|             if rule == CASCADE: | ||||
|                 cascade_refs = set() if cascade_refs is None else cascade_refs | ||||
|                 # Handle recursive reference | ||||
|                 if doc._collection == document_cls._collection: | ||||
|                     for ref in queryset: | ||||
|                         cascade_refs.add(ref.id) | ||||
|                 ref_q = document_cls.objects(**{field_name + '__in': self, 'id__nin': cascade_refs}) | ||||
|                 ref_q_count = ref_q.count() | ||||
|                 if ref_q_count > 0: | ||||
|                     ref_q.delete(write_concern=write_concern, cascade_refs=cascade_refs) | ||||
|                 refs = document_cls.objects(**{field_name + '__in': self, | ||||
|                                                'pk__nin': cascade_refs}) | ||||
|                 if refs.count() > 0: | ||||
|                     refs.delete(write_concern=write_concern, | ||||
|                                 cascade_refs=cascade_refs) | ||||
|             elif rule == NULLIFY: | ||||
|                 document_cls.objects(**{field_name + '__in': self}).update( | ||||
|                     write_concern=write_concern, **{'unset__%s' % field_name: 1}) | ||||
|                     write_concern=write_concern, | ||||
|                     **{'unset__%s' % field_name: 1}) | ||||
|             elif rule == PULL: | ||||
|                 document_cls.objects(**{field_name + '__in': self}).update( | ||||
|                     write_concern=write_concern, | ||||
| @@ -458,7 +467,7 @@ class BaseQuerySet(object): | ||||
|  | ||||
|         result = queryset._collection.remove(queryset._query, **write_concern) | ||||
|         if result: | ||||
|             return result.get("n") | ||||
|             return result.get('n') | ||||
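|  | ||||
| The rules consumed above are declared per reference field; a sketch with | ||||
| hypothetical models (reverse_delete_rule and the rule constants are real | ||||
| mongoengine exports): | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     from mongoengine import (CASCADE, DENY, Document, ReferenceField, | ||||
|                              StringField) | ||||
|  | ||||
|     class Author(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|     class Book(Document): | ||||
|         # deleting an Author also deletes their books | ||||
|         author = ReferenceField(Author, reverse_delete_rule=CASCADE) | ||||
|  | ||||
|     class Review(Document): | ||||
|         # an Author with reviews cannot be deleted (OperationError) | ||||
|         author = ReferenceField(Author, reverse_delete_rule=DENY) | ||||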
|  | ||||
|     def update(self, upsert=False, multi=True, write_concern=None, | ||||
|                full_result=False, **update): | ||||
| @@ -479,7 +488,7 @@ class BaseQuerySet(object): | ||||
|         .. versionadded:: 0.2 | ||||
|         """ | ||||
|         if not update and not upsert: | ||||
|             raise OperationError("No update parameters, would remove data") | ||||
|             raise OperationError('No update parameters, would remove data') | ||||
|  | ||||
|         if write_concern is None: | ||||
|             write_concern = {} | ||||
| @@ -492,9 +501,9 @@ class BaseQuerySet(object): | ||||
|         # then ensure we add _cls to the update operation | ||||
|         if upsert and '_cls' in query: | ||||
|             if '$set' in update: | ||||
|                 update["$set"]["_cls"] = queryset._document._class_name | ||||
|                 update['$set']['_cls'] = queryset._document._class_name | ||||
|             else: | ||||
|                 update["$set"] = {"_cls": queryset._document._class_name} | ||||
|                 update['$set'] = {'_cls': queryset._document._class_name} | ||||
|         try: | ||||
|             result = queryset._collection.update(query, update, multi=multi, | ||||
|                                                  upsert=upsert, **write_concern) | ||||
| @@ -502,13 +511,13 @@ class BaseQuerySet(object): | ||||
|                 return result | ||||
|             elif result: | ||||
|                 return result['n'] | ||||
|         except pymongo.errors.DuplicateKeyError, err: | ||||
|             raise NotUniqueError(u'Update failed (%s)' % unicode(err)) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             if unicode(err) == u'multi not coded yet': | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             if six.text_type(err) == u'multi not coded yet': | ||||
|                 message = u'update() method requires MongoDB 1.1.3+' | ||||
|                 raise OperationError(message) | ||||
|             raise OperationError(u'Update failed (%s)' % unicode(err)) | ||||
|             raise OperationError(u'Update failed (%s)' % six.text_type(err)) | ||||
|  | ||||
|     def upsert_one(self, write_concern=None, **update): | ||||
|         """Overwrite or add the first document matched by the query. | ||||
| @@ -579,11 +588,11 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|  | ||||
|         if remove and new: | ||||
|             raise OperationError("Conflicting parameters: remove and new") | ||||
|             raise OperationError('Conflicting parameters: remove and new') | ||||
|  | ||||
|         if not update and not upsert and not remove: | ||||
|             raise OperationError( | ||||
|                 "No update parameters, must either update or remove") | ||||
|                 'No update parameters, must either update or remove') | ||||
|  | ||||
|         queryset = self.clone() | ||||
|         query = queryset._query | ||||
| @@ -594,7 +603,7 @@ class BaseQuerySet(object): | ||||
|         try: | ||||
|             if IS_PYMONGO_3: | ||||
|                 if full_response: | ||||
|                     msg = "With PyMongo 3+, it is not possible anymore to get the full response." | ||||
|                     msg = 'With PyMongo 3+, it is no longer possible to get the full response.' | ||||
|                     warnings.warn(msg, DeprecationWarning) | ||||
|                 if remove: | ||||
|                     result = queryset._collection.find_one_and_delete( | ||||
| @@ -612,14 +621,14 @@ class BaseQuerySet(object): | ||||
|                 result = queryset._collection.find_and_modify( | ||||
|                     query, update, upsert=upsert, sort=sort, remove=remove, new=new, | ||||
|                     full_response=full_response, **self._cursor_args) | ||||
|         except pymongo.errors.DuplicateKeyError, err: | ||||
|             raise NotUniqueError(u"Update failed (%s)" % err) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             raise OperationError(u"Update failed (%s)" % err) | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             raise NotUniqueError(u'Update failed (%s)' % err) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             raise OperationError(u'Update failed (%s)' % err) | ||||
|  | ||||
|         if full_response: | ||||
|             if result["value"] is not None: | ||||
|                 result["value"] = self._document._from_son(result["value"], only_fields=self.only_fields) | ||||
|             if result['value'] is not None: | ||||
|                 result['value'] = self._document._from_son(result['value'], only_fields=self.only_fields) | ||||
|         else: | ||||
|             if result is not None: | ||||
|                 result = self._document._from_son(result, only_fields=self.only_fields) | ||||
| @@ -637,7 +646,7 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|         if not queryset._query_obj.empty: | ||||
|             msg = "Cannot use a filter whilst using `with_id`" | ||||
|             msg = 'Cannot use a filter whilst using `with_id`' | ||||
|             raise InvalidQueryError(msg) | ||||
|         return queryset.filter(pk=object_id).first() | ||||
|  | ||||
| @@ -681,7 +690,7 @@ class BaseQuerySet(object): | ||||
|         Only return instances of this document and not any inherited documents | ||||
|         """ | ||||
|         if self._document._meta.get('allow_inheritance') is True: | ||||
|             self._initial_query = {"_cls": self._document._class_name} | ||||
|             self._initial_query = {'_cls': self._document._class_name} | ||||
|  | ||||
|         return self | ||||
|  | ||||
| @@ -781,6 +790,19 @@ class BaseQuerySet(object): | ||||
|         queryset._hint = index | ||||
|         return queryset | ||||
|  | ||||
|     def batch_size(self, size): | ||||
|         """Limit the number of documents returned in a single batch (each | ||||
|         batch requires a round trip to the server). | ||||
|  | ||||
|         See http://api.mongodb.com/python/current/api/pymongo/cursor.html#pymongo.cursor.Cursor.batch_size | ||||
|         for details. | ||||
|  | ||||
|         :param size: desired size of each batch. | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|         queryset._batch_size = size | ||||
|         return queryset | ||||
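|  | ||||
| Usage sketch (hypothetical Page document): | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     # pull 100 documents per server round trip while iterating | ||||
|     for page in Page.objects.batch_size(100): | ||||
|         print(page.pk) | ||||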
|  | ||||
|     def distinct(self, field): | ||||
|         """Return a list of distinct values for a given field. | ||||
|  | ||||
| @@ -794,49 +816,56 @@ class BaseQuerySet(object): | ||||
|         .. versionchanged:: 0.6 - Improved db_field reference handling | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|  | ||||
|         try: | ||||
|             field = self._fields_to_dbfields([field]).pop() | ||||
|         finally: | ||||
|             distinct = self._dereference(queryset._cursor.distinct(field), 1, | ||||
|                                          name=field, instance=self._document) | ||||
|         except LookUpError: | ||||
|             pass | ||||
|  | ||||
|             doc_field = self._document._fields.get(field.split('.', 1)[0]) | ||||
|             instance = False | ||||
|             # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) | ||||
|             EmbeddedDocumentField = _import_class('EmbeddedDocumentField') | ||||
|             ListField = _import_class('ListField') | ||||
|             GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField') | ||||
|             if isinstance(doc_field, ListField): | ||||
|                 doc_field = getattr(doc_field, "field", doc_field) | ||||
|             if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): | ||||
|                 instance = getattr(doc_field, "document_type", False) | ||||
|             # handle distinct on subdocuments | ||||
|             if '.' in field: | ||||
|                 for field_part in field.split('.')[1:]: | ||||
|                     # if looping on embedded document, get the document type instance | ||||
|                     if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): | ||||
|                         doc_field = instance | ||||
|                     # now get the subdocument | ||||
|                     doc_field = getattr(doc_field, field_part, doc_field) | ||||
|                     # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) | ||||
|                     if isinstance(doc_field, ListField): | ||||
|                         doc_field = getattr(doc_field, "field", doc_field) | ||||
|                     if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): | ||||
|                         instance = getattr(doc_field, "document_type", False) | ||||
|             if instance and isinstance(doc_field, (EmbeddedDocumentField, | ||||
|                                                    GenericEmbeddedDocumentField)): | ||||
|                 distinct = [instance(**doc) for doc in distinct] | ||||
|             return distinct | ||||
|         distinct = self._dereference(queryset._cursor.distinct(field), 1, | ||||
|                                      name=field, instance=self._document) | ||||
|  | ||||
|         doc_field = self._document._fields.get(field.split('.', 1)[0]) | ||||
|         instance = None | ||||
|  | ||||
|         # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) | ||||
|         EmbeddedDocumentField = _import_class('EmbeddedDocumentField') | ||||
|         ListField = _import_class('ListField') | ||||
|         GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField') | ||||
|         if isinstance(doc_field, ListField): | ||||
|             doc_field = getattr(doc_field, 'field', doc_field) | ||||
|         if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): | ||||
|             instance = getattr(doc_field, 'document_type', None) | ||||
|  | ||||
|         # handle distinct on subdocuments | ||||
|         if '.' in field: | ||||
|             for field_part in field.split('.')[1:]: | ||||
|                 # if looping on embedded document, get the document type instance | ||||
|                 if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): | ||||
|                     doc_field = instance | ||||
|                 # now get the subdocument | ||||
|                 doc_field = getattr(doc_field, field_part, doc_field) | ||||
|                 # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) | ||||
|                 if isinstance(doc_field, ListField): | ||||
|                     doc_field = getattr(doc_field, 'field', doc_field) | ||||
|                 if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): | ||||
|                     instance = getattr(doc_field, 'document_type', None) | ||||
|  | ||||
|         if instance and isinstance(doc_field, (EmbeddedDocumentField, | ||||
|                                                GenericEmbeddedDocumentField)): | ||||
|             distinct = [instance(**doc) for doc in distinct] | ||||
|  | ||||
|         return distinct | ||||
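|  | ||||
| For illustration (hypothetical model), both plain and subdocument fields | ||||
| work; embedded results are re-wrapped in their document type as above: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     Book.objects.distinct('title')          # unique titles | ||||
|     Book.objects.distinct('author.name')    # distinct on a subdocument | ||||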
|  | ||||
|     def only(self, *fields): | ||||
|         """Load only a subset of this document's fields. :: | ||||
|  | ||||
|             post = BlogPost.objects(...).only("title", "author.name") | ||||
|             post = BlogPost.objects(...).only('title', 'author.name') | ||||
|  | ||||
|         .. note :: `only()` is chainable and will perform a union :: | ||||
|             So with the following it will fetch both: `title` and `author.name`:: | ||||
|  | ||||
|                 post = BlogPost.objects.only("title").only("author.name") | ||||
|                 post = BlogPost.objects.only('title').only('author.name') | ||||
|  | ||||
|         :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any | ||||
|         field filters. | ||||
| @@ -846,19 +875,19 @@ class BaseQuerySet(object): | ||||
|         .. versionadded:: 0.3 | ||||
|         .. versionchanged:: 0.5 - Added subfield support | ||||
|         """ | ||||
|         fields = dict([(f, QueryFieldList.ONLY) for f in fields]) | ||||
|         fields = {f: QueryFieldList.ONLY for f in fields} | ||||
|         self.only_fields = fields.keys() | ||||
|         return self.fields(True, **fields) | ||||
|  | ||||
|     def exclude(self, *fields): | ||||
|         """Opposite to .only(), exclude some document's fields. :: | ||||
|  | ||||
|             post = BlogPost.objects(...).exclude("comments") | ||||
|             post = BlogPost.objects(...).exclude('comments') | ||||
|  | ||||
|         .. note :: `exclude()` is chainable and will perform a union :: | ||||
|             So with the following it will exclude both: `title` and `author.name`:: | ||||
|  | ||||
|                 post = BlogPost.objects.exclude("title").exclude("author.name") | ||||
|                 post = BlogPost.objects.exclude('title').exclude('author.name') | ||||
|  | ||||
|         :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any | ||||
|         field filters. | ||||
| @@ -867,7 +896,7 @@ class BaseQuerySet(object): | ||||
|  | ||||
|         .. versionadded:: 0.5 | ||||
|         """ | ||||
|         fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) | ||||
|         fields = {f: QueryFieldList.EXCLUDE for f in fields} | ||||
|         return self.fields(**fields) | ||||
|  | ||||
|     def fields(self, _only_called=False, **kwargs): | ||||
| @@ -888,7 +917,7 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|  | ||||
|         # Check for an operator and transform to mongo-style if there is | ||||
|         operators = ["slice"] | ||||
|         operators = ['slice'] | ||||
|         cleaned_fields = [] | ||||
|         for key, value in kwargs.items(): | ||||
|             parts = key.split('__') | ||||
| @@ -912,7 +941,7 @@ class BaseQuerySet(object): | ||||
|         """Include all fields. Reset all previously calls of .only() or | ||||
|         .exclude(). :: | ||||
|  | ||||
|             post = BlogPost.objects.exclude("comments").all_fields() | ||||
|             post = BlogPost.objects.exclude('comments').all_fields() | ||||
|  | ||||
|         .. versionadded:: 0.5 | ||||
|         """ | ||||
| @@ -933,6 +962,14 @@ class BaseQuerySet(object): | ||||
|         queryset._ordering = queryset._get_order_by(keys) | ||||
|         return queryset | ||||
|  | ||||
|     def comment(self, text): | ||||
|         """Add a comment to the query. | ||||
|  | ||||
|         See https://docs.mongodb.com/manual/reference/method/cursor.comment/#cursor.comment | ||||
|         for details. | ||||
|         """ | ||||
|         return self._chainable_method('comment', text) | ||||
|  | ||||
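
A short usage sketch for the newly added comment() modifier (the query and comment text are illustrative, reusing the hypothetical BlogPost model):

    # The comment travels with the query and shows up in the MongoDB
    # profiler and logs, which helps attribute slow queries to call sites.
    qs = BlogPost.objects(title='foo').comment('dashboard-title-lookup')
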
|     def explain(self, format=False): | ||||
|         """Return an explain plan record for the | ||||
|         :class:`~mongoengine.queryset.QuerySet`\ 's cursor. | ||||
| @@ -940,8 +977,15 @@ class BaseQuerySet(object): | ||||
|         :param format: format the plan before returning it | ||||
|         """ | ||||
|         plan = self._cursor.explain() | ||||
|  | ||||
|         # TODO remove this option completely - it's useless. If somebody | ||||
|         # wants to pretty-print the output, they easily can. | ||||
|         if format: | ||||
|             msg = ('"format" param of BaseQuerySet.explain has been ' | ||||
|                    'deprecated and will be removed in future versions.') | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|             plan = pprint.pformat(plan) | ||||
|  | ||||
|         return plan | ||||
|  | ||||
|     # DEPRECATED. Has no more impact on PyMongo 3+ | ||||
| @@ -954,7 +998,7 @@ class BaseQuerySet(object): | ||||
|         .. deprecated:: Ignored with PyMongo 3+ | ||||
|         """ | ||||
|         if IS_PYMONGO_3: | ||||
|             msg = "snapshot is deprecated as it has no impact when using PyMongo 3+." | ||||
|             msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|         queryset = self.clone() | ||||
|         queryset._snapshot = enabled | ||||
| @@ -980,7 +1024,7 @@ class BaseQuerySet(object): | ||||
|         .. deprecated:: Ignored with PyMongo 3+ | ||||
|         """ | ||||
|         if IS_PYMONGO_3: | ||||
|             msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+." | ||||
|             msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' | ||||
|             warnings.warn(msg, DeprecationWarning) | ||||
|         queryset = self.clone() | ||||
|         queryset._slave_okay = enabled | ||||
| @@ -1042,7 +1086,7 @@ class BaseQuerySet(object): | ||||
|  | ||||
|         :param ms: the number of milliseconds before killing the query on the server | ||||
|         """ | ||||
|         return self._chainable_method("max_time_ms", ms) | ||||
|         return self._chainable_method('max_time_ms', ms) | ||||
|  | ||||
|     # JSON Helpers | ||||
|  | ||||
| @@ -1125,19 +1169,19 @@ class BaseQuerySet(object): | ||||
|  | ||||
|         MapReduceDocument = _import_class('MapReduceDocument') | ||||
|  | ||||
|         if not hasattr(self._collection, "map_reduce"): | ||||
|             raise NotImplementedError("Requires MongoDB >= 1.7.1") | ||||
|         if not hasattr(self._collection, 'map_reduce'): | ||||
|             raise NotImplementedError('Requires MongoDB >= 1.7.1') | ||||
|  | ||||
|         map_f_scope = {} | ||||
|         if isinstance(map_f, Code): | ||||
|             map_f_scope = map_f.scope | ||||
|             map_f = unicode(map_f) | ||||
|             map_f = six.text_type(map_f) | ||||
|         map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) | ||||
|  | ||||
|         reduce_f_scope = {} | ||||
|         if isinstance(reduce_f, Code): | ||||
|             reduce_f_scope = reduce_f.scope | ||||
|             reduce_f = unicode(reduce_f) | ||||
|             reduce_f = six.text_type(reduce_f) | ||||
|         reduce_f_code = queryset._sub_js_fields(reduce_f) | ||||
|         reduce_f = Code(reduce_f_code, reduce_f_scope) | ||||
|  | ||||
| @@ -1147,7 +1191,7 @@ class BaseQuerySet(object): | ||||
|             finalize_f_scope = {} | ||||
|             if isinstance(finalize_f, Code): | ||||
|                 finalize_f_scope = finalize_f.scope | ||||
|                 finalize_f = unicode(finalize_f) | ||||
|                 finalize_f = six.text_type(finalize_f) | ||||
|             finalize_f_code = queryset._sub_js_fields(finalize_f) | ||||
|             finalize_f = Code(finalize_f_code, finalize_f_scope) | ||||
|             mr_args['finalize'] = finalize_f | ||||
| @@ -1163,7 +1207,7 @@ class BaseQuerySet(object): | ||||
|         else: | ||||
|             map_reduce_function = 'map_reduce' | ||||
|  | ||||
|             if isinstance(output, basestring): | ||||
|             if isinstance(output, six.string_types): | ||||
|                 mr_args['out'] = output | ||||
|  | ||||
|             elif isinstance(output, dict): | ||||
| @@ -1176,7 +1220,7 @@ class BaseQuerySet(object): | ||||
|                         break | ||||
|  | ||||
|                 else: | ||||
|                     raise OperationError("actionData not specified for output") | ||||
|                     raise OperationError('actionData not specified for output') | ||||
|  | ||||
|                 db_alias = output.get('db_alias') | ||||
|                 remaining_args = ['db', 'sharded', 'nonAtomic'] | ||||
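
For orientation, the call shape this map/reduce plumbing accepts. A sketch against the hypothetical BlogPost model; the JavaScript bodies are illustrative:

    map_f = 'function() { emit(this.author, 1); }'
    reduce_f = '''
        function(key, values) {
            var total = 0;
            for (var i = 0; i < values.length; i++) { total += values[i]; }
            return total;
        }
    '''
    # output may be 'inline' (yielding MapReduceDocument objects), a target
    # collection name, or a dict such as {'merge': 'counts', 'db_alias': ...}.
    results = BlogPost.objects.map_reduce(map_f, reduce_f, output='inline')
    for doc in results:
        pass  # doc.key is the author, doc.value the post count
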
| @@ -1406,7 +1450,7 @@ class BaseQuerySet(object): | ||||
|             # snapshot is not handled at all by PyMongo 3+ | ||||
|             # TODO: evaluate similar possibilities using modifiers | ||||
|             if self._snapshot: | ||||
|                 msg = "The snapshot option is not anymore available with PyMongo 3+" | ||||
|                 msg = 'The snapshot option is no longer available with PyMongo 3+' | ||||
|                 warnings.warn(msg, DeprecationWarning) | ||||
|             cursor_args = { | ||||
|                 'no_cursor_timeout': not self._timeout | ||||
| @@ -1418,7 +1462,7 @@ class BaseQuerySet(object): | ||||
|             if fields_name not in cursor_args: | ||||
|                 cursor_args[fields_name] = {} | ||||
|  | ||||
|             cursor_args[fields_name]['_text_score'] = {'$meta': "textScore"} | ||||
|             cursor_args[fields_name]['_text_score'] = {'$meta': 'textScore'} | ||||
|  | ||||
|         return cursor_args | ||||
|  | ||||
| @@ -1459,6 +1503,9 @@ class BaseQuerySet(object): | ||||
|             if self._hint != -1: | ||||
|                 self._cursor_obj.hint(self._hint) | ||||
|  | ||||
|             if self._batch_size is not None: | ||||
|                 self._cursor_obj.batch_size(self._batch_size) | ||||
|  | ||||
|         return self._cursor_obj | ||||
|  | ||||
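
The new _batch_size handling backs a chainable batch_size() modifier set elsewhere in this changeset. A sketch (the value 100 is arbitrary):

    # Ask the server to return documents in batches of 100 instead of the
    # driver default; the value is applied to the PyMongo cursor above.
    for post in BlogPost.objects.batch_size(100):
        pass  # process each post; query semantics are unchanged
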
|     def __deepcopy__(self, memo): | ||||
| @@ -1470,8 +1517,8 @@ class BaseQuerySet(object): | ||||
|         if self._mongo_query is None: | ||||
|             self._mongo_query = self._query_obj.to_query(self._document) | ||||
|             if self._class_check and self._initial_query: | ||||
|                 if "_cls" in self._mongo_query: | ||||
|                     self._mongo_query = {"$and": [self._initial_query, self._mongo_query]} | ||||
|                 if '_cls' in self._mongo_query: | ||||
|                     self._mongo_query = {'$and': [self._initial_query, self._mongo_query]} | ||||
|                 else: | ||||
|                     self._mongo_query.update(self._initial_query) | ||||
|         return self._mongo_query | ||||
| @@ -1483,8 +1530,7 @@ class BaseQuerySet(object): | ||||
|         return self.__dereference | ||||
|  | ||||
|     def no_dereference(self): | ||||
|         """Turn off any dereferencing for the results of this queryset. | ||||
|         """ | ||||
|         """Turn off any dereferencing for the results of this queryset.""" | ||||
|         queryset = self.clone() | ||||
|         queryset._auto_dereference = False | ||||
|         return queryset | ||||
| @@ -1513,7 +1559,7 @@ class BaseQuerySet(object): | ||||
|                     emit(null, 1); | ||||
|                 } | ||||
|             } | ||||
|         """ % dict(field=field) | ||||
|         """ % {'field': field} | ||||
|         reduce_func = """ | ||||
|             function(key, values) { | ||||
|                 var total = 0; | ||||
| @@ -1535,8 +1581,8 @@ class BaseQuerySet(object): | ||||
|  | ||||
|         if normalize: | ||||
|             count = sum(frequencies.values()) | ||||
|             frequencies = dict([(k, float(v) / count) | ||||
|                                 for k, v in frequencies.items()]) | ||||
|             frequencies = {k: float(v) / count | ||||
|                            for k, v in frequencies.items()} | ||||
|  | ||||
|         return frequencies | ||||
|  | ||||
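
What the normalize branch yields, sketched with the hypothetical BlogPost model (the numbers are illustrative):

    counts = BlogPost.objects.item_frequencies('author')
    # e.g. {'alice': 3, 'bob': 1}
    ratios = BlogPost.objects.item_frequencies('author', normalize=True)
    # each count divided by the total: {'alice': 0.75, 'bob': 0.25}
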
| @@ -1588,10 +1634,10 @@ class BaseQuerySet(object): | ||||
|             } | ||||
|         """ | ||||
|         total, data, types = self.exec_js(freq_func, field) | ||||
|         values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) | ||||
|         values = {types.get(k): int(v) for k, v in data.iteritems()} | ||||
|  | ||||
|         if normalize: | ||||
|             values = dict([(k, float(v) / total) for k, v in values.items()]) | ||||
|             values = {k: float(v) / total for k, v in values.items()} | ||||
|  | ||||
|         frequencies = {} | ||||
|         for k, v in values.iteritems(): | ||||
| @@ -1613,14 +1659,14 @@ class BaseQuerySet(object): | ||||
|                           for x in document._subclasses][1:] | ||||
|         for field in fields: | ||||
|             try: | ||||
|                 field = ".".join(f.db_field for f in | ||||
|                 field = '.'.join(f.db_field for f in | ||||
|                                  document._lookup_field(field.split('.'))) | ||||
|                 ret.append(field) | ||||
|             except LookUpError, err: | ||||
|             except LookUpError as err: | ||||
|                 found = False | ||||
|                 for subdoc in subclasses: | ||||
|                     try: | ||||
|                         subfield = ".".join(f.db_field for f in | ||||
|                         subfield = '.'.join(f.db_field for f in | ||||
|                                             subdoc._lookup_field(field.split('.'))) | ||||
|                         ret.append(subfield) | ||||
|                         found = True | ||||
| @@ -1633,15 +1679,14 @@ class BaseQuerySet(object): | ||||
|         return ret | ||||
|  | ||||
|     def _get_order_by(self, keys): | ||||
|         """Creates a list of order by fields | ||||
|         """ | ||||
|         """Creates a list of order by fields""" | ||||
|         key_list = [] | ||||
|         for key in keys: | ||||
|             if not key: | ||||
|                 continue | ||||
|  | ||||
|             if key == '$text_score': | ||||
|                 key_list.append(('_text_score', {'$meta': "textScore"})) | ||||
|                 key_list.append(('_text_score', {'$meta': 'textScore'})) | ||||
|                 continue | ||||
|  | ||||
|             direction = pymongo.ASCENDING | ||||
| @@ -1713,7 +1758,7 @@ class BaseQuerySet(object): | ||||
|                     # If we need to coerce types, we need to determine the | ||||
|                     # type of this field and use the corresponding | ||||
|                     # .to_python(...) | ||||
|                     from mongoengine.fields import EmbeddedDocumentField | ||||
|                     EmbeddedDocumentField = _import_class('EmbeddedDocumentField') | ||||
|  | ||||
|                     obj = self._document | ||||
|                     for chunk in path.split('.'): | ||||
| @@ -1747,7 +1792,7 @@ class BaseQuerySet(object): | ||||
|             field_name = match.group(1).split('.') | ||||
|             fields = self._document._lookup_field(field_name) | ||||
|             # Substitute the correct name for the field into the javascript | ||||
|             return ".".join([f.db_field for f in fields]) | ||||
|             return '.'.join([f.db_field for f in fields]) | ||||
|  | ||||
|         code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) | ||||
|         code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, | ||||
| @@ -1758,21 +1803,21 @@ class BaseQuerySet(object): | ||||
|         queryset = self.clone() | ||||
|         method = getattr(queryset._cursor, method_name) | ||||
|         method(val) | ||||
|         setattr(queryset, "_" + method_name, val) | ||||
|         setattr(queryset, '_' + method_name, val) | ||||
|         return queryset | ||||
|  | ||||
|     # Deprecated | ||||
|     def ensure_index(self, **kwargs): | ||||
|         """Deprecated use :func:`Document.ensure_index`""" | ||||
|         msg = ("Doc.objects()._ensure_index() is deprecated. " | ||||
|                "Use Doc.ensure_index() instead.") | ||||
|         msg = ('Doc.objects()._ensure_index() is deprecated. ' | ||||
|                'Use Doc.ensure_index() instead.') | ||||
|         warnings.warn(msg, DeprecationWarning) | ||||
|         self._document.__class__.ensure_index(**kwargs) | ||||
|         return self | ||||
|  | ||||
|     def _ensure_indexes(self): | ||||
|         """Deprecated use :func:`~Document.ensure_indexes`""" | ||||
|         msg = ("Doc.objects()._ensure_indexes() is deprecated. " | ||||
|                "Use Doc.ensure_indexes() instead.") | ||||
|         msg = ('Doc.objects()._ensure_indexes() is deprecated. ' | ||||
|                'Use Doc.ensure_indexes() instead.') | ||||
|         warnings.warn(msg, DeprecationWarning) | ||||
|         self._document.__class__.ensure_indexes() | ||||
|   | ||||
| @@ -67,7 +67,7 @@ class QueryFieldList(object): | ||||
|         return bool(self.fields) | ||||
|  | ||||
|     def as_dict(self): | ||||
|         field_list = dict((field, self.value) for field in self.fields) | ||||
|         field_list = {field: self.value for field in self.fields} | ||||
|         if self.slice: | ||||
|             field_list.update(self.slice) | ||||
|         if self._id is not None: | ||||
|   | ||||
| @@ -27,9 +27,10 @@ class QuerySet(BaseQuerySet): | ||||
|         in batches of ``ITER_CHUNK_SIZE``. | ||||
|  | ||||
|         If ``self._has_more`` is True, the cursor hasn't been exhausted, so cache another | ||||
|         batch.  Otherwise iterate the result_cache. | ||||
|         batch. Otherwise iterate the result_cache. | ||||
|         """ | ||||
|         self._iter = True | ||||
|  | ||||
|         if self._has_more: | ||||
|             return self._iter_results() | ||||
|  | ||||
| @@ -42,40 +43,56 @@ class QuerySet(BaseQuerySet): | ||||
|         """ | ||||
|         if self._len is not None: | ||||
|             return self._len | ||||
|  | ||||
|         # Populate the result cache with *all* of the docs in the cursor | ||||
|         if self._has_more: | ||||
|             # populate the cache | ||||
|             list(self._iter_results()) | ||||
|  | ||||
|         # Cache the length of the complete result cache and return it | ||||
|         self._len = len(self._result_cache) | ||||
|         return self._len | ||||
|  | ||||
|     def __repr__(self): | ||||
|         """Provides the string representation of the QuerySet | ||||
|         """ | ||||
|         """Provide a string representation of the QuerySet""" | ||||
|         if self._iter: | ||||
|             return '.. queryset mid-iteration ..' | ||||
|  | ||||
|         self._populate_cache() | ||||
|         data = self._result_cache[:REPR_OUTPUT_SIZE + 1] | ||||
|         if len(data) > REPR_OUTPUT_SIZE: | ||||
|             data[-1] = "...(remaining elements truncated)..." | ||||
|             data[-1] = '...(remaining elements truncated)...' | ||||
|         return repr(data) | ||||
|  | ||||
|     def _iter_results(self): | ||||
|         """A generator for iterating over the result cache. | ||||
|  | ||||
|         Also populates the cache if there are more possible results to yield. | ||||
|         Raises StopIteration when there are no more results""" | ||||
|         Also populates the cache if there are more possible results to | ||||
|         yield. Raises StopIteration when there are no more results. | ||||
|         """ | ||||
|         if self._result_cache is None: | ||||
|             self._result_cache = [] | ||||
|  | ||||
|         pos = 0 | ||||
|         while True: | ||||
|             upper = len(self._result_cache) | ||||
|             while pos < upper: | ||||
|  | ||||
|             # For all positions lower than the length of the current result | ||||
|             # cache, serve the docs straight from the cache w/o hitting the | ||||
|             # database. | ||||
|             # XXX it's VERY important to compute the len within the `while` | ||||
|             # condition because the result cache might expand mid-iteration | ||||
|             # (e.g. if we call len(qs) inside a loop that iterates over the | ||||
|             # queryset). Fortunately len(list) is O(1) in Python, so this | ||||
|             # doesn't cause performance issues. | ||||
|             while pos < len(self._result_cache): | ||||
|                 yield self._result_cache[pos] | ||||
|                 pos += 1 | ||||
|  | ||||
|             # Raise StopIteration if we already established there were no more | ||||
|             # docs in the db cursor. | ||||
|             if not self._has_more: | ||||
|                 raise StopIteration | ||||
|  | ||||
|             # Otherwise, populate more of the cache and repeat. | ||||
|             if len(self._result_cache) <= pos: | ||||
|                 self._populate_cache() | ||||
|  | ||||
| @@ -86,12 +103,22 @@ class QuerySet(BaseQuerySet): | ||||
|         """ | ||||
|         if self._result_cache is None: | ||||
|             self._result_cache = [] | ||||
|         if self._has_more: | ||||
|             try: | ||||
|                 for i in xrange(ITER_CHUNK_SIZE): | ||||
|                     self._result_cache.append(self.next()) | ||||
|             except StopIteration: | ||||
|                 self._has_more = False | ||||
|  | ||||
|         # Skip populating the cache if we already established there are no | ||||
|         # more docs to pull from the database. | ||||
|         if not self._has_more: | ||||
|             return | ||||
|  | ||||
|         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||
|         # the result cache. | ||||
|         try: | ||||
|             for _ in xrange(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(self.next()) | ||||
|         except StopIteration: | ||||
|             # Getting this exception means there are no more docs in the | ||||
|             # db cursor. Set _has_more to False so that we can use that | ||||
|             # information in other places. | ||||
|             self._has_more = False | ||||
|  | ||||
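
The XXX comment above guards a real pattern. A sketch of why the cache length must be re-read on every pass:

    qs = BlogPost.objects
    for post in qs:
        # len(qs) fully populates the result cache mid-iteration; because
        # _iter_results re-reads len(self._result_cache) each time around,
        # the loop keeps serving the remaining docs from the cache.
        total = len(qs)
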
|     def count(self, with_limit_and_skip=False): | ||||
|         """Count the selected elements in the query. | ||||
| @@ -114,7 +141,7 @@ class QuerySet(BaseQuerySet): | ||||
|         .. versionadded:: 0.8.3 Convert to non-caching queryset | ||||
|         """ | ||||
|         if self._result_cache is not None: | ||||
|             raise OperationError("QuerySet already cached") | ||||
|             raise OperationError('QuerySet already cached') | ||||
|         return self.clone_into(QuerySetNoCache(self._document, self._collection)) | ||||
|  | ||||
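
A usage sketch for the caching split (model as above):

    # QuerySetNoCache iterates without retaining results, which suits very
    # large result sets.
    for post in BlogPost.objects.no_cache():
        pass

    qs = BlogPost.objects
    list(qs)  # populates the result cache
    # qs.no_cache() would now raise OperationError('QuerySet already cached')
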
|  | ||||
| @@ -137,13 +164,14 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|             return '.. queryset mid-iteration ..' | ||||
|  | ||||
|         data = [] | ||||
|         for i in xrange(REPR_OUTPUT_SIZE + 1): | ||||
|         for _ in xrange(REPR_OUTPUT_SIZE + 1): | ||||
|             try: | ||||
|                 data.append(self.next()) | ||||
|             except StopIteration: | ||||
|                 break | ||||
|  | ||||
|         if len(data) > REPR_OUTPUT_SIZE: | ||||
|             data[-1] = "...(remaining elements truncated)..." | ||||
|             data[-1] = '...(remaining elements truncated)...' | ||||
|  | ||||
|         self.rewind() | ||||
|         return repr(data) | ||||
|   | ||||
| @@ -1,9 +1,11 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| from bson import SON | ||||
| from bson import ObjectId, SON | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| import six | ||||
|  | ||||
| from mongoengine.base.fields import UPDATE_OPERATORS | ||||
| from mongoengine.base import UPDATE_OPERATORS | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import get_connection | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| @@ -26,13 +28,13 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + | ||||
|                    STRING_OPERATORS + CUSTOM_OPERATORS) | ||||
|  | ||||
|  | ||||
| # TODO make this less complex | ||||
| def query(_doc_cls=None, **kwargs): | ||||
|     """Transform a query from Django-style format to Mongo format. | ||||
|     """ | ||||
|     """Transform a query from Django-style format to Mongo format.""" | ||||
|     mongo_query = {} | ||||
|     merge_query = defaultdict(list) | ||||
|     for key, value in sorted(kwargs.items()): | ||||
|         if key == "__raw__": | ||||
|         if key == '__raw__': | ||||
|             mongo_query.update(value) | ||||
|             continue | ||||
|  | ||||
| @@ -45,7 +47,7 @@ def query(_doc_cls=None, **kwargs): | ||||
|             op = parts.pop() | ||||
|  | ||||
|         # Allow to escape operator-like field name by __ | ||||
|         if len(parts) > 1 and parts[-1] == "": | ||||
|         if len(parts) > 1 and parts[-1] == '': | ||||
|             parts.pop() | ||||
|  | ||||
|         negate = False | ||||
| @@ -57,16 +59,17 @@ def query(_doc_cls=None, **kwargs): | ||||
|             # Switch field names to proper names [set in Field(name='foo')] | ||||
|             try: | ||||
|                 fields = _doc_cls._lookup_field(parts) | ||||
|             except Exception, e: | ||||
|             except Exception as e: | ||||
|                 raise InvalidQueryError(e) | ||||
|             parts = [] | ||||
|  | ||||
|             CachedReferenceField = _import_class('CachedReferenceField') | ||||
|             GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|             cleaned_fields = [] | ||||
|             for field in fields: | ||||
|                 append_field = True | ||||
|                 if isinstance(field, basestring): | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     parts.append(field) | ||||
|                     append_field = False | ||||
|                 # is last and CachedReferenceField | ||||
| @@ -84,9 +87,9 @@ def query(_doc_cls=None, **kwargs): | ||||
|             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] | ||||
|             singular_ops += STRING_OPERATORS | ||||
|             if op in singular_ops: | ||||
|                 if isinstance(field, basestring): | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     if (op in STRING_OPERATORS and | ||||
|                             isinstance(value, basestring)): | ||||
|                             isinstance(value, six.string_types)): | ||||
|                         StringField = _import_class('StringField') | ||||
|                         value = StringField.prepare_query_value(op, value) | ||||
|                     else: | ||||
| @@ -101,6 +104,16 @@ def query(_doc_cls=None, **kwargs): | ||||
|                 # 'in', 'nin' and 'all' require a list of values | ||||
|                 value = [field.prepare_query_value(op, v) for v in value] | ||||
|  | ||||
|             # If we're querying a GenericReferenceField, we need to alter the | ||||
|             # key depending on the value: | ||||
|             # * If the value is a DBRef, the key should be "field_name._ref". | ||||
|             # * If the value is an ObjectId, the key should be "field_name._ref.$id". | ||||
|             if isinstance(field, GenericReferenceField): | ||||
|                 if isinstance(value, DBRef): | ||||
|                     parts[-1] += '._ref' | ||||
|                 elif isinstance(value, ObjectId): | ||||
|                     parts[-1] += '._ref.$id' | ||||
|  | ||||
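
A sketch of the two key shapes the new block produces (the Bookmark model is hypothetical):

    from bson import DBRef, ObjectId
    from mongoengine import Document, GenericReferenceField

    class Bookmark(Document):
        ref = GenericReferenceField()

    oid = ObjectId()
    Bookmark.objects(ref=oid)                      # {'ref._ref.$id': oid}
    Bookmark.objects(ref=DBRef('blog_post', oid))  # {'ref._ref': DBRef(...)}
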
|         # if op and op not in COMPARISON_OPERATORS: | ||||
|         if op: | ||||
|             if op in GEO_OPERATORS: | ||||
| @@ -116,10 +129,10 @@ def query(_doc_cls=None, **kwargs): | ||||
|                     value = query(field.field.document_type, **value) | ||||
|                 else: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|                 value = {"$elemMatch": value} | ||||
|                 value = {'$elemMatch': value} | ||||
|             elif op in CUSTOM_OPERATORS: | ||||
|                 NotImplementedError("Custom method '%s' has not " | ||||
|                                     "been implemented" % op) | ||||
|                 raise NotImplementedError('Custom method "%s" has not ' | ||||
|                                           'been implemented' % op) | ||||
|             elif op not in STRING_OPERATORS: | ||||
|                 value = {'$' + op: value} | ||||
|  | ||||
| @@ -128,11 +141,13 @@ def query(_doc_cls=None, **kwargs): | ||||
|  | ||||
|         for i, part in indices: | ||||
|             parts.insert(i, part) | ||||
|  | ||||
|         key = '.'.join(parts) | ||||
|  | ||||
|         if op is None or key not in mongo_query: | ||||
|             mongo_query[key] = value | ||||
|         elif key in mongo_query: | ||||
|             if key in mongo_query and isinstance(mongo_query[key], dict): | ||||
|             if isinstance(mongo_query[key], dict): | ||||
|                 mongo_query[key].update(value) | ||||
|                 # $max/minDistance needs to come last - convert to SON | ||||
|                 value_dict = mongo_query[key] | ||||
| @@ -182,15 +197,16 @@ def query(_doc_cls=None, **kwargs): | ||||
|  | ||||
|  | ||||
| def update(_doc_cls=None, **update): | ||||
|     """Transform an update spec from Django-style format to Mongo format. | ||||
|     """Transform an update spec from Django-style format to Mongo | ||||
|     format. | ||||
|     """ | ||||
|     mongo_update = {} | ||||
|     for key, value in update.items(): | ||||
|         if key == "__raw__": | ||||
|         if key == '__raw__': | ||||
|             mongo_update.update(value) | ||||
|             continue | ||||
|         parts = key.split('__') | ||||
|         # if there is no operator, default to "set" | ||||
|         # if there is no operator, default to 'set' | ||||
|         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||
|             parts.insert(0, 'set') | ||||
|         # Check for an operator and transform to mongo-style if there is | ||||
| @@ -209,21 +225,21 @@ def update(_doc_cls=None, **update): | ||||
|             elif op == 'add_to_set': | ||||
|                 op = 'addToSet' | ||||
|             elif op == 'set_on_insert': | ||||
|                 op = "setOnInsert" | ||||
|                 op = 'setOnInsert' | ||||
|  | ||||
|         match = None | ||||
|         if parts[-1] in COMPARISON_OPERATORS: | ||||
|             match = parts.pop() | ||||
|  | ||||
|         # Allow to escape operator-like field name by __ | ||||
|         if len(parts) > 1 and parts[-1] == "": | ||||
|         if len(parts) > 1 and parts[-1] == '': | ||||
|             parts.pop() | ||||
|  | ||||
|         if _doc_cls: | ||||
|             # Switch field names to proper names [set in Field(name='foo')] | ||||
|             try: | ||||
|                 fields = _doc_cls._lookup_field(parts) | ||||
|             except Exception, e: | ||||
|             except Exception as e: | ||||
|                 raise InvalidQueryError(e) | ||||
|             parts = [] | ||||
|  | ||||
| @@ -231,7 +247,7 @@ def update(_doc_cls=None, **update): | ||||
|             appended_sub_field = False | ||||
|             for field in fields: | ||||
|                 append_field = True | ||||
|                 if isinstance(field, basestring): | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     # Convert the S operator to $ | ||||
|                     if field == 'S': | ||||
|                         field = '$' | ||||
| @@ -252,7 +268,7 @@ def update(_doc_cls=None, **update): | ||||
|             else: | ||||
|                 field = cleaned_fields[-1] | ||||
|  | ||||
|             GeoJsonBaseField = _import_class("GeoJsonBaseField") | ||||
|             GeoJsonBaseField = _import_class('GeoJsonBaseField') | ||||
|             if isinstance(field, GeoJsonBaseField): | ||||
|                 value = field.to_mongo(value) | ||||
|  | ||||
| @@ -266,7 +282,7 @@ def update(_doc_cls=None, **update): | ||||
|                     value = [field.prepare_query_value(op, v) for v in value] | ||||
|                 elif field.required or value is not None: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|             elif op == "unset": | ||||
|             elif op == 'unset': | ||||
|                 value = 1 | ||||
|  | ||||
|         if match: | ||||
| @@ -276,16 +292,16 @@ def update(_doc_cls=None, **update): | ||||
|         key = '.'.join(parts) | ||||
|  | ||||
|         if not op: | ||||
|             raise InvalidQueryError("Updates must supply an operation " | ||||
|                                     "eg: set__FIELD=value") | ||||
|             raise InvalidQueryError('Updates must supply an operation, ' | ||||
|                                     'e.g. set__FIELD=value') | ||||
|  | ||||
|         if 'pull' in op and '.' in key: | ||||
|             # Dot operators don't work on pull operations | ||||
|             # unless they point to a list field | ||||
|             # Otherwise it uses nested dict syntax | ||||
|             if op == 'pullAll': | ||||
|                 raise InvalidQueryError("pullAll operations only support " | ||||
|                                         "a single field depth") | ||||
|                 raise InvalidQueryError('pullAll operations only support ' | ||||
|                                         'a single field depth') | ||||
|  | ||||
|             # Look for the last list field and use dot notation until there | ||||
|             field_classes = [c.__class__ for c in cleaned_fields] | ||||
| @@ -296,7 +312,7 @@ def update(_doc_cls=None, **update): | ||||
|                 # Then process as normal | ||||
|                 last_listField = len( | ||||
|                     cleaned_fields) - field_classes.index(ListField) | ||||
|                 key = ".".join(parts[:last_listField]) | ||||
|                 key = '.'.join(parts[:last_listField]) | ||||
|                 parts = parts[last_listField:] | ||||
|                 parts.insert(0, key) | ||||
|  | ||||
| @@ -304,7 +320,7 @@ def update(_doc_cls=None, **update): | ||||
|             for key in parts: | ||||
|                 value = {key: value} | ||||
|         elif op == 'addToSet' and isinstance(value, list): | ||||
|             value = {key: {"$each": value}} | ||||
|             value = {key: {'$each': value}} | ||||
|         else: | ||||
|             value = {key: value} | ||||
|         key = '$' + op | ||||
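
End to end, the transform behaves like this. A sketch calling the module function directly, without a document class:

    from mongoengine.queryset.transform import update

    update(title='hi')                   # no operator defaults to $set
    # -> {'$set': {'title': 'hi'}}
    update(inc__view_count=1)
    # -> {'$inc': {'view_count': 1}}
    update(add_to_set__tags=['a', 'b'])  # lists get wrapped in $each
    # -> {'$addToSet': {'tags': {'$each': ['a', 'b']}}}
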
| @@ -318,78 +334,82 @@ def update(_doc_cls=None, **update): | ||||
|  | ||||
|  | ||||
| def _geo_operator(field, op, value): | ||||
|     """Helper to return the query for a given geo query""" | ||||
|     if op == "max_distance": | ||||
|     """Helper to return the query for a given geo query.""" | ||||
|     if op == 'max_distance': | ||||
|         value = {'$maxDistance': value} | ||||
|     elif op == "min_distance": | ||||
|     elif op == 'min_distance': | ||||
|         value = {'$minDistance': value} | ||||
|     elif field._geo_index == pymongo.GEO2D: | ||||
|         if op == "within_distance": | ||||
|         if op == 'within_distance': | ||||
|             value = {'$within': {'$center': value}} | ||||
|         elif op == "within_spherical_distance": | ||||
|         elif op == 'within_spherical_distance': | ||||
|             value = {'$within': {'$centerSphere': value}} | ||||
|         elif op == "within_polygon": | ||||
|         elif op == 'within_polygon': | ||||
|             value = {'$within': {'$polygon': value}} | ||||
|         elif op == "near": | ||||
|         elif op == 'near': | ||||
|             value = {'$near': value} | ||||
|         elif op == "near_sphere": | ||||
|         elif op == 'near_sphere': | ||||
|             value = {'$nearSphere': value} | ||||
|         elif op == 'within_box': | ||||
|             value = {'$within': {'$box': value}} | ||||
|         else: | ||||
|             raise NotImplementedError("Geo method '%s' has not " | ||||
|                                       "been implemented for a GeoPointField" % op) | ||||
|             raise NotImplementedError('Geo method "%s" has not been ' | ||||
|                                       'implemented for a GeoPointField' % op) | ||||
|     else: | ||||
|         if op == "geo_within": | ||||
|             value = {"$geoWithin": _infer_geometry(value)} | ||||
|         elif op == "geo_within_box": | ||||
|             value = {"$geoWithin": {"$box": value}} | ||||
|         elif op == "geo_within_polygon": | ||||
|             value = {"$geoWithin": {"$polygon": value}} | ||||
|         elif op == "geo_within_center": | ||||
|             value = {"$geoWithin": {"$center": value}} | ||||
|         elif op == "geo_within_sphere": | ||||
|             value = {"$geoWithin": {"$centerSphere": value}} | ||||
|         elif op == "geo_intersects": | ||||
|             value = {"$geoIntersects": _infer_geometry(value)} | ||||
|         elif op == "near": | ||||
|         if op == 'geo_within': | ||||
|             value = {'$geoWithin': _infer_geometry(value)} | ||||
|         elif op == 'geo_within_box': | ||||
|             value = {'$geoWithin': {'$box': value}} | ||||
|         elif op == 'geo_within_polygon': | ||||
|             value = {'$geoWithin': {'$polygon': value}} | ||||
|         elif op == 'geo_within_center': | ||||
|             value = {'$geoWithin': {'$center': value}} | ||||
|         elif op == 'geo_within_sphere': | ||||
|             value = {'$geoWithin': {'$centerSphere': value}} | ||||
|         elif op == 'geo_intersects': | ||||
|             value = {'$geoIntersects': _infer_geometry(value)} | ||||
|         elif op == 'near': | ||||
|             value = {'$near': _infer_geometry(value)} | ||||
|         else: | ||||
|             raise NotImplementedError("Geo method '%s' has not " | ||||
|                                       "been implemented for a %s " % (op, field._name)) | ||||
|             raise NotImplementedError( | ||||
|                 'Geo method "%s" has not been implemented for a %s ' | ||||
|                 % (op, field._name) | ||||
|             ) | ||||
|     return value | ||||
|  | ||||
|  | ||||
| def _infer_geometry(value): | ||||
|     """Helper method that tries to infer the $geometry shape for a given value""" | ||||
|     """Helper method that tries to infer the $geometry shape for a | ||||
|     given value. | ||||
|     """ | ||||
|     if isinstance(value, dict): | ||||
|         if "$geometry" in value: | ||||
|         if '$geometry' in value: | ||||
|             return value | ||||
|         elif 'coordinates' in value and 'type' in value: | ||||
|             return {"$geometry": value} | ||||
|         raise InvalidQueryError("Invalid $geometry dictionary should have " | ||||
|                                 "type and coordinates keys") | ||||
|             return {'$geometry': value} | ||||
|         raise InvalidQueryError('Invalid $geometry dictionary; it should ' | ||||
|                                 'have type and coordinates keys') | ||||
|     elif isinstance(value, (list, set)): | ||||
|         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? | ||||
|         # TODO: should both TypeError and IndexError be alike interpreted? | ||||
|  | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|             return {"$geometry": {"type": "Polygon", "coordinates": value}} | ||||
|             return {'$geometry': {'type': 'Polygon', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|         try: | ||||
|             value[0][0] | ||||
|             return {"$geometry": {"type": "LineString", "coordinates": value}} | ||||
|             return {'$geometry': {'type': 'LineString', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|         try: | ||||
|             value[0] | ||||
|             return {"$geometry": {"type": "Point", "coordinates": value}} | ||||
|             return {'$geometry': {'type': 'Point', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|     raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary " | ||||
|                             "or (nested) lists of coordinate(s)") | ||||
|     raise InvalidQueryError('Invalid $geometry data. Can be either a ' | ||||
|                             'dictionary or (nested) lists of coordinate(s)') | ||||
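
The inference order, sketched against the private helper (the coordinates are illustrative GeoJSON):

    from mongoengine.queryset.transform import _infer_geometry

    _infer_geometry([40.0, 5.0])
    # -> {'$geometry': {'type': 'Point', 'coordinates': [40.0, 5.0]}}
    _infer_geometry([[40.0, 5.0], [41.0, 6.0]])
    # -> {'$geometry': {'type': 'LineString', 'coordinates': [[40.0, 5.0], [41.0, 6.0]]}}
    _infer_geometry([[[40, 5], [40, 6], [41, 6], [40, 5]]])
    # -> {'$geometry': {'type': 'Polygon', 'coordinates': [[[40, 5], ...]]}}
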
|   | ||||
| @@ -69,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor): | ||||
|         self.document = document | ||||
|  | ||||
|     def visit_combination(self, combination): | ||||
|         operator = "$and" | ||||
|         operator = '$and' | ||||
|         if combination.operation == combination.OR: | ||||
|             operator = "$or" | ||||
|             operator = '$or' | ||||
|         return {operator: combination.children} | ||||
|  | ||||
|     def visit_query(self, query): | ||||
| @@ -79,8 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): | ||||
|  | ||||
|  | ||||
| class QNode(object): | ||||
|     """Base class for nodes in query trees. | ||||
|     """ | ||||
|     """Base class for nodes in query trees.""" | ||||
|  | ||||
|     AND = 0 | ||||
|     OR = 1 | ||||
| @@ -94,7 +93,8 @@ class QNode(object): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def _combine(self, other, operation): | ||||
|         """Combine this node with another node into a QCombination object. | ||||
|         """Combine this node with another node into a QCombination | ||||
|         object. | ||||
|         """ | ||||
|         if getattr(other, 'empty', True): | ||||
|             return self | ||||
| @@ -116,8 +116,8 @@ class QNode(object): | ||||
|  | ||||
|  | ||||
| class QCombination(QNode): | ||||
|     """Represents the combination of several conditions by a given logical | ||||
|     operator. | ||||
|     """Represents the combination of several conditions by a given | ||||
|     logical operator. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, operation, children): | ||||
|   | ||||
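
For context, how QNode trees surface through the public Q API. A sketch; the compiled output is shown approximately:

    from mongoengine.queryset.visitor import Q

    node = (Q(age__gte=18) & Q(country='US')) | Q(vip=True)
    # Passing node to Document.objects() compiles it via the visitors into
    # roughly: {'$or': [{'age': {'$gte': 18}, 'country': 'US'},
    #                   {'vip': True}]}
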
| @@ -1,7 +1,5 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| __all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', | ||||
|            'post_save', 'pre_delete', 'post_delete'] | ||||
| __all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', | ||||
|            'post_save', 'pre_delete', 'post_delete') | ||||
|  | ||||
| signals_available = False | ||||
| try: | ||||
| @@ -34,6 +32,7 @@ except ImportError: | ||||
|             temporarily_connected_to = _fail | ||||
|         del _fail | ||||
|  | ||||
|  | ||||
| # the namespace for code signals.  If you are not mongoengine code, do | ||||
| # not put signals in here.  Create your own namespace instead. | ||||
| _signals = Namespace() | ||||
|   | ||||

setup.cfg: 14 lines changed

							| @@ -1,13 +1,11 @@ | ||||
| [nosetests] | ||||
| verbosity = 2 | ||||
| detailed-errors = 1 | ||||
| cover-erase = 1 | ||||
| cover-branches = 1 | ||||
| cover-package = mongoengine | ||||
| tests = tests | ||||
| verbosity=2 | ||||
| detailed-errors=1 | ||||
| tests=tests | ||||
| cover-package=mongoengine | ||||
|  | ||||
| [flake8] | ||||
| ignore=E501,F401,F403,F405,I201 | ||||
| exclude=build,dist,docs,venv,.tox,.eggs,tests | ||||
| max-complexity=42 | ||||
| exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | ||||
| max-complexity=45 | ||||
| application-import-names=mongoengine,tests | ||||
|   | ||||

setup.py: 25 lines changed

							| @@ -21,8 +21,9 @@ except Exception: | ||||
|  | ||||
|  | ||||
| def get_version(version_tuple): | ||||
|     if not isinstance(version_tuple[-1], int): | ||||
|         return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] | ||||
|     """Return the version tuple as a string, e.g. for (0, 10, 7), | ||||
|     return '0.10.7'. | ||||
|     """ | ||||
|     return '.'.join(map(str, version_tuple)) | ||||
|  | ||||
|  | ||||
| @@ -41,31 +42,29 @@ CLASSIFIERS = [ | ||||
|     'Operating System :: OS Independent', | ||||
|     'Programming Language :: Python', | ||||
|     "Programming Language :: Python :: 2", | ||||
|     "Programming Language :: Python :: 2.6", | ||||
|     "Programming Language :: Python :: 2.7", | ||||
|     "Programming Language :: Python :: 3", | ||||
|     "Programming Language :: Python :: 3.2", | ||||
|     "Programming Language :: Python :: 3.3", | ||||
|     "Programming Language :: Python :: 3.4", | ||||
|     "Programming Language :: Python :: 3.5", | ||||
|     "Programming Language :: Python :: Implementation :: CPython", | ||||
|     "Programming Language :: Python :: Implementation :: PyPy", | ||||
|     'Topic :: Database', | ||||
|     'Topic :: Software Development :: Libraries :: Python Modules', | ||||
| ] | ||||
|  | ||||
| extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} | ||||
| extra_opts = { | ||||
|     'packages': find_packages(exclude=['tests', 'tests.*']), | ||||
|     'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0'] | ||||
| } | ||||
| if sys.version_info[0] == 3: | ||||
|     extra_opts['use_2to3'] = True | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0'] | ||||
|     if "test" in sys.argv or "nosetests" in sys.argv: | ||||
|     if 'test' in sys.argv or 'nosetests' in sys.argv: | ||||
|         extra_opts['packages'] = find_packages() | ||||
|         extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} | ||||
|         extra_opts['package_data'] = { | ||||
|             'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']} | ||||
| else: | ||||
|     # coverage 4 does not support Python 3.2 anymore | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil'] | ||||
|  | ||||
|     if sys.version_info[0] == 2 and sys.version_info[1] == 6: | ||||
|         extra_opts['tests_require'].append('unittest2') | ||||
|     extra_opts['tests_require'] += ['python-dateutil'] | ||||
|  | ||||
| setup( | ||||
|     name='mongoengine', | ||||
|   | ||||
| @@ -2,4 +2,3 @@ from all_warnings import AllWarnings | ||||
| from document import * | ||||
| from queryset import * | ||||
| from fields import * | ||||
| from migration import * | ||||
|   | ||||
| @@ -3,8 +3,6 @@ This test has been put into a module.  This is because it tests warnings that | ||||
| only get triggered on first hit.  This way we can ensure it's imported into the | ||||
| top level and called first by the test suite. | ||||
| """ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
|   | ||||
| @@ -1,5 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from class_methods import * | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from bson import SON | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| import unittest | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| @@ -143,11 +141,9 @@ class DynamicTest(unittest.TestCase): | ||||
|  | ||||
|     def test_three_level_complex_data_lookups(self): | ||||
|         """Ensure you can query three-level dynamic document fields""" | ||||
|         p = self.Person() | ||||
|         p.misc = {'hello': {'hello2': 'world'}} | ||||
|         p.save() | ||||
|         # from pprint import pprint as pp; import pdb; pdb.set_trace(); | ||||
|         print self.Person.objects(misc__hello__hello2='world') | ||||
|         p = self.Person.objects.create( | ||||
|             misc={'hello': {'hello2': 'world'}} | ||||
|         ) | ||||
|         self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count()) | ||||
|  | ||||
|     def test_complex_embedded_document_validation(self): | ||||
|   | ||||
| @@ -2,10 +2,8 @@ | ||||
| import unittest | ||||
| import sys | ||||
|  | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import pymongo | ||||
| from random import randint | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from datetime import datetime | ||||
| @@ -17,11 +15,9 @@ __all__ = ("IndexesTest", ) | ||||
|  | ||||
|  | ||||
| class IndexesTest(unittest.TestCase): | ||||
|     _MAX_RAND = 10 ** 10 | ||||
|  | ||||
|     def setUp(self): | ||||
|         self.db_name = 'mongoenginetest_IndexesTest_' + str(randint(0, self._MAX_RAND)) | ||||
|         self.connection = connect(db=self.db_name) | ||||
|         self.connection = connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|         class Person(Document): | ||||
| @@ -560,8 +556,8 @@ class IndexesTest(unittest.TestCase): | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         for i in xrange(0, 10): | ||||
|             tags = [("tag %i" % n) for n in xrange(0, i % 2)] | ||||
|         for i in range(0, 10): | ||||
|             tags = [("tag %i" % n) for n in range(0, i % 2)] | ||||
|             BlogPost(tags=tags).save() | ||||
|  | ||||
|         self.assertEqual(BlogPost.objects.count(), 10) | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| @@ -253,19 +251,17 @@ class InheritanceTest(unittest.TestCase): | ||||
|         self.assertEqual(classes, [Human]) | ||||
|  | ||||
|     def test_allow_inheritance(self): | ||||
|         """Ensure that inheritance may be disabled on simple classes and that | ||||
|         _cls and _subclasses will not be used. | ||||
|         """Ensure that inheritance is disabled by default on simple | ||||
|         classes and that _cls will not be used. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         def create_dog_class(): | ||||
|         # can't inherit because Animal didn't explicitly allow inheritance | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Dog(Animal): | ||||
|                 pass | ||||
|  | ||||
|         self.assertRaises(ValueError, create_dog_class) | ||||
|  | ||||
|         # Check that _cls etc aren't present on simple documents | ||||
|         dog = Animal(name='dog').save() | ||||
|         self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) | ||||
| @@ -275,17 +271,15 @@ class InheritanceTest(unittest.TestCase): | ||||
|         self.assertFalse('_cls' in obj) | ||||
|  | ||||
|     def test_cant_turn_off_inheritance_on_subclass(self): | ||||
|         """Ensure if inheritance is on in a subclass you cant turn it off | ||||
|         """Ensure that if inheritance is on in a subclass you can't turn it off. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         def create_mammal_class(): | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Mammal(Animal): | ||||
|                 meta = {'allow_inheritance': False} | ||||
|         self.assertRaises(ValueError, create_mammal_class) | ||||
|  | ||||
|     def test_allow_inheritance_abstract_document(self): | ||||
|         """Ensure that abstract documents can set inheritance rules and that | ||||
| @@ -298,10 +292,9 @@ class InheritanceTest(unittest.TestCase): | ||||
|         class Animal(FinalDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         def create_mammal_class(): | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Mammal(Animal): | ||||
|                 pass | ||||
|         self.assertRaises(ValueError, create_mammal_class) | ||||
|  | ||||
|         # Check that _cls isn't present in simple documents | ||||
|         doc = Animal(name='dog') | ||||
| @@ -360,29 +353,26 @@ class InheritanceTest(unittest.TestCase): | ||||
|         self.assertEqual(berlin.pk, berlin.auto_id_0) | ||||
|  | ||||
|     def test_abstract_document_creation_does_not_fail(self): | ||||
|  | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         bkk = City(continent='asia') | ||||
|         self.assertEqual(None, bkk.pk) | ||||
|         # TODO: expected error? Shouldn't we create a new error type? | ||||
|         self.assertRaises(KeyError, lambda: setattr(bkk, 'pk', 1)) | ||||
|         with self.assertRaises(KeyError): | ||||
|             setattr(bkk, 'pk', 1) | ||||
|  | ||||
|     def test_allow_inheritance_embedded_document(self): | ||||
|         """Ensure embedded documents respect inheritance | ||||
|         """ | ||||
|  | ||||
|         """Ensure embedded documents respect inheritance.""" | ||||
|         class Comment(EmbeddedDocument): | ||||
|             content = StringField() | ||||
|  | ||||
|         def create_special_comment(): | ||||
|         with self.assertRaises(ValueError): | ||||
|             class SpecialComment(Comment): | ||||
|                 pass | ||||
|  | ||||
|         self.assertRaises(ValueError, create_special_comment) | ||||
|  | ||||
|         doc = Comment(content='test') | ||||
|         self.assertFalse('_cls' in doc.to_mongo()) | ||||
|  | ||||
| @@ -454,11 +444,11 @@ class InheritanceTest(unittest.TestCase): | ||||
|         self.assertEqual(Guppy._get_collection_name(), 'fish') | ||||
|         self.assertEqual(Human._get_collection_name(), 'human') | ||||
|  | ||||
|         def create_bad_abstract(): | ||||
|         # ensure that a subclass of a non-abstract class can't be abstract | ||||
|         with self.assertRaises(ValueError): | ||||
|             class EvilHuman(Human): | ||||
|                 evil = BooleanField(default=True) | ||||
|                 meta = {'abstract': True} | ||||
|         self.assertRaises(ValueError, create_bad_abstract) | ||||
|  | ||||
|     def test_abstract_embedded_documents(self): | ||||
|         # 789: EmbeddedDocument shouldn't inherit abstract | ||||
|   | ||||
| @@ -1,7 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import bson | ||||
| import os | ||||
| import pickle | ||||
| @@ -16,12 +13,12 @@ from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest, | ||||
|                             PickleDynamicEmbedded, PickleDynamicTest) | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.base import get_document, _document_registry | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.errors import (NotRegistered, InvalidDocumentError, | ||||
|                                 InvalidQueryError, NotUniqueError, | ||||
|                                 FieldDoesNotExist, SaveConditionError) | ||||
| from mongoengine.queryset import NULLIFY, Q | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.base import get_document | ||||
| from mongoengine.context_managers import switch_db, query_counter | ||||
| from mongoengine import signals | ||||
|  | ||||
| @@ -102,21 +99,18 @@ class InstanceTest(unittest.TestCase): | ||||
|         self.assertEqual(options['size'], 4096) | ||||
|  | ||||
|         # Check that the document cannot be redefined with different options | ||||
|         def recreate_log_document(): | ||||
|             class Log(Document): | ||||
|                 date = DateTimeField(default=datetime.now) | ||||
|                 meta = { | ||||
|                     'max_documents': 11, | ||||
|                 } | ||||
|             # Create the collection by accessing Document.objects | ||||
|             Log.objects | ||||
|         self.assertRaises(InvalidCollectionError, recreate_log_document) | ||||
|         class Log(Document): | ||||
|             date = DateTimeField(default=datetime.now) | ||||
|             meta = { | ||||
|                 'max_documents': 11, | ||||
|             } | ||||
|  | ||||
|         Log.drop_collection() | ||||
|         # Accessing Document.objects creates the collection | ||||
|         with self.assertRaises(InvalidCollectionError): | ||||
|             Log.objects | ||||
|  | ||||
|     def test_capped_collection_default(self): | ||||
|         """Ensure that capped collections defaults work properly. | ||||
|         """ | ||||
|         """Ensure that capped collections defaults work properly.""" | ||||
|         class Log(Document): | ||||
|             date = DateTimeField(default=datetime.now) | ||||
|             meta = { | ||||
| @@ -134,16 +128,14 @@ class InstanceTest(unittest.TestCase): | ||||
|         self.assertEqual(options['size'], 10 * 2**20) | ||||
|  | ||||
|         # Check that the document with default value can be recreated | ||||
|         def recreate_log_document(): | ||||
|             class Log(Document): | ||||
|                 date = DateTimeField(default=datetime.now) | ||||
|                 meta = { | ||||
|                     'max_documents': 10, | ||||
|                 } | ||||
|             # Create the collection by accessing Document.objects | ||||
|             Log.objects | ||||
|         recreate_log_document() | ||||
|         Log.drop_collection() | ||||
|         class Log(Document): | ||||
|             date = DateTimeField(default=datetime.now) | ||||
|             meta = { | ||||
|                 'max_documents': 10, | ||||
|             } | ||||
|  | ||||
|         # Create the collection by accessing Document.objects | ||||
|         Log.objects | ||||
|  | ||||
|     def test_capped_collection_no_max_size_problems(self): | ||||
|         """Ensure that capped collections with odd max_size work properly. | ||||
| @@ -166,16 +158,14 @@ class InstanceTest(unittest.TestCase): | ||||
|         self.assertTrue(options['size'] >= 10000) | ||||
|  | ||||
|         # Check that the document with odd max_size value can be recreated | ||||
|         def recreate_log_document(): | ||||
|             class Log(Document): | ||||
|                 date = DateTimeField(default=datetime.now) | ||||
|                 meta = { | ||||
|                     'max_size': 10000, | ||||
|                 } | ||||
|             # Create the collection by accessing Document.objects | ||||
|             Log.objects | ||||
|         recreate_log_document() | ||||
|         Log.drop_collection() | ||||
|         class Log(Document): | ||||
|             date = DateTimeField(default=datetime.now) | ||||
|             meta = { | ||||
|                 'max_size': 10000, | ||||
|             } | ||||
|  | ||||
|         # Create the collection by accessing Document.objects | ||||
|         Log.objects | ||||
|  | ||||
|     def test_repr(self): | ||||
|         """Ensure that unicode representation works | ||||
| @@ -286,7 +276,7 @@ class InstanceTest(unittest.TestCase): | ||||
|  | ||||
|         list_stats = [] | ||||
|  | ||||
|         for i in xrange(10): | ||||
|         for i in range(10): | ||||
|             s = Stats() | ||||
|             s.save() | ||||
|             list_stats.append(s) | ||||
| @@ -356,14 +346,14 @@ class InstanceTest(unittest.TestCase): | ||||
|         self.assertEqual(User._fields['username'].db_field, '_id') | ||||
|         self.assertEqual(User._meta['id_field'], 'username') | ||||
|  | ||||
|         def create_invalid_user(): | ||||
|             User(name='test').save()  # no primary key field | ||||
|         self.assertRaises(ValidationError, create_invalid_user) | ||||
|         # test no primary key field | ||||
|         self.assertRaises(ValidationError, User(name='test').save) | ||||
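|         # (a field declared with primary_key=True is implicitly required, | ||||
|         # which is what makes saving without it fail validation) | ||||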
|  | ||||
|         def define_invalid_user(): | ||||
|         # define a subclass with a different primary key field than the | ||||
|         # parent | ||||
|         with self.assertRaises(ValueError): | ||||
|             class EmailUser(User): | ||||
|                 email = StringField(primary_key=True) | ||||
|         self.assertRaises(ValueError, define_invalid_user) | ||||
|  | ||||
|         class EmailUser(User): | ||||
|             email = StringField() | ||||
| @@ -411,12 +401,10 @@ class InstanceTest(unittest.TestCase): | ||||
|  | ||||
|         # Mimic Place and NicePlace definitions being in a different file | ||||
|         # and the NicePlace model not being imported at query time. | ||||
|         from mongoengine.base import _document_registry | ||||
|         del(_document_registry['Place.NicePlace']) | ||||
|  | ||||
|         def query_without_importing_nice_place(): | ||||
|             print Place.objects.all() | ||||
|         self.assertRaises(NotRegistered, query_without_importing_nice_place) | ||||
|         with self.assertRaises(NotRegistered): | ||||
|             list(Place.objects.all()) | ||||
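|         # iterating the queryset forces each raw document to be mapped back | ||||
|         # to its subclass through _document_registry; with the | ||||
|         # 'Place.NicePlace' entry deleted, that lookup raises NotRegistered | ||||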
|  | ||||
|     def test_document_registry_regressions(self): | ||||
|  | ||||
| @@ -745,7 +733,7 @@ class InstanceTest(unittest.TestCase): | ||||
|  | ||||
|         try: | ||||
|             t.save() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             expect_msg = "Draft entries may not have a publication date." | ||||
|             self.assertTrue(expect_msg in e.message) | ||||
|             self.assertEqual(e.to_dict(), {'__all__': expect_msg}) | ||||
| @@ -784,7 +772,7 @@ class InstanceTest(unittest.TestCase): | ||||
|         t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) | ||||
|         try: | ||||
|             t.save() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             expect_msg = "Value of z != x + y" | ||||
|             self.assertTrue(expect_msg in e.message) | ||||
|             self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) | ||||
| @@ -798,8 +786,10 @@ class InstanceTest(unittest.TestCase): | ||||
|  | ||||
|     def test_modify_empty(self): | ||||
|         doc = self.Person(name="bob", age=10).save() | ||||
|         self.assertRaises( | ||||
|             InvalidDocumentError, lambda: self.Person().modify(set__age=10)) | ||||
|  | ||||
|         with self.assertRaises(InvalidDocumentError): | ||||
|             self.Person().modify(set__age=10) | ||||
|  | ||||
|         self.assertDbEqual([dict(doc.to_mongo())]) | ||||
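|         # modify() builds its atomic find-and-modify query around the | ||||
|         # primary key, which an unsaved document doesn't have yet - hence | ||||
|         # the InvalidDocumentError above | ||||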
|  | ||||
|     def test_modify_invalid_query(self): | ||||
| @@ -807,9 +797,8 @@ class InstanceTest(unittest.TestCase): | ||||
|         doc2 = self.Person(name="jim", age=20).save() | ||||
|         docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] | ||||
|  | ||||
|         self.assertRaises( | ||||
|             InvalidQueryError, | ||||
|             lambda: doc1.modify(dict(id=doc2.id), set__value=20)) | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             doc1.modify({'id': doc2.id}, set__value=20) | ||||
|  | ||||
|         self.assertDbEqual(docs) | ||||
|  | ||||
| @@ -818,7 +807,7 @@ class InstanceTest(unittest.TestCase): | ||||
|         doc2 = self.Person(name="jim", age=20).save() | ||||
|         docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] | ||||
|  | ||||
|         assert not doc1.modify(dict(name=doc2.name), set__age=100) | ||||
|         assert not doc1.modify({'name': doc2.name}, set__age=100) | ||||
|  | ||||
|         self.assertDbEqual(docs) | ||||
|  | ||||
| @@ -827,7 +816,7 @@ class InstanceTest(unittest.TestCase): | ||||
|         doc2 = self.Person(id=ObjectId(), name="jim", age=20) | ||||
|         docs = [dict(doc1.to_mongo())] | ||||
|  | ||||
|         assert not doc2.modify(dict(name=doc2.name), set__age=100) | ||||
|         assert not doc2.modify({'name': doc2.name}, set__age=100) | ||||
|  | ||||
|         self.assertDbEqual(docs) | ||||
|  | ||||
| @@ -1293,12 +1282,11 @@ class InstanceTest(unittest.TestCase): | ||||
|  | ||||
|     def test_document_update(self): | ||||
|  | ||||
|         def update_not_saved_raises(): | ||||
|         # try updating an unsaved document | ||||
|         with self.assertRaises(OperationError): | ||||
|             person = self.Person(name='dcrosta') | ||||
|             person.update(set__name='Dan Crosta') | ||||
|  | ||||
|         self.assertRaises(OperationError, update_not_saved_raises) | ||||
|  | ||||
|         author = self.Person(name='dcrosta') | ||||
|         author.save() | ||||
|  | ||||
| @@ -1308,19 +1296,17 @@ class InstanceTest(unittest.TestCase): | ||||
|         p1 = self.Person.objects.first() | ||||
|         self.assertEqual(p1.name, author.name) | ||||
|  | ||||
|         def update_no_value_raises(): | ||||
|         # try sending an empty update | ||||
|         with self.assertRaises(OperationError): | ||||
|             person = self.Person.objects.first() | ||||
|             person.update() | ||||
|  | ||||
|         self.assertRaises(OperationError, update_no_value_raises) | ||||
|  | ||||
|         def update_no_op_should_default_to_set(): | ||||
|             person = self.Person.objects.first() | ||||
|             person.update(name="Dan") | ||||
|             person.reload() | ||||
|             return person.name | ||||
|  | ||||
|         self.assertEqual("Dan", update_no_op_should_default_to_set()) | ||||
|         # an update that doesn't explicitly specify an operator should | ||||
|         # default to 'set__' | ||||
|         # to 'set__' | ||||
|         person = self.Person.objects.first() | ||||
|         person.update(name="Dan") | ||||
|         person.reload() | ||||
|         self.assertEqual("Dan", person.name) | ||||
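|         # i.e. person.update(name="Dan") behaves like | ||||
|         # person.update(set__name="Dan") | ||||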
|  | ||||
|     def test_update_unique_field(self): | ||||
|         class Doc(Document): | ||||
| @@ -1329,8 +1315,8 @@ class InstanceTest(unittest.TestCase): | ||||
|         doc1 = Doc(name="first").save() | ||||
|         doc2 = Doc(name="second").save() | ||||
|  | ||||
|         self.assertRaises(NotUniqueError, lambda: | ||||
|                           doc2.update(set__name=doc1.name)) | ||||
|         with self.assertRaises(NotUniqueError): | ||||
|             doc2.update(set__name=doc1.name) | ||||
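|         # the unique constraint is enforced through a unique index, so | ||||
|         # MongoDB rejects the duplicate key and MongoEngine surfaces it as | ||||
|         # NotUniqueError | ||||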
|  | ||||
|     def test_embedded_update(self): | ||||
|         """ | ||||
| @@ -1848,15 +1834,13 @@ class InstanceTest(unittest.TestCase): | ||||
|  | ||||
|     def test_duplicate_db_fields_raise_invalid_document_error(self): | ||||
|         """Ensure an InvalidDocumentError is thrown if duplicate fields | ||||
|         declare the same db_field""" | ||||
|  | ||||
|         def throw_invalid_document_error(): | ||||
|         declare the same db_field. | ||||
|         """ | ||||
|         with self.assertRaises(InvalidDocumentError): | ||||
|             class Foo(Document): | ||||
|                 name = StringField() | ||||
|                 name2 = StringField(db_field='name') | ||||
|  | ||||
|         self.assertRaises(InvalidDocumentError, throw_invalid_document_error) | ||||
|  | ||||
|     def test_invalid_son(self): | ||||
|         """Raise an error if loading invalid data""" | ||||
|         class Occurrence(EmbeddedDocument): | ||||
| @@ -1868,11 +1852,13 @@ class InstanceTest(unittest.TestCase): | ||||
|             forms = ListField(StringField(), default=list) | ||||
|             occurs = ListField(EmbeddedDocumentField(Occurrence), default=list) | ||||
|  | ||||
|         def raise_invalid_document(): | ||||
|             Word._from_son({'stem': [1, 2, 3], 'forms': 1, 'count': 'one', | ||||
|                             'occurs': {"hello": None}}) | ||||
|  | ||||
|         self.assertRaises(InvalidDocumentError, raise_invalid_document) | ||||
|         with self.assertRaises(InvalidDocumentError): | ||||
|             Word._from_son({ | ||||
|                 'stem': [1, 2, 3], | ||||
|                 'forms': 1, | ||||
|                 'count': 'one', | ||||
|                 'occurs': {"hello": None} | ||||
|             }) | ||||
|  | ||||
|     def test_reverse_delete_rule_cascade_and_nullify(self): | ||||
|         """Ensure that a referenced document is also deleted upon deletion. | ||||
| @@ -2103,8 +2089,7 @@ class InstanceTest(unittest.TestCase): | ||||
|         self.assertEqual(Bar.objects.get().foo, None) | ||||
|  | ||||
|     def test_invalid_reverse_delete_rule_raise_errors(self): | ||||
|  | ||||
|         def throw_invalid_document_error(): | ||||
|         with self.assertRaises(InvalidDocumentError): | ||||
|             class Blog(Document): | ||||
|                 content = StringField() | ||||
|                 authors = MapField(ReferenceField( | ||||
| @@ -2114,21 +2099,15 @@ class InstanceTest(unittest.TestCase): | ||||
|                         self.Person, | ||||
|                         reverse_delete_rule=NULLIFY)) | ||||
|  | ||||
|         self.assertRaises(InvalidDocumentError, throw_invalid_document_error) | ||||
|  | ||||
|         def throw_invalid_document_error_embedded(): | ||||
|         with self.assertRaises(InvalidDocumentError): | ||||
|             class Parents(EmbeddedDocument): | ||||
|                 father = ReferenceField('Person', reverse_delete_rule=DENY) | ||||
|                 mother = ReferenceField('Person', reverse_delete_rule=DENY) | ||||
|  | ||||
|         self.assertRaises( | ||||
|             InvalidDocumentError, throw_invalid_document_error_embedded) | ||||
|  | ||||
|     def test_reverse_delete_rule_cascade_recurs(self): | ||||
|         """Ensure that a chain of documents is also deleted upon cascaded | ||||
|         deletion. | ||||
|         """ | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             content = StringField() | ||||
|             author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) | ||||
| @@ -2344,15 +2323,14 @@ class InstanceTest(unittest.TestCase): | ||||
|         pickle_doc.save() | ||||
|         pickle_doc.delete() | ||||
|  | ||||
|     def test_throw_invalid_document_error(self): | ||||
|  | ||||
|         # test handles people trying to upsert | ||||
|         def throw_invalid_document_error(): | ||||
|     def test_override_method_with_field(self): | ||||
|         """Test creating a field with a field name that would override | ||||
|         the "validate" method. | ||||
|         """ | ||||
|         with self.assertRaises(InvalidDocumentError): | ||||
|             class Blog(Document): | ||||
|                 validate = DictField() | ||||
|  | ||||
|         self.assertRaises(InvalidDocumentError, throw_invalid_document_error) | ||||
|  | ||||
|     def test_mutating_documents(self): | ||||
|  | ||||
|         class B(EmbeddedDocument): | ||||
| @@ -2815,11 +2793,10 @@ class InstanceTest(unittest.TestCase): | ||||
|         log.log = "Saving" | ||||
|         log.save() | ||||
|  | ||||
|         def change_shard_key(): | ||||
|         # try to change the shard key | ||||
|         with self.assertRaises(OperationError): | ||||
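|             # fields listed in the document's meta 'shard_key' are | ||||
|             # immutable once the document has been saved | ||||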
|             log.machine = "127.0.0.1" | ||||
|  | ||||
|         self.assertRaises(OperationError, change_shard_key) | ||||
|  | ||||
|     def test_shard_key_in_embedded_document(self): | ||||
|         class Foo(EmbeddedDocument): | ||||
|             foo = StringField() | ||||
| @@ -2840,12 +2817,11 @@ class InstanceTest(unittest.TestCase): | ||||
|         bar_doc.bar = 'baz' | ||||
|         bar_doc.save() | ||||
|  | ||||
|         def change_shard_key(): | ||||
|         # try to change the shard key | ||||
|         with self.assertRaises(OperationError): | ||||
|             bar_doc.foo.foo = 'something' | ||||
|             bar_doc.save() | ||||
|  | ||||
|         self.assertRaises(OperationError, change_shard_key) | ||||
|  | ||||
|     def test_shard_key_primary(self): | ||||
|         class LogEntry(Document): | ||||
|             machine = StringField(primary_key=True) | ||||
| @@ -2866,11 +2842,10 @@ class InstanceTest(unittest.TestCase): | ||||
|         log.log = "Saving" | ||||
|         log.save() | ||||
|  | ||||
|         def change_shard_key(): | ||||
|         # try to change the shard key | ||||
|         with self.assertRaises(OperationError): | ||||
|             log.machine = "127.0.0.1" | ||||
|  | ||||
|         self.assertRaises(OperationError, change_shard_key) | ||||
|  | ||||
|     def test_kwargs_simple(self): | ||||
|  | ||||
|         class Embedded(EmbeddedDocument): | ||||
| @@ -2955,11 +2930,9 @@ class InstanceTest(unittest.TestCase): | ||||
|     def test_bad_mixed_creation(self): | ||||
|         """Ensure that a document gives the correct error when duplicating arguments | ||||
|         """ | ||||
|         def construct_bad_instance(): | ||||
|         with self.assertRaises(TypeError): | ||||
|             return self.Person("Test User", 42, name="Bad User") | ||||
|  | ||||
|         self.assertRaises(TypeError, construct_bad_instance) | ||||
|  | ||||
|     def test_data_contains_id_field(self): | ||||
|         """Ensure that asking for _data returns 'id' | ||||
|         """ | ||||
| @@ -3118,17 +3091,17 @@ class InstanceTest(unittest.TestCase): | ||||
|         p4 = Person.objects()[0] | ||||
|         p4.save() | ||||
|         self.assertEquals(p4.height, 189) | ||||
|          | ||||
|  | ||||
|         # However the default will not be fixed in DB | ||||
|         self.assertEquals(Person.objects(height=189).count(), 0) | ||||
|          | ||||
|  | ||||
|         # alter DB for the new default | ||||
|         coll = Person._get_collection() | ||||
|         for person in Person.objects.as_pymongo(): | ||||
|             if 'height' not in person: | ||||
|                 person['height'] = 189 | ||||
|                 coll.save(person) | ||||
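|                 # pymongo's Collection.save() upserts the whole document | ||||
|                 # by its _id (deprecated in pymongo 3.x in favor of | ||||
|                 # replace_one) | ||||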
|                  | ||||
|  | ||||
|         self.assertEquals(Person.objects(height=189).count(), 1) | ||||
|  | ||||
|     def test_from_son(self): | ||||
| @@ -3202,5 +3175,20 @@ class InstanceTest(unittest.TestCase): | ||||
|             self.assertEqual(b._instance, a) | ||||
|         self.assertEqual(idx, 2) | ||||
|  | ||||
|     def test_falsey_pk(self): | ||||
|         """Ensure that we can create and update a document with a falsy PK. | ||||
|         """ | ||||
|         class Person(Document): | ||||
|             age = IntField(primary_key=True) | ||||
|             height = FloatField() | ||||
|  | ||||
|         person = Person() | ||||
|         person.age = 0 | ||||
|         person.height = 1.89 | ||||
|         person.save() | ||||
|  | ||||
|         person.update(set__height=2.0) | ||||
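|         # regression guard: the underlying query-building presumably has to | ||||
|         # check "pk is not None" rather than plain truthiness, or a primary | ||||
|         # key of 0 would be treated as missing | ||||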
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
| @@ -1,6 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
| import uuid | ||||
|  | ||||
|   | ||||
| @@ -1,7 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
| from datetime import datetime | ||||
|  | ||||
| @@ -60,7 +57,7 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|  | ||||
|         try: | ||||
|             User().validate() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("User:None" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 'username': 'Field is required', | ||||
| @@ -70,7 +67,7 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|         user.name = None | ||||
|         try: | ||||
|             user.save() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("User:RossC0" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 'name': 'Field is required'}) | ||||
| @@ -118,7 +115,7 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|  | ||||
|         try: | ||||
|             Doc(id="bad").validate() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("SubDoc:None" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 "e": {'val': 'OK could not be converted to int'}}) | ||||
| @@ -136,7 +133,7 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|         doc.e.val = "OK" | ||||
|         try: | ||||
|             doc.save() | ||||
|         except ValidationError, e: | ||||
|         except ValidationError as e: | ||||
|             self.assertTrue("Doc:test" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 "e": {'val': 'OK could not be converted to int'}}) | ||||
| @@ -156,14 +153,14 @@ class ValidatorErrorTest(unittest.TestCase): | ||||
|  | ||||
|         s = SubDoc() | ||||
|  | ||||
|         self.assertRaises(ValidationError, lambda: s.validate()) | ||||
|         self.assertRaises(ValidationError, s.validate) | ||||
|  | ||||
|         d1.e = s | ||||
|         d2.e = s | ||||
|  | ||||
|         del d1 | ||||
|  | ||||
|         self.assertRaises(ValidationError, lambda: d2.validate()) | ||||
|         self.assertRaises(ValidationError, d2.validate) | ||||
|  | ||||
|     def test_parent_reference_in_child_document(self): | ||||
|         """ | ||||
|   | ||||
| @@ -1,11 +1,7 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
|  | ||||
| import six | ||||
| from nose.plugins.skip import SkipTest | ||||
|  | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import datetime | ||||
| import unittest | ||||
| import uuid | ||||
| @@ -29,10 +25,9 @@ except ImportError: | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.base import _document_registry | ||||
| from mongoengine.base.datastructures import BaseDict, EmbeddedDocumentList | ||||
| from mongoengine.base import (BaseDict, BaseField, EmbeddedDocumentList, | ||||
|                               _document_registry) | ||||
| from mongoengine.errors import NotRegistered, DoesNotExist | ||||
| from mongoengine.python_support import PY3, b, bin_type | ||||
|  | ||||
| __all__ = ("FieldTest", "EmbeddedDocumentListFieldTestCase") | ||||
|  | ||||
| @@ -483,27 +478,41 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|     def test_decimal_storage(self): | ||||
|         class Person(Document): | ||||
|             btc = DecimalField(precision=4) | ||||
|             float_value = DecimalField(precision=4) | ||||
|             string_value = DecimalField(precision=4, force_string=True) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Person(btc=10).save() | ||||
|         Person(btc=10.1).save() | ||||
|         Person(btc=10.11).save() | ||||
|         Person(btc="10.111").save() | ||||
|         Person(btc=Decimal("10.1111")).save() | ||||
|         Person(btc=Decimal("10.11111")).save() | ||||
|         values_to_store = [10, 10.1, 10.11, "10.111", | ||||
|                            Decimal("10.1111"), Decimal("10.11111")] | ||||
|         for store_at_creation in [True, False]: | ||||
|             for value in values_to_store: | ||||
|                 # to_python is called explicitly when values are passed | ||||
|                 # to __init__ as kwargs | ||||
|                 if store_at_creation: | ||||
|                     Person(float_value=value, string_value=value).save() | ||||
|                 else: | ||||
|                     person = Person.objects.create() | ||||
|                     person.float_value = value | ||||
|                     person.string_value = value | ||||
|                     person.save() | ||||
|  | ||||
|         # How it's stored | ||||
|         expected = [{'btc': 10.0}, {'btc': 10.1}, {'btc': 10.11}, | ||||
|                     {'btc': 10.111}, {'btc': 10.1111}, {'btc': 10.1111}] | ||||
|         expected = [ | ||||
|             {'float_value': 10.0, 'string_value': '10.0000'}, | ||||
|             {'float_value': 10.1, 'string_value': '10.1000'}, | ||||
|             {'float_value': 10.11, 'string_value': '10.1100'}, | ||||
|             {'float_value': 10.111, 'string_value': '10.1110'}, | ||||
|             {'float_value': 10.1111, 'string_value': '10.1111'}, | ||||
|             {'float_value': 10.1111, 'string_value': '10.1111'}] | ||||
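|         # the loop above stores every value twice (once through __init__ | ||||
|         # kwargs, once through attribute assignment), so the expected list | ||||
|         # doubles | ||||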
|         expected.extend(expected) | ||||
|         actual = list(Person.objects.exclude('id').as_pymongo()) | ||||
|         self.assertEqual(expected, actual) | ||||
|  | ||||
|         # How it comes out locally | ||||
|         expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'), | ||||
|                     Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')] | ||||
|         actual = list(Person.objects().scalar('btc')) | ||||
|         self.assertEqual(expected, actual) | ||||
|         expected.extend(expected) | ||||
|         for field_name in ['float_value', 'string_value']: | ||||
|             actual = list(Person.objects().scalar(field_name)) | ||||
|             self.assertEqual(expected, actual) | ||||
|  | ||||
|     def test_boolean_validation(self): | ||||
|         """Ensure that invalid values cannot be assigned to boolean fields. | ||||
| @@ -639,8 +648,8 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         # Post UTC - microseconds are rounded (down) to the nearest | ||||
|         # millisecond and dropped | ||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) | ||||
|         d2 = datetime.datetime(1970, 01, 01, 00, 00, 01) | ||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) | ||||
|         d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) | ||||
|         log = LogEntry() | ||||
|         log.date = d1 | ||||
|         log.save() | ||||
| @@ -649,15 +658,15 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertEqual(log.date, d2) | ||||
|  | ||||
|         # Post UTC - microseconds are rounded (down) to the nearest millisecond | ||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) | ||||
|         d2 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9000) | ||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) | ||||
|         d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000) | ||||
|         log.date = d1 | ||||
|         log.save() | ||||
|         log.reload() | ||||
|         self.assertNotEqual(log.date, d1) | ||||
|         self.assertEqual(log.date, d2) | ||||
|  | ||||
|         if not PY3: | ||||
|         if not six.PY3: | ||||
|             # For pre-UTC dates, microseconds below 1000 are dropped | ||||
|             # This does not seem to be true in PY3 | ||||
|             d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) | ||||
| @@ -677,7 +686,7 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         LogEntry.drop_collection() | ||||
|  | ||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01) | ||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1) | ||||
|         log = LogEntry() | ||||
|         log.date = d1 | ||||
|         log.validate() | ||||
| @@ -694,8 +703,8 @@ class FieldTest(unittest.TestCase): | ||||
|         LogEntry.drop_collection() | ||||
|  | ||||
|         # create 60 log entries | ||||
|         for i in xrange(1950, 2010): | ||||
|             d = datetime.datetime(i, 01, 01, 00, 00, 01) | ||||
|         for i in range(1950, 2010): | ||||
|             d = datetime.datetime(i, 1, 1, 0, 0, 1) | ||||
|             LogEntry(date=d).save() | ||||
|  | ||||
|         self.assertEqual(LogEntry.objects.count(), 60) | ||||
| @@ -742,7 +751,7 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         # Post UTC - microseconds are rounded (down) to the nearest | ||||
|         # millisecond and dropped - with default datetimefields | ||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) | ||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) | ||||
|         log = LogEntry() | ||||
|         log.date = d1 | ||||
|         log.save() | ||||
| @@ -751,7 +760,7 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         # Post UTC - microseconds are rounded (down) to the nearest | ||||
|         # millisecond - with default datetimefields | ||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) | ||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) | ||||
|         log.date = d1 | ||||
|         log.save() | ||||
|         log.reload() | ||||
| @@ -768,7 +777,7 @@ class FieldTest(unittest.TestCase): | ||||
|         # Pre-UTC microseconds above 1000 are wonky with default | ||||
|         # datetimefields - log.date ends up with an invalid microsecond | ||||
|         # value, so we can't construct a date to compare against. | ||||
|         for i in xrange(1001, 3113, 33): | ||||
|         for i in range(1001, 3113, 33): | ||||
|             d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) | ||||
|             log.date = d1 | ||||
|             log.save() | ||||
| @@ -778,7 +787,7 @@ class FieldTest(unittest.TestCase): | ||||
|             self.assertEqual(log, log1) | ||||
|  | ||||
|         # Test string padding | ||||
|         microsecond = map(int, [math.pow(10, x) for x in xrange(6)]) | ||||
|         microsecond = map(int, [math.pow(10, x) for x in range(6)]) | ||||
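|         # note: on Python 3, map() returns a lazy iterator; it is consumed | ||||
|         # exactly once, by the itertools.product() call below | ||||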
|         mm = dd = hh = ii = ss = [1, 10] | ||||
|  | ||||
|         for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): | ||||
| @@ -800,7 +809,7 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         LogEntry.drop_collection() | ||||
|  | ||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) | ||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) | ||||
|         log = LogEntry() | ||||
|         log.date = d1 | ||||
|         log.save() | ||||
| @@ -811,8 +820,8 @@ class FieldTest(unittest.TestCase): | ||||
|         LogEntry.drop_collection() | ||||
|  | ||||
|         # create 60 log entries | ||||
|         for i in xrange(1950, 2010): | ||||
|             d = datetime.datetime(i, 01, 01, 00, 00, 01, 999) | ||||
|         for i in range(1950, 2010): | ||||
|             d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) | ||||
|             LogEntry(date=d).save() | ||||
|  | ||||
|         self.assertEqual(LogEntry.objects.count(), 60) | ||||
| @@ -1047,7 +1056,7 @@ class FieldTest(unittest.TestCase): | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_list_assignment(self): | ||||
|         """Ensure that list field element assignment and slicing work  | ||||
|         """Ensure that list field element assignment and slicing work | ||||
|         """ | ||||
|         class BlogPost(Document): | ||||
|             info = ListField() | ||||
| @@ -1057,12 +1066,12 @@ class FieldTest(unittest.TestCase): | ||||
|         post = BlogPost() | ||||
|         post.info = ['e1', 'e2', 3, '4', 5] | ||||
|         post.save() | ||||
|          | ||||
|  | ||||
|         post.info[0] = 1 | ||||
|         post.save() | ||||
|         post.reload() | ||||
|         self.assertEqual(post.info[0], 1) | ||||
|          | ||||
|  | ||||
|         post.info[1:3] = ['n2', 'n3'] | ||||
|         post.save() | ||||
|         post.reload() | ||||
| @@ -1120,12 +1129,11 @@ class FieldTest(unittest.TestCase): | ||||
|         e.mapping = [1] | ||||
|         e.save() | ||||
|  | ||||
|         def create_invalid_mapping(): | ||||
|         # try creating an invalid mapping | ||||
|         with self.assertRaises(ValidationError): | ||||
|             e.mapping = ["abc"] | ||||
|             e.save() | ||||
|  | ||||
|         self.assertRaises(ValidationError, create_invalid_mapping) | ||||
|  | ||||
|         Simple.drop_collection() | ||||
|  | ||||
|     def test_list_field_rejects_strings(self): | ||||
| @@ -1209,7 +1217,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertEqual(simple.widgets, [4]) | ||||
|  | ||||
|     def test_list_field_with_negative_indices(self): | ||||
|          | ||||
|  | ||||
|         class Simple(Document): | ||||
|             widgets = ListField() | ||||
|  | ||||
| @@ -1392,12 +1400,11 @@ class FieldTest(unittest.TestCase): | ||||
|         e.mapping['someint'] = 1 | ||||
|         e.save() | ||||
|  | ||||
|         def create_invalid_mapping(): | ||||
|         # try creating an invalid mapping | ||||
|         with self.assertRaises(ValidationError): | ||||
|             e.mapping['somestring'] = "abc" | ||||
|             e.save() | ||||
|  | ||||
|         self.assertRaises(ValidationError, create_invalid_mapping) | ||||
|  | ||||
|         Simple.drop_collection() | ||||
|  | ||||
|     def test_dictfield_complex(self): | ||||
| @@ -1470,11 +1477,10 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertEqual(BaseDict, type(e.mapping)) | ||||
|         self.assertEqual({"ints": [3, 4]}, e.mapping) | ||||
|  | ||||
|         def create_invalid_mapping(): | ||||
|         # try creating an invalid mapping | ||||
|         with self.assertRaises(ValueError): | ||||
|             e.update(set__mapping={"somestrings": ["foo", "bar", ]}) | ||||
|  | ||||
|         self.assertRaises(ValueError, create_invalid_mapping) | ||||
|  | ||||
|         Simple.drop_collection() | ||||
|  | ||||
|     def test_mapfield(self): | ||||
| @@ -1489,18 +1495,14 @@ class FieldTest(unittest.TestCase): | ||||
|         e.mapping['someint'] = 1 | ||||
|         e.save() | ||||
|  | ||||
|         def create_invalid_mapping(): | ||||
|         with self.assertRaises(ValidationError): | ||||
|             e.mapping['somestring'] = "abc" | ||||
|             e.save() | ||||
|  | ||||
|         self.assertRaises(ValidationError, create_invalid_mapping) | ||||
|  | ||||
|         def create_invalid_class(): | ||||
|         with self.assertRaises(ValidationError): | ||||
|             class NoDeclaredType(Document): | ||||
|                 mapping = MapField() | ||||
|  | ||||
|         self.assertRaises(ValidationError, create_invalid_class) | ||||
|  | ||||
|         Simple.drop_collection() | ||||
|  | ||||
|     def test_complex_mapfield(self): | ||||
| @@ -1529,14 +1531,10 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) | ||||
|         self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) | ||||
|  | ||||
|         def create_invalid_mapping(): | ||||
|         with self.assertRaises(ValidationError): | ||||
|             e.mapping['someint'] = 123 | ||||
|             e.save() | ||||
|  | ||||
|         self.assertRaises(ValidationError, create_invalid_mapping) | ||||
|  | ||||
|         Extensible.drop_collection() | ||||
|  | ||||
|     def test_embedded_mapfield_db_field(self): | ||||
|  | ||||
|         class Embedded(EmbeddedDocument): | ||||
| @@ -1746,8 +1744,8 @@ class FieldTest(unittest.TestCase): | ||||
|         # Reference is no longer valid | ||||
|         foo.delete() | ||||
|         bar = Bar.objects.get() | ||||
|         self.assertRaises(DoesNotExist, lambda: getattr(bar, 'ref')) | ||||
|         self.assertRaises(DoesNotExist, lambda: getattr(bar, 'generic_ref')) | ||||
|         self.assertRaises(DoesNotExist, getattr, bar, 'ref') | ||||
|         self.assertRaises(DoesNotExist, getattr, bar, 'generic_ref') | ||||
|  | ||||
|         # When auto_dereference is disabled, there is no trouble returning DBRef | ||||
|         bar = Bar.objects.get() | ||||
| @@ -1823,7 +1821,7 @@ class FieldTest(unittest.TestCase): | ||||
|                                'parent': "50a234ea469ac1eda42d347d"}) | ||||
|         mongoed = p1.to_mongo() | ||||
|         self.assertTrue(isinstance(mongoed['parent'], ObjectId)) | ||||
|          | ||||
|  | ||||
|     def test_cached_reference_field_get_and_save(self): | ||||
|         """ | ||||
|         Tests #1047: CachedReferenceField creates DBRefs on to_python, but can't save them on to_mongo | ||||
| @@ -1835,11 +1833,11 @@ class FieldTest(unittest.TestCase): | ||||
|         class Ocorrence(Document): | ||||
|             person = StringField() | ||||
|             animal = CachedReferenceField(Animal) | ||||
|          | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|         Ocorrence.drop_collection() | ||||
|          | ||||
|         Ocorrence(person="testte",  | ||||
|  | ||||
|         Ocorrence(person="testte", | ||||
|                   animal=Animal(name="Leopard", tag="heavy").save()).save() | ||||
|         p = Ocorrence.objects.get() | ||||
|         p.person = 'new_testte' | ||||
| @@ -2022,7 +2020,7 @@ class FieldTest(unittest.TestCase): | ||||
|         }) | ||||
|  | ||||
|     def test_cached_reference_fields_on_embedded_documents(self): | ||||
|         def build(): | ||||
|         with self.assertRaises(InvalidDocumentError): | ||||
|             class Test(Document): | ||||
|                 name = StringField() | ||||
|  | ||||
| @@ -2031,8 +2029,6 @@ class FieldTest(unittest.TestCase): | ||||
|                     'test': CachedReferenceField(Test) | ||||
|             }) | ||||
|  | ||||
|         self.assertRaises(InvalidDocumentError, build) | ||||
|  | ||||
|     def test_cached_reference_auto_sync(self): | ||||
|         class Person(Document): | ||||
|             TYPES = ( | ||||
| @@ -2810,6 +2806,38 @@ class FieldTest(unittest.TestCase): | ||||
|         Post.drop_collection() | ||||
|         User.drop_collection() | ||||
|  | ||||
|     def test_generic_reference_filter_by_dbref(self): | ||||
|         """Ensure we can search for a specific generic reference by | ||||
|         providing its DBRef. | ||||
|         """ | ||||
|         class Doc(Document): | ||||
|             ref = GenericReferenceField() | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         doc1 = Doc.objects.create() | ||||
|         doc2 = Doc.objects.create(ref=doc1) | ||||
|  | ||||
|         doc = Doc.objects.get(ref=DBRef('doc', doc1.pk)) | ||||
|         self.assertEqual(doc, doc2) | ||||
|  | ||||
|     def test_generic_reference_filter_by_objectid(self): | ||||
|         """Ensure we can search for a specific generic reference by | ||||
|         providing its ObjectId. | ||||
|         """ | ||||
|         class Doc(Document): | ||||
|             ref = GenericReferenceField() | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         doc1 = Doc.objects.create() | ||||
|         doc2 = Doc.objects.create(ref=doc1) | ||||
|  | ||||
|         self.assertTrue(isinstance(doc1.pk, ObjectId)) | ||||
|  | ||||
|         doc = Doc.objects.get(ref=doc1.pk) | ||||
|         self.assertEqual(doc, doc2) | ||||
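|         # a GenericReferenceField is stored as a sub-document along the | ||||
|         # lines of {'_cls': 'Doc', '_ref': DBRef('doc', <ObjectId>)}, which | ||||
|         # is why both a DBRef and a bare ObjectId match it in queries | ||||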
|  | ||||
|     def test_binary_fields(self): | ||||
|         """Ensure that binary fields can be stored and retrieved. | ||||
|         """ | ||||
| @@ -2817,7 +2845,7 @@ class FieldTest(unittest.TestCase): | ||||
|             content_type = StringField() | ||||
|             blob = BinaryField() | ||||
|  | ||||
|         BLOB = b('\xe6\x00\xc4\xff\x07') | ||||
|         BLOB = six.b('\xe6\x00\xc4\xff\x07') | ||||
|         MIME_TYPE = 'application/octet-stream' | ||||
|  | ||||
|         Attachment.drop_collection() | ||||
| @@ -2827,7 +2855,7 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         attachment_1 = Attachment.objects().first() | ||||
|         self.assertEqual(MIME_TYPE, attachment_1.content_type) | ||||
|         self.assertEqual(BLOB, bin_type(attachment_1.blob)) | ||||
|         self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) | ||||
|  | ||||
|         Attachment.drop_collection() | ||||
|  | ||||
| @@ -2854,13 +2882,13 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         attachment_required = AttachmentRequired() | ||||
|         self.assertRaises(ValidationError, attachment_required.validate) | ||||
|         attachment_required.blob = Binary(b('\xe6\x00\xc4\xff\x07')) | ||||
|         attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07')) | ||||
|         attachment_required.validate() | ||||
|  | ||||
|         attachment_size_limit = AttachmentSizeLimit( | ||||
|             blob=b('\xe6\x00\xc4\xff\x07')) | ||||
|             blob=six.b('\xe6\x00\xc4\xff\x07')) | ||||
|         self.assertRaises(ValidationError, attachment_size_limit.validate) | ||||
|         attachment_size_limit.blob = b('\xe6\x00\xc4\xff') | ||||
|         attachment_size_limit.blob = six.b('\xe6\x00\xc4\xff') | ||||
|         attachment_size_limit.validate() | ||||
|  | ||||
|         Attachment.drop_collection() | ||||
| @@ -3001,28 +3029,32 @@ class FieldTest(unittest.TestCase): | ||||
|                 ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), | ||||
|                 ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) | ||||
|             style = StringField(max_length=3, choices=( | ||||
|                 ('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S') | ||||
|                 ('S', 'Small'), ('B', 'Baggy'), ('W', 'Wide')), default='W') | ||||
|  | ||||
|         Shirt.drop_collection() | ||||
|  | ||||
|         shirt = Shirt() | ||||
|         shirt1 = Shirt() | ||||
|         shirt2 = Shirt() | ||||
|  | ||||
|         self.assertEqual(shirt.get_size_display(), None) | ||||
|         self.assertEqual(shirt.get_style_display(), 'Small') | ||||
|         # Make sure get_<field>_display returns the default value (or None) | ||||
|         self.assertEqual(shirt1.get_size_display(), None) | ||||
|         self.assertEqual(shirt1.get_style_display(), 'Wide') | ||||
|  | ||||
|         shirt.size = "XXL" | ||||
|         shirt.style = "B" | ||||
|         self.assertEqual(shirt.get_size_display(), 'Extra Extra Large') | ||||
|         self.assertEqual(shirt.get_style_display(), 'Baggy') | ||||
|         shirt1.size = 'XXL' | ||||
|         shirt1.style = 'B' | ||||
|         shirt2.size = 'M' | ||||
|         shirt2.style = 'S' | ||||
|         self.assertEqual(shirt1.get_size_display(), 'Extra Extra Large') | ||||
|         self.assertEqual(shirt1.get_style_display(), 'Baggy') | ||||
|         self.assertEqual(shirt2.get_size_display(), 'Medium') | ||||
|         self.assertEqual(shirt2.get_style_display(), 'Small') | ||||
|  | ||||
|         # Set as Z - an invalid choice | ||||
|         shirt.size = "Z" | ||||
|         shirt.style = "Z" | ||||
|         self.assertEqual(shirt.get_size_display(), 'Z') | ||||
|         self.assertEqual(shirt.get_style_display(), 'Z') | ||||
|         self.assertRaises(ValidationError, shirt.validate) | ||||
|  | ||||
|         Shirt.drop_collection() | ||||
|         shirt1.size = 'Z' | ||||
|         shirt1.style = 'Z' | ||||
|         self.assertEqual(shirt1.get_size_display(), 'Z') | ||||
|         self.assertEqual(shirt1.get_style_display(), 'Z') | ||||
|         self.assertRaises(ValidationError, shirt1.validate) | ||||
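|         # get_<field>_display falls back to the raw value when it isn't in | ||||
|         # choices; it is validate() that actually enforces the choices list | ||||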
|  | ||||
|     def test_simple_choices_validation(self): | ||||
|         """Ensure that value is in a container of allowed values. | ||||
| @@ -3102,7 +3134,7 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         try: | ||||
|             shirt.validate() | ||||
|         except ValidationError, error: | ||||
|         except ValidationError as error: | ||||
|             # get the validation rules | ||||
|             error_dict = error.to_dict() | ||||
|             self.assertEqual(error_dict['size'], SIZE_MESSAGE) | ||||
| @@ -3131,7 +3163,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.db['mongoengine.counters'].drop() | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         for x in xrange(10): | ||||
|         for x in range(10): | ||||
|             Person(name="Person %s" % x).save() | ||||
|  | ||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) | ||||
| @@ -3155,7 +3187,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.db['mongoengine.counters'].drop() | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         for x in xrange(10): | ||||
|         for x in range(10): | ||||
|             Person(name="Person %s" % x).save() | ||||
|  | ||||
|         self.assertEqual(Person.id.get_next_value(), 11) | ||||
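|         # get_next_value is backed by an atomic upsert on the counters | ||||
|         # collection - a rough sketch, assuming the legacy find_and_modify | ||||
|         # API that pymongo offered at the time: | ||||
|         # | ||||
|         #     counter = self.db['mongoengine.counters'].find_and_modify( | ||||
|         #         query={'_id': 'person.id'}, | ||||
|         #         update={'$inc': {'next': 1}}, | ||||
|         #         new=True, upsert=True) | ||||
|         #     counter['next']  # -> 11, matching the assertion above | ||||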
| @@ -3170,7 +3202,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.db['mongoengine.counters'].drop() | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         for x in xrange(10): | ||||
|         for x in range(10): | ||||
|             Person(name="Person %s" % x).save() | ||||
|  | ||||
|         self.assertEqual(Person.id.get_next_value(), '11') | ||||
| @@ -3186,7 +3218,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.db['mongoengine.counters'].drop() | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         for x in xrange(10): | ||||
|         for x in range(10): | ||||
|             Person(name="Person %s" % x).save() | ||||
|  | ||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) | ||||
| @@ -3211,7 +3243,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.db['mongoengine.counters'].drop() | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         for x in xrange(10): | ||||
|         for x in range(10): | ||||
|             Person(name="Person %s" % x).save() | ||||
|  | ||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) | ||||
| @@ -3273,7 +3305,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Animal.drop_collection() | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         for x in xrange(10): | ||||
|         for x in range(10): | ||||
|             Animal(name="Animal %s" % x).save() | ||||
|             Person(name="Person %s" % x).save() | ||||
|  | ||||
| @@ -3303,7 +3335,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.db['mongoengine.counters'].drop() | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         for x in xrange(10): | ||||
|         for x in range(10): | ||||
|             p = Person(name="Person %s" % x) | ||||
|             p.save() | ||||
|  | ||||
| @@ -3490,7 +3522,7 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertRaises(ValidationError, post.validate) | ||||
|         try: | ||||
|             post.validate() | ||||
|         except ValidationError, error: | ||||
|         except ValidationError as error: | ||||
|             # ValidationError.errors property | ||||
|             self.assertTrue(hasattr(error, 'errors')) | ||||
|             self.assertTrue(isinstance(error.errors, dict)) | ||||
| @@ -3551,8 +3583,6 @@ class FieldTest(unittest.TestCase): | ||||
|         Ensure that tuples remain tuples when they are | ||||
|         inside a ComplexBaseField | ||||
|         """ | ||||
|         from mongoengine.base import BaseField | ||||
|  | ||||
|         class EnumField(BaseField): | ||||
|  | ||||
|             def __init__(self, **kwargs): | ||||
| @@ -3786,9 +3816,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         filtered = self.post1.comments.filter() | ||||
|  | ||||
|         # Ensure nothing was changed | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertListEqual(filtered, self.post1.comments) | ||||
|         self.assertEqual(filtered, self.post1.comments) | ||||
|         self.assertListEqual(filtered, self.post1.comments) | ||||
|  | ||||
|     def test_single_keyword_filter(self): | ||||
|         """ | ||||
| @@ -3839,10 +3867,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         Tests the filter method of a List of Embedded Documents | ||||
|         when the keyword is not a known keyword. | ||||
|         """ | ||||
|         # < 2.6 Incompatible > | ||||
|         # with self.assertRaises(AttributeError): | ||||
|         #    self.post2.comments.filter(year=2) | ||||
|         self.assertRaises(AttributeError, self.post2.comments.filter, year=2) | ||||
|         with self.assertRaises(AttributeError): | ||||
|             self.post2.comments.filter(year=2) | ||||
|  | ||||
|     def test_no_keyword_exclude(self): | ||||
|         """ | ||||
| @@ -3852,9 +3878,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         filtered = self.post1.comments.exclude() | ||||
|  | ||||
|         # Ensure everything was removed | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertListEqual(filtered, []) | ||||
|         self.assertEqual(filtered, []) | ||||
|         self.assertListEqual(filtered, []) | ||||
|  | ||||
|     def test_single_keyword_exclude(self): | ||||
|         """ | ||||
| @@ -3900,10 +3924,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         Tests the exclude method of a List of Embedded Documents | ||||
|         when the keyword is not a known keyword. | ||||
|         """ | ||||
|         # < 2.6 Incompatible > | ||||
|         # with self.assertRaises(AttributeError): | ||||
|         #    self.post2.comments.exclude(year=2) | ||||
|         self.assertRaises(AttributeError, self.post2.comments.exclude, year=2) | ||||
|         with self.assertRaises(AttributeError): | ||||
|             self.post2.comments.exclude(year=2) | ||||
|  | ||||
|     def test_chained_filter_exclude(self): | ||||
|         """ | ||||
| @@ -3941,10 +3963,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         single keyword. | ||||
|         """ | ||||
|         comment = self.post1.comments.get(author='user1') | ||||
|  | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertTrue(isinstance(comment, self.Comments)) | ||||
|         self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertEqual(comment.author, 'user1') | ||||
|  | ||||
|     def test_multi_keyword_get(self): | ||||
| @@ -3953,10 +3972,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         multiple keywords. | ||||
|         """ | ||||
|         comment = self.post2.comments.get(author='user2', message='message2') | ||||
|  | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertTrue(isinstance(comment, self.Comments)) | ||||
|         self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertEqual(comment.author, 'user2') | ||||
|         self.assertEqual(comment.message, 'message2') | ||||
|  | ||||
| @@ -3965,44 +3981,32 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         Tests the get method of a List of Embedded Documents without | ||||
|         a keyword to return multiple documents. | ||||
|         """ | ||||
|         # < 2.6 Incompatible > | ||||
|         # with self.assertRaises(MultipleObjectsReturned): | ||||
|         #    self.post1.comments.get() | ||||
|         self.assertRaises(MultipleObjectsReturned, self.post1.comments.get) | ||||
|         with self.assertRaises(MultipleObjectsReturned): | ||||
|             self.post1.comments.get() | ||||
|  | ||||
|     def test_keyword_multiple_return_get(self): | ||||
|         """ | ||||
|         Tests the get method of a List of Embedded Documents with a keyword | ||||
|         to return multiple documents. | ||||
|         """ | ||||
|         # < 2.6 Incompatible > | ||||
|         # with self.assertRaises(MultipleObjectsReturned): | ||||
|         #    self.post2.comments.get(author='user2') | ||||
|         self.assertRaises( | ||||
|             MultipleObjectsReturned, self.post2.comments.get, author='user2' | ||||
|         ) | ||||
|         with self.assertRaises(MultipleObjectsReturned): | ||||
|             self.post2.comments.get(author='user2') | ||||
|  | ||||
|     def test_unknown_keyword_get(self): | ||||
|         """ | ||||
|         Tests the get method of a List of Embedded Documents with an | ||||
|         unknown keyword. | ||||
|         """ | ||||
|         # < 2.6 Incompatible > | ||||
|         # with self.assertRaises(AttributeError): | ||||
|         #    self.post2.comments.get(year=2020) | ||||
|         self.assertRaises(AttributeError, self.post2.comments.get, year=2020) | ||||
|         with self.assertRaises(AttributeError): | ||||
|             self.post2.comments.get(year=2020) | ||||
|  | ||||
|     def test_no_result_get(self): | ||||
|         """ | ||||
|         Tests the get method of a List of Embedded Documents where get | ||||
|         returns no results. | ||||
|         """ | ||||
|         # < 2.6 Incompatible > | ||||
|         # with self.assertRaises(DoesNotExist): | ||||
|         #    self.post1.comments.get(author='user3') | ||||
|         self.assertRaises( | ||||
|             DoesNotExist, self.post1.comments.get, author='user3' | ||||
|         ) | ||||
|         with self.assertRaises(DoesNotExist): | ||||
|             self.post1.comments.get(author='user3') | ||||
|  | ||||
|     def test_first(self): | ||||
|         """ | ||||
| @@ -4012,9 +4016,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         comment = self.post1.comments.first() | ||||
|  | ||||
|         # Ensure a Comment object was returned. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertTrue(isinstance(comment, self.Comments)) | ||||
|         self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertEqual(comment, self.post1.comments[0]) | ||||
|  | ||||
|     def test_create(self): | ||||
| @@ -4027,22 +4029,14 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         self.post1.save() | ||||
|  | ||||
|         # Ensure the returned value is the comment object. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertTrue(isinstance(comment, self.Comments)) | ||||
|         self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertEqual(comment.author, 'user4') | ||||
|         self.assertEqual(comment.message, 'message1') | ||||
|  | ||||
|         # Ensure the new comment was actually saved to the database. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIn( | ||||
|         #    comment, | ||||
|         #    self.BlogPost.objects(comments__author='user4')[0].comments | ||||
|         # ) | ||||
|         self.assertTrue( | ||||
|             comment in self.BlogPost.objects( | ||||
|                 comments__author='user4' | ||||
|             )[0].comments | ||||
|         self.assertIn( | ||||
|             comment, | ||||
|             self.BlogPost.objects(comments__author='user4')[0].comments | ||||
|         ) | ||||
|  | ||||
|     def test_filtered_create(self): | ||||
| @@ -4057,22 +4051,14 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         self.post1.save() | ||||
|  | ||||
|         # Ensure the returned value is the comment object. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertTrue(isinstance(comment, self.Comments)) | ||||
|         self.assertIsInstance(comment, self.Comments) | ||||
|         self.assertEqual(comment.author, 'user4') | ||||
|         self.assertEqual(comment.message, 'message1') | ||||
|  | ||||
|         # Ensure the new comment was actually saved to the database. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIn( | ||||
|         #    comment, | ||||
|         #    self.BlogPost.objects(comments__author='user4')[0].comments | ||||
|         # ) | ||||
|         self.assertTrue( | ||||
|             comment in self.BlogPost.objects( | ||||
|                 comments__author='user4' | ||||
|             )[0].comments | ||||
|         self.assertIn( | ||||
|             comment, | ||||
|             self.BlogPost.objects(comments__author='user4')[0].comments | ||||
|         ) | ||||
|  | ||||
|     def test_no_keyword_update(self): | ||||
| @@ -4085,22 +4071,14 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         self.post1.save() | ||||
|  | ||||
|         # Ensure that nothing was altered. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIn( | ||||
|         #    original[0], | ||||
|         #    self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         # ) | ||||
|         self.assertTrue( | ||||
|             original[0] in self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         self.assertIn( | ||||
|             original[0], | ||||
|             self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         ) | ||||
|  | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIn( | ||||
|         #    original[1], | ||||
|         #    self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         # ) | ||||
|         self.assertTrue( | ||||
|             original[1] in self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         self.assertIn( | ||||
|             original[1], | ||||
|             self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         ) | ||||
|  | ||||
|         # Ensure the method returned 0 as the number of entries | ||||
| @@ -4146,13 +4124,9 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         comments.save() | ||||
|  | ||||
|         # Ensure that the new comment has been added to the database. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIn( | ||||
|         #    new_comment, | ||||
|         #    self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         # ) | ||||
|         self.assertTrue( | ||||
|             new_comment in self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         self.assertIn( | ||||
|             new_comment, | ||||
|             self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         ) | ||||
|  | ||||
|     def test_delete(self): | ||||
| @@ -4164,23 +4138,15 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|  | ||||
|         # Ensure that all the comments under post1 were deleted in the | ||||
|         # database. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertListEqual( | ||||
|         #    self.BlogPost.objects(id=self.post1.id)[0].comments, [] | ||||
|         # ) | ||||
|         self.assertEqual( | ||||
|         self.assertListEqual( | ||||
|             self.BlogPost.objects(id=self.post1.id)[0].comments, [] | ||||
|         ) | ||||
|  | ||||
|         # Ensure that post1 comments were deleted from the list. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertListEqual(self.post1.comments, []) | ||||
|         self.assertEqual(self.post1.comments, []) | ||||
|         self.assertListEqual(self.post1.comments, []) | ||||
|  | ||||
|         # Ensure that comments still returned a EmbeddedDocumentList object. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertIsInstance(self.post1.comments, EmbeddedDocumentList) | ||||
|         self.assertTrue(isinstance(self.post1.comments, EmbeddedDocumentList)) | ||||
|         self.assertIsInstance(self.post1.comments, EmbeddedDocumentList) | ||||
|  | ||||
|         # Ensure that the delete method returned 2 as the number of entries | ||||
|         # deleted from the database | ||||
| @@ -4220,21 +4186,15 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase): | ||||
|         self.post1.save() | ||||
|  | ||||
|         # Ensure that only the user2 comment was deleted. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertNotIn( | ||||
|         #     comment, self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         # ) | ||||
|         self.assertTrue( | ||||
|             comment not in self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         self.assertNotIn( | ||||
|             comment, self.BlogPost.objects(id=self.post1.id)[0].comments | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             len(self.BlogPost.objects(id=self.post1.id)[0].comments), 1 | ||||
|         ) | ||||
|  | ||||
|         # Ensure that the user2 comment no longer exists in the list. | ||||
|         # < 2.6 Incompatible > | ||||
|         # self.assertNotIn(comment, self.post1.comments) | ||||
|         self.assertTrue(comment not in self.post1.comments) | ||||
|         self.assertNotIn(comment, self.post1.comments) | ||||
|         self.assertEqual(len(self.post1.comments), 1) | ||||
|  | ||||
|         # Ensure that the delete method returned 1 as the number of entries | ||||
|   | ||||
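The assertion rewrites above are possible because this branch drops Python 2.6 from the test matrix: assertIn, assertNotIn, assertListEqual, and assertIsInstance only became available in unittest with Python 2.7. A minimal standalone sketch of the pattern the diff adopts (the test data here is illustrative, not from the suite):

import unittest


class RicherAssertionsDemo(unittest.TestCase):

    def test_membership(self):
        comments = ['first comment', 'second comment']

        # On failure these report the offending value and container,
        # whereas assertTrue(x in y) only prints "False is not true".
        self.assertIn('first comment', comments)
        self.assertNotIn('spam', comments)
        self.assertListEqual(comments, ['first comment', 'second comment'])
        self.assertIsInstance(comments, list)


if __name__ == '__main__':
    unittest.main()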
| @@ -1,18 +1,16 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import copy | ||||
| import os | ||||
| import unittest | ||||
| import tempfile | ||||
|  | ||||
| import gridfs | ||||
| import six | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.python_support import b, StringIO | ||||
| from mongoengine.python_support import StringIO | ||||
|  | ||||
| try: | ||||
|     from PIL import Image | ||||
| @@ -49,7 +47,7 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         PutFile.drop_collection() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         putfile = PutFile() | ||||
| @@ -88,8 +86,8 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         StreamFile.drop_collection() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         more_text = b('Foo Bar') | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         streamfile = StreamFile() | ||||
| @@ -123,8 +121,8 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         StreamFile.drop_collection() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         more_text = b('Foo Bar') | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         streamfile = StreamFile() | ||||
| @@ -155,8 +153,8 @@ class FileTest(unittest.TestCase): | ||||
|         class SetFile(Document): | ||||
|             the_file = FileField() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         more_text = b('Foo Bar') | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|  | ||||
|         SetFile.drop_collection() | ||||
|  | ||||
| @@ -185,7 +183,7 @@ class FileTest(unittest.TestCase): | ||||
|         GridDocument.drop_collection() | ||||
|  | ||||
|         with tempfile.TemporaryFile() as f: | ||||
|             f.write(b("Hello World!")) | ||||
|             f.write(six.b("Hello World!")) | ||||
|             f.flush() | ||||
|  | ||||
|             # Test without default | ||||
| @@ -202,7 +200,7 @@ class FileTest(unittest.TestCase): | ||||
|             self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) | ||||
|  | ||||
|             # Test with default | ||||
|             doc_d = GridDocument(the_file=b('')) | ||||
|             doc_d = GridDocument(the_file=six.b('')) | ||||
|             doc_d.save() | ||||
|  | ||||
|             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||
| @@ -228,7 +226,7 @@ class FileTest(unittest.TestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(b('Hello, World!')) | ||||
|         test_file.the_file.put(six.b('Hello, World!')) | ||||
|         test_file.save() | ||||
|  | ||||
|         # Second instance | ||||
| @@ -282,7 +280,7 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         test_file = TestFile() | ||||
|         self.assertFalse(bool(test_file.the_file)) | ||||
|         test_file.the_file.put(b('Hello, World!'), content_type='text/plain') | ||||
|         test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain') | ||||
|         test_file.save() | ||||
|         self.assertTrue(bool(test_file.the_file)) | ||||
|  | ||||
| @@ -297,66 +295,66 @@ class FileTest(unittest.TestCase): | ||||
|         test_file = TestFile() | ||||
|         self.assertFalse(test_file.the_file in [{"test": 1}]) | ||||
|  | ||||
|     def test_file_disk_space(self):  | ||||
|         """ Test disk space usage when we delete/replace a file """  | ||||
|     def test_file_disk_space(self): | ||||
|         """ Test disk space usage when we delete/replace a file """ | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
|              | ||||
|         text = b('Hello, World!') | ||||
|  | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         testfile = TestFile() | ||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|          | ||||
|         # Now check fs.files and fs.chunks  | ||||
|  | ||||
|         # Now check fs.files and fs.chunks | ||||
|         db = TestFile._get_db() | ||||
|          | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 1) | ||||
|         self.assertEquals(len(list(chunks)), 1) | ||||
|  | ||||
|         # Deleting the document should delete the files | ||||
|         # Deleting the document should delete the files | ||||
|         testfile.delete() | ||||
|          | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
|          | ||||
|         # Test case where we don't store a file in the first place  | ||||
|  | ||||
|         # Test case where we don't store a file in the first place | ||||
|         testfile = TestFile() | ||||
|         testfile.save() | ||||
|          | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
|          | ||||
|  | ||||
|         testfile.delete() | ||||
|          | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
|          | ||||
|         # Test case where we overwrite the file  | ||||
|  | ||||
|         # Test case where we overwrite the file | ||||
|         testfile = TestFile() | ||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|          | ||||
|         text = b('Bonjour, World!') | ||||
|  | ||||
|         text = six.b('Bonjour, World!') | ||||
|         testfile.the_file.replace(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|          | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 1) | ||||
|         self.assertEquals(len(list(chunks)), 1) | ||||
|          | ||||
|  | ||||
|         testfile.delete() | ||||
|          | ||||
|  | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
| @@ -372,14 +370,14 @@ class FileTest(unittest.TestCase): | ||||
|         TestImage.drop_collection() | ||||
|  | ||||
|         with tempfile.TemporaryFile() as f: | ||||
|             f.write(b("Hello World!")) | ||||
|             f.write(six.b("Hello World!")) | ||||
|             f.flush() | ||||
|  | ||||
|             t = TestImage() | ||||
|             try: | ||||
|                 t.image.put(f) | ||||
|                 self.fail("Should have raised an invalidation error") | ||||
|             except ValidationError, e: | ||||
|             except ValidationError as e: | ||||
|                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) | ||||
|  | ||||
|         t = TestImage() | ||||
| @@ -496,7 +494,7 @@ class FileTest(unittest.TestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(b('Hello, World!'), | ||||
|         test_file.the_file.put(six.b('Hello, World!'), | ||||
|                           name="hello.txt") | ||||
|         test_file.save() | ||||
|  | ||||
| @@ -504,16 +502,15 @@ class FileTest(unittest.TestCase): | ||||
|         self.assertEqual(data.get('name'), 'hello.txt') | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         self.assertEqual(test_file.the_file.read(), | ||||
|                           b('Hello, World!')) | ||||
|         self.assertEqual(test_file.the_file.read(), six.b('Hello, World!')) | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         test_file.the_file = b('HELLO, WORLD!') | ||||
|         test_file.the_file = six.b('HELLO, WORLD!') | ||||
|         test_file.save() | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         self.assertEqual(test_file.the_file.read(), | ||||
|                           b('HELLO, WORLD!')) | ||||
|                          six.b('HELLO, WORLD!')) | ||||
|  | ||||
|     def test_copyable(self): | ||||
|         class PutFile(Document): | ||||
| @@ -521,7 +518,7 @@ class FileTest(unittest.TestCase): | ||||
|  | ||||
|         PutFile.drop_collection() | ||||
|  | ||||
|         text = b('Hello, World!') | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
|  | ||||
|         putfile = PutFile() | ||||
|   | ||||
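For context on the b() to six.b() swap in this file: six.b() produces a byte string from a native string literal on both Python 2 and Python 3, which lets these GridFS tests drop MongoEngine's own python_support.b helper. A rough sketch of what it does (assuming six is installed):

import six

payload = six.b('Hello, World!')

# On Python 2 the literal is already a byte string and is returned
# unchanged; on Python 3 six.b() encodes it with latin-1. Either way
# the result is suitable for the_file.put(...) above.
assert isinstance(payload, bytes)
assert payload == b'Hello, World!'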
| @@ -1,7 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
|   | ||||
| @@ -1,11 +0,0 @@ | ||||
| import unittest | ||||
|  | ||||
| from convert_to_new_inheritance_model import * | ||||
| from decimalfield_as_float import * | ||||
| from referencefield_dbref_to_object_id import * | ||||
| from turn_off_inheritance import * | ||||
| from uuidfield_to_binary import * | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -1,51 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField | ||||
|  | ||||
| __all__ = ('ConvertToNewInheritanceModel', ) | ||||
|  | ||||
|  | ||||
| class ConvertToNewInheritanceModel(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_how_to_convert_to_the_new_inheritance_model(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Declaration of the class | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': ['name'] | ||||
|             } | ||||
|  | ||||
|         # 2. Remove _types | ||||
|         collection = Animal._get_collection() | ||||
|         collection.update({}, {"$unset": {"_types": 1}}, multi=True) | ||||
|  | ||||
|         # 3. Confirm extra data is removed | ||||
|         count = collection.find({'_types': {"$exists": True}}).count() | ||||
|         self.assertEqual(0, count) | ||||
|  | ||||
|         # 4. Remove indexes | ||||
|         info = collection.index_information() | ||||
|         indexes_to_drop = [key for key, value in info.iteritems() | ||||
|                            if '_types' in dict(value['key'])] | ||||
|         for index in indexes_to_drop: | ||||
|             collection.drop_index(index) | ||||
|  | ||||
|         # 5. Recreate indexes | ||||
|         Animal.ensure_indexes() | ||||
| @@ -1,50 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import decimal | ||||
| from decimal import Decimal | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, DecimalField, ListField | ||||
|  | ||||
| __all__ = ('ConvertDecimalField', ) | ||||
|  | ||||
|  | ||||
| class ConvertDecimalField(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def test_how_to_convert_decimal_fields(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Old definition - decimals stored as strings | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             money = DecimalField(force_string=True) | ||||
|             monies = ListField(DecimalField(force_string=True)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Person(name="Wilson Jr", money=Decimal("2.50"), | ||||
|                monies=[Decimal("2.10"), Decimal("5.00")]).save() | ||||
|  | ||||
|         # 2. Start the migration by changing the schema | ||||
|         # Change DecimalField - add precision and rounding settings | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             money = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP) | ||||
|             monies = ListField(DecimalField(precision=2, | ||||
|                                             rounding=decimal.ROUND_HALF_UP)) | ||||
|  | ||||
|         # 3. Loop all the objects and mark parent as changed | ||||
|         for p in Person.objects: | ||||
|             p._mark_as_changed('money') | ||||
|             p._mark_as_changed('monies') | ||||
|             p.save() | ||||
|  | ||||
|         # 4. Confirmation of the fix! | ||||
|         wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] | ||||
|         self.assertTrue(isinstance(wilson['money'], float)) | ||||
|         self.assertTrue(all([isinstance(m, float) for m in wilson['monies']])) | ||||
| @@ -1,52 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, ReferenceField, ListField | ||||
|  | ||||
| __all__ = ('ConvertToObjectIdsModel', ) | ||||
|  | ||||
|  | ||||
| class ConvertToObjectIdsModel(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def test_how_to_convert_to_object_id_reference_fields(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Old definition - using dbrefs | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             parent = ReferenceField('self', dbref=True) | ||||
|             friends = ListField(ReferenceField('self', dbref=True)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         p1 = Person(name="Wilson", parent=None).save() | ||||
|         f1 = Person(name="John", parent=None).save() | ||||
|         f2 = Person(name="Paul", parent=None).save() | ||||
|         f3 = Person(name="George", parent=None).save() | ||||
|         f4 = Person(name="Ringo", parent=None).save() | ||||
|         Person(name="Wilson Jr", parent=p1, friends=[f1, f2, f3, f4]).save() | ||||
|  | ||||
|         # 2. Start the migration by changing the schema | ||||
|         # Change ReferenceField as now dbref defaults to False | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             parent = ReferenceField('self') | ||||
|             friends = ListField(ReferenceField('self')) | ||||
|  | ||||
|         # 3. Loop all the objects and mark parent as changed | ||||
|         for p in Person.objects: | ||||
|             p._mark_as_changed('parent') | ||||
|             p._mark_as_changed('friends') | ||||
|             p.save() | ||||
|  | ||||
|         # 4. Confirmation of the fix! | ||||
|         wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] | ||||
|         self.assertEqual(p1.id, wilson['parent']) | ||||
|         self.assertEqual([f1.id, f2.id, f3.id, f4.id], wilson['friends']) | ||||
| @@ -1,62 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField | ||||
|  | ||||
| __all__ = ('TurnOffInheritanceTest', ) | ||||
|  | ||||
|  | ||||
| class TurnOffInheritanceTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_how_to_turn_off_inheritance(self): | ||||
|         """Demonstrates migrating from allow_inheritance = True to False. | ||||
|         """ | ||||
|  | ||||
|         # 1. Old declaration of the class | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': ['name'] | ||||
|             } | ||||
|  | ||||
|         # 2. Turn off inheritance | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = { | ||||
|                 'allow_inheritance': False, | ||||
|                 'indexes': ['name'] | ||||
|             } | ||||
|  | ||||
|         # 3. Remove _types and _cls | ||||
|         collection = Animal._get_collection() | ||||
|         collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True) | ||||
|  | ||||
|         # 4. Confirm extra data is removed | ||||
|         count = collection.find({"$or": [{'_types': {"$exists": True}}, | ||||
|                                          {'_cls': {"$exists": True}}]}).count() | ||||
|         assert count == 0 | ||||
|  | ||||
|         # 5. Remove indexes | ||||
|         info = collection.index_information() | ||||
|         indexes_to_drop = [key for key, value in info.iteritems() | ||||
|                            if '_types' in dict(value['key']) | ||||
|                               or '_cls' in dict(value['key'])] | ||||
|         for index in indexes_to_drop: | ||||
|             collection.drop_index(index) | ||||
|  | ||||
|         # 6. Recreate indexes | ||||
|         Animal.ensure_indexes() | ||||
| @@ -1,48 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import uuid | ||||
|  | ||||
| from mongoengine import Document, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import StringField, UUIDField, ListField | ||||
|  | ||||
| __all__ = ('ConvertToBinaryUUID', ) | ||||
|  | ||||
|  | ||||
| class ConvertToBinaryUUID(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def test_how_to_convert_to_binary_uuid_fields(self): | ||||
|         """Demonstrates migrating from 0.7 to 0.8 | ||||
|         """ | ||||
|  | ||||
|         # 1. Old definition - using non-binary UUIDs | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             uuid = UUIDField(binary=False) | ||||
|             uuids = ListField(UUIDField(binary=False)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Person(name="Wilson Jr", uuid=uuid.uuid4(), | ||||
|                uuids=[uuid.uuid4(), uuid.uuid4()]).save() | ||||
|  | ||||
|         # 2. Start the migration by changing the schema | ||||
|         # Change UUIDField as now binary defaults to True | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             uuid = UUIDField() | ||||
|             uuids = ListField(UUIDField()) | ||||
|  | ||||
|         # 3. Loop all the objects and mark parent as changed | ||||
|         for p in Person.objects: | ||||
|             p._mark_as_changed('uuid') | ||||
|             p._mark_as_changed('uuids') | ||||
|             p.save() | ||||
|  | ||||
|         # 4. Confirmation of the fix! | ||||
|         wilson = Person.objects(name="Wilson Jr").as_pymongo()[0] | ||||
|         self.assertTrue(isinstance(wilson['uuid'], uuid.UUID)) | ||||
|         self.assertTrue(all([isinstance(u, uuid.UUID) for u in wilson['uuids']])) | ||||
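The five migration examples deleted above (inheritance model, decimal fields, dbref references, inheritance switch-off, binary UUIDs) all follow the same 0.7-to-0.8 recipe: redefine the document class with the new field defaults, then mark the affected fields as changed and re-save every object so the data is rewritten in the new format. A condensed sketch of that shared pattern, assuming a local MongoDB and using an illustrative Person document:

import uuid

from mongoengine import Document, connect
from mongoengine.fields import StringField, UUIDField

connect(db='mongoenginetest')


class Person(Document):
    name = StringField()
    uuid = UUIDField()  # 0.8 default: binary=True

Person(name='Wilson Jr', uuid=uuid.uuid4()).save()

# Force a rewrite of the field under the new schema.
for person in Person.objects:
    person._mark_as_changed('uuid')
    person.save()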
| @@ -1,6 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| @@ -95,7 +92,7 @@ class OnlyExcludeAllTest(unittest.TestCase): | ||||
|         exclude = ['d', 'e'] | ||||
|         only = ['b', 'c'] | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) | ||||
|         qs = qs.only(*only) | ||||
| @@ -103,14 +100,14 @@ class OnlyExcludeAllTest(unittest.TestCase): | ||||
|         qs = qs.exclude(*exclude) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         qs = qs.exclude(*exclude) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) | ||||
|         qs = qs.only(*only) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|  | ||||
|         qs = MyDoc.objects.exclude(*exclude) | ||||
|         qs = qs.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = qs.fields(**{i: 1 for i in include}) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) | ||||
|         qs = qs.only(*only) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
| @@ -129,7 +126,7 @@ class OnlyExcludeAllTest(unittest.TestCase): | ||||
|         exclude = ['d', 'e'] | ||||
|         only = ['b', 'c'] | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**dict(((i, 1) for i in include))) | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         qs = qs.exclude(*exclude) | ||||
|         qs = qs.only(*only) | ||||
|         qs = qs.fields(slice__b=5) | ||||
|   | ||||
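The fields() calls above trade dict((i, 1) for i in include) for a dict comprehension, another construct that needs Python 2.7 or newer. The two spellings build the same mapping:

include = ['a', 'b', 'c', 'd', 'e']

old_style = dict(((i, 1) for i in include))  # also works on Python 2.6
new_style = {i: 1 for i in include}          # requires Python 2.7+

assert old_style == new_style == {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}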
| @@ -1,9 +1,5 @@ | ||||
| import sys | ||||
|  | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
| from datetime import datetime, timedelta | ||||
| import unittest | ||||
|  | ||||
| from pymongo.errors import OperationFailure | ||||
| from mongoengine import * | ||||
|   | ||||
| @@ -1,6 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import connect, Document, IntField | ||||
| @@ -99,4 +96,4 @@ class FindAndModifyTest(unittest.TestCase): | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|     unittest.main() | ||||
|   | ||||
| @@ -9,13 +9,13 @@ from nose.plugins.skip import SkipTest | ||||
| import pymongo | ||||
| from pymongo.errors import ConfigurationError | ||||
| from pymongo.read_preferences import ReadPreference | ||||
|  | ||||
| import six | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_connection, get_db | ||||
| from mongoengine.context_managers import query_counter, switch_db | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| from mongoengine.python_support import IS_PYMONGO_3, PY3 | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
| from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned, | ||||
|                                   QuerySet, QuerySetManager, queryset_manager) | ||||
|  | ||||
| @@ -25,7 +25,10 @@ __all__ = ("QuerySetTest",) | ||||
| class db_ops_tracker(query_counter): | ||||
|  | ||||
|     def get_ops(self): | ||||
|         ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} | ||||
|         ignore_query = { | ||||
|             'ns': {'$ne': '%s.system.indexes' % self.db.name}, | ||||
|             'command.count': {'$ne': 'system.profile'} | ||||
|         } | ||||
|         return list(self.db.system.profile.find(ignore_query)) | ||||
|  | ||||
|  | ||||
| @@ -94,12 +97,12 @@ class QuerySetTest(unittest.TestCase): | ||||
|             author = ReferenceField(self.Person) | ||||
|             author2 = GenericReferenceField() | ||||
|  | ||||
|         def test_reference(): | ||||
|         # test addressing a field from a reference | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             list(BlogPost.objects(author__name="test")) | ||||
|  | ||||
|         self.assertRaises(InvalidQueryError, test_reference) | ||||
|  | ||||
|         def test_generic_reference(): | ||||
|         # should fail for a generic reference as well | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             list(BlogPost.objects(author2__name="test")) | ||||
|  | ||||
|     def test_find(self): | ||||
| @@ -174,7 +177,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         # Test larger slice __repr__ | ||||
|         self.Person.objects.delete() | ||||
|         for i in xrange(55): | ||||
|         for i in range(55): | ||||
|             self.Person(name='A%s' % i, age=i).save() | ||||
|  | ||||
|         self.assertEqual(self.Person.objects.count(), 55) | ||||
| @@ -218,14 +221,15 @@ class QuerySetTest(unittest.TestCase): | ||||
|         person = self.Person.objects[1] | ||||
|         self.assertEqual(person.name, "User B") | ||||
|  | ||||
|         self.assertRaises(IndexError, self.Person.objects.__getitem__, 2) | ||||
|         with self.assertRaises(IndexError): | ||||
|             self.Person.objects[2] | ||||
|  | ||||
|         # Find a document using just the object id | ||||
|         person = self.Person.objects.with_id(person1.id) | ||||
|         self.assertEqual(person.name, "User A") | ||||
|  | ||||
|         self.assertRaises( | ||||
|             InvalidQueryError, self.Person.objects(name="User A").with_id, person1.id) | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             self.Person.objects(name="User A").with_id(person1.id) | ||||
|  | ||||
|     def test_find_only_one(self): | ||||
|         """Ensure that a query using ``get`` returns at most one result. | ||||
| @@ -337,9 +341,37 @@ class QuerySetTest(unittest.TestCase): | ||||
|         query = query.filter(boolfield=True) | ||||
|         self.assertEqual(query.count(), 1) | ||||
|  | ||||
|     def test_batch_size(self): | ||||
|         """Ensure that batch_size works.""" | ||||
|         class A(Document): | ||||
|             s = StringField() | ||||
|  | ||||
|         A.drop_collection() | ||||
|  | ||||
|         for i in range(100): | ||||
|             A.objects.create(s=str(i)) | ||||
|  | ||||
|         # test iterating over the result set | ||||
|         cnt = 0 | ||||
|         for a in A.objects.batch_size(10): | ||||
|             cnt += 1 | ||||
|         self.assertEqual(cnt, 100) | ||||
|  | ||||
|         # test chaining | ||||
|         qs = A.objects.all() | ||||
|         qs = qs.limit(10).batch_size(20).skip(91) | ||||
|         cnt = 0 | ||||
|         for a in qs: | ||||
|             cnt += 1 | ||||
|         self.assertEqual(cnt, 9) | ||||
|  | ||||
|         # test invalid batch size | ||||
|         qs = A.objects.batch_size(-1) | ||||
|         with self.assertRaises(ValueError): | ||||
|             list(qs) | ||||
|  | ||||
|     def test_update_write_concern(self): | ||||
|         """Test that passing write_concern works""" | ||||
|  | ||||
|         self.Person.drop_collection() | ||||
|  | ||||
|         write_concern = {"fsync": True} | ||||
| @@ -365,18 +397,14 @@ class QuerySetTest(unittest.TestCase): | ||||
|         """Test to ensure that update is passed a value to update to""" | ||||
|         self.Person.drop_collection() | ||||
|  | ||||
|         author = self.Person(name='Test User') | ||||
|         author.save() | ||||
|         author = self.Person.objects.create(name='Test User') | ||||
|  | ||||
|         def update_raises(): | ||||
|         with self.assertRaises(OperationError): | ||||
|             self.Person.objects(pk=author.pk).update({}) | ||||
|  | ||||
|         def update_one_raises(): | ||||
|         with self.assertRaises(OperationError): | ||||
|             self.Person.objects(pk=author.pk).update_one({}) | ||||
|  | ||||
|         self.assertRaises(OperationError, update_raises) | ||||
|         self.assertRaises(OperationError, update_one_raises) | ||||
|  | ||||
|     def test_update_array_position(self): | ||||
|         """Ensure that updating by array position works. | ||||
|  | ||||
| @@ -404,8 +432,8 @@ class QuerySetTest(unittest.TestCase): | ||||
|         Blog.objects.create(posts=[post2, post1]) | ||||
|  | ||||
|         # Update all of the first comments of second posts of all blogs | ||||
|         Blog.objects().update(set__posts__1__comments__0__name="testc") | ||||
|         testc_blogs = Blog.objects(posts__1__comments__0__name="testc") | ||||
|         Blog.objects().update(set__posts__1__comments__0__name='testc') | ||||
|         testc_blogs = Blog.objects(posts__1__comments__0__name='testc') | ||||
|         self.assertEqual(testc_blogs.count(), 2) | ||||
|  | ||||
|         Blog.drop_collection() | ||||
| @@ -414,14 +442,13 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         # Update only the first blog returned by the query | ||||
|         Blog.objects().update_one( | ||||
|             set__posts__1__comments__1__name="testc") | ||||
|         testc_blogs = Blog.objects(posts__1__comments__1__name="testc") | ||||
|             set__posts__1__comments__1__name='testc') | ||||
|         testc_blogs = Blog.objects(posts__1__comments__1__name='testc') | ||||
|         self.assertEqual(testc_blogs.count(), 1) | ||||
|  | ||||
|         # Check that using this indexing syntax on a non-list fails | ||||
|         def non_list_indexing(): | ||||
|             Blog.objects().update(set__posts__1__comments__0__name__1="asdf") | ||||
|         self.assertRaises(InvalidQueryError, non_list_indexing) | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             Blog.objects().update(set__posts__1__comments__0__name__1='asdf') | ||||
|  | ||||
|         Blog.drop_collection() | ||||
|  | ||||
| @@ -489,15 +516,12 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) | ||||
|  | ||||
|         # Nested updates aren't supported yet. | ||||
|         def update_nested(): | ||||
|         with self.assertRaises(OperationError): | ||||
|             Simple.drop_collection() | ||||
|             Simple(x=[{'test': [1, 2, 3, 4]}]).save() | ||||
|             Simple.objects(x__test=2).update(set__x__S__test__S=3) | ||||
|             self.assertEqual(simple.x, [1, 2, 3, 4]) | ||||
|  | ||||
|         self.assertRaises(OperationError, update_nested) | ||||
|         Simple.drop_collection() | ||||
|  | ||||
|     def test_update_using_positional_operator_embedded_document(self): | ||||
|         """Ensure that the embedded documents can be updated using the positional | ||||
|         operator.""" | ||||
| @@ -590,11 +614,11 @@ class QuerySetTest(unittest.TestCase): | ||||
|             members = DictField() | ||||
|  | ||||
|         club = Club() | ||||
|         club.members['John'] = dict(gender="M", age=13) | ||||
|         club.members['John'] = {'gender': 'M', 'age': 13} | ||||
|         club.save() | ||||
|  | ||||
|         Club.objects().update( | ||||
|             set__members={"John": dict(gender="F", age=14)}) | ||||
|             set__members={"John": {'gender': 'F', 'age': 14}}) | ||||
|  | ||||
|         club = Club.objects().first() | ||||
|         self.assertEqual(club.members['John']['gender'], "F") | ||||
| @@ -775,7 +799,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             post2 = Post(comments=[comment2, comment2]) | ||||
|  | ||||
|             blogs = [] | ||||
|             for i in xrange(1, 100): | ||||
|             for i in range(1, 100): | ||||
|                 blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) | ||||
|  | ||||
|             Blog.objects.insert(blogs, load_bulk=False) | ||||
| @@ -812,30 +836,31 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         self.assertEqual(Blog.objects.count(), 2) | ||||
|  | ||||
|         # test handles people trying to upsert | ||||
|         def throw_operation_error(): | ||||
|         # test inserting an existing document (shouldn't be allowed) | ||||
|         with self.assertRaises(OperationError): | ||||
|             blog = Blog.objects.first() | ||||
|             Blog.objects.insert(blog) | ||||
|  | ||||
|         # test inserting a query set | ||||
|         with self.assertRaises(OperationError): | ||||
|             blogs = Blog.objects | ||||
|             Blog.objects.insert(blogs) | ||||
|  | ||||
|         self.assertRaises(OperationError, throw_operation_error) | ||||
|  | ||||
|         # Test can insert new doc | ||||
|         # insert a new doc | ||||
|         new_post = Blog(title="code123", id=ObjectId()) | ||||
|         Blog.objects.insert(new_post) | ||||
|  | ||||
|         # test handles other classes being inserted | ||||
|         def throw_operation_error_wrong_doc(): | ||||
|             class Author(Document): | ||||
|                 pass | ||||
|         class Author(Document): | ||||
|             pass | ||||
|  | ||||
|         # try inserting a different document class | ||||
|         with self.assertRaises(OperationError): | ||||
|             Blog.objects.insert(Author()) | ||||
|  | ||||
|         self.assertRaises(OperationError, throw_operation_error_wrong_doc) | ||||
|  | ||||
|         def throw_operation_error_not_a_document(): | ||||
|         # try inserting a non-document | ||||
|         with self.assertRaises(OperationError): | ||||
|             Blog.objects.insert("HELLO WORLD") | ||||
|  | ||||
|         self.assertRaises(OperationError, throw_operation_error_not_a_document) | ||||
|  | ||||
|         Blog.drop_collection() | ||||
|  | ||||
|         blog1 = Blog(title="code", posts=[post1, post2]) | ||||
| @@ -855,14 +880,13 @@ class QuerySetTest(unittest.TestCase): | ||||
|         blog3 = Blog(title="baz", posts=[post1, post2]) | ||||
|         Blog.objects.insert([blog1, blog2]) | ||||
|  | ||||
|         def throw_operation_error_not_unique(): | ||||
|         with self.assertRaises(NotUniqueError): | ||||
|             Blog.objects.insert([blog2, blog3]) | ||||
|  | ||||
|         self.assertRaises(NotUniqueError, throw_operation_error_not_unique) | ||||
|         self.assertEqual(Blog.objects.count(), 2) | ||||
|  | ||||
|         Blog.objects.insert([blog2, blog3], write_concern={"w": 0, | ||||
|                                                            'continue_on_error': True}) | ||||
|         Blog.objects.insert([blog2, blog3], | ||||
|                             write_concern={"w": 0, 'continue_on_error': True}) | ||||
|         self.assertEqual(Blog.objects.count(), 3) | ||||
|  | ||||
|     def test_get_changed_fields_query_count(self): | ||||
| @@ -995,7 +1019,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         for i in xrange(1000): | ||||
|         for i in range(1000): | ||||
|             Doc(number=i).save() | ||||
|  | ||||
|         docs = Doc.objects.order_by('number') | ||||
| @@ -1149,7 +1173,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|         qs = list(qs) | ||||
|         expected = list(expected) | ||||
|         self.assertEqual(len(qs), len(expected)) | ||||
|         for i in xrange(len(qs)): | ||||
|         for i in range(len(qs)): | ||||
|             self.assertEqual(qs[i], expected[i]) | ||||
|  | ||||
|     def test_ordering(self): | ||||
| @@ -1189,7 +1213,8 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertSequence(qs, expected) | ||||
|  | ||||
|     def test_clear_ordering(self): | ||||
|         """ Ensure that the default ordering can be cleared by calling order_by(). | ||||
|         """Ensure that the default ordering can be cleared by calling | ||||
|         order_by() w/o any arguments. | ||||
|         """ | ||||
|         class BlogPost(Document): | ||||
|             title = StringField() | ||||
| @@ -1205,12 +1230,13 @@ class QuerySetTest(unittest.TestCase): | ||||
|             BlogPost.objects.filter(title='whatever').first() | ||||
|             self.assertEqual(len(q.get_ops()), 1) | ||||
|             self.assertEqual( | ||||
|                 q.get_ops()[0]['query']['$orderby'], {u'published_date': -1}) | ||||
|                 q.get_ops()[0]['query']['$orderby'], | ||||
|                 {'published_date': -1} | ||||
|             ) | ||||
|  | ||||
|         with db_ops_tracker() as q: | ||||
|             BlogPost.objects.filter(title='whatever').order_by().first() | ||||
|             self.assertEqual(len(q.get_ops()), 1) | ||||
|             print q.get_ops()[0]['query'] | ||||
|             self.assertFalse('$orderby' in q.get_ops()[0]['query']) | ||||
|  | ||||
|     def test_no_ordering_for_get(self): | ||||
| @@ -1239,7 +1265,8 @@ class QuerySetTest(unittest.TestCase): | ||||
|             self.assertFalse('$orderby' in q.get_ops()[0]['query']) | ||||
|  | ||||
|     def test_find_embedded(self): | ||||
|         """Ensure that an embedded document is properly returned from a query. | ||||
|         """Ensure that an embedded document is properly returned from | ||||
|         a query. | ||||
|         """ | ||||
|         class User(EmbeddedDocument): | ||||
|             name = StringField() | ||||
| @@ -1250,16 +1277,31 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         post = BlogPost(content='Had a good coffee today...') | ||||
|         post.author = User(name='Test User') | ||||
|         post.save() | ||||
|         BlogPost.objects.create( | ||||
|             author=User(name='Test User'), | ||||
|             content='Had a good coffee today...' | ||||
|         ) | ||||
|  | ||||
|         result = BlogPost.objects.first() | ||||
|         self.assertTrue(isinstance(result.author, User)) | ||||
|         self.assertEqual(result.author.name, 'Test User') | ||||
|  | ||||
|     def test_find_empty_embedded(self): | ||||
|         """Ensure that you can save and find an empty embedded document.""" | ||||
|         class User(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             content = StringField() | ||||
|             author = EmbeddedDocumentField(User) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         BlogPost.objects.create(content='Anonymous post...') | ||||
|  | ||||
|         result = BlogPost.objects.get(author=None) | ||||
|         self.assertEqual(result.author, None) | ||||
|  | ||||
|     def test_find_dict_item(self): | ||||
|         """Ensure that DictField items may be found. | ||||
|         """ | ||||
| @@ -1667,7 +1709,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         Log.drop_collection() | ||||
|  | ||||
|         for i in xrange(10): | ||||
|         for i in range(10): | ||||
|             Log().save() | ||||
|  | ||||
|         Log.objects()[3:5].delete() | ||||
| @@ -1867,12 +1909,10 @@ class QuerySetTest(unittest.TestCase): | ||||
|         Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') | ||||
|         self.assertEqual(Site.objects.first().collaborators, []) | ||||
|  | ||||
|         def pull_all(): | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             Site.objects(id=s.id).update_one( | ||||
|                 pull_all__collaborators__user=['Ross']) | ||||
|  | ||||
|         self.assertRaises(InvalidQueryError, pull_all) | ||||
|  | ||||
|     def test_pull_from_nested_embedded(self): | ||||
|  | ||||
|         class User(EmbeddedDocument): | ||||
| @@ -1903,12 +1943,10 @@ class QuerySetTest(unittest.TestCase): | ||||
|             pull__collaborators__unhelpful={'name': 'Frank'}) | ||||
|         self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) | ||||
|  | ||||
|         def pull_all(): | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             Site.objects(id=s.id).update_one( | ||||
|                 pull_all__collaborators__helpful__name=['Ross']) | ||||
|  | ||||
|         self.assertRaises(InvalidQueryError, pull_all) | ||||
|  | ||||
|     def test_pull_from_nested_mapfield(self): | ||||
|  | ||||
|         class Collaborator(EmbeddedDocument): | ||||
| @@ -1937,12 +1975,10 @@ class QuerySetTest(unittest.TestCase): | ||||
|             pull__collaborators__unhelpful={'user': 'Frank'}) | ||||
|         self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) | ||||
|  | ||||
|         def pull_all(): | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             Site.objects(id=s.id).update_one( | ||||
|                 pull_all__collaborators__helpful__user=['Ross']) | ||||
|  | ||||
|         self.assertRaises(InvalidQueryError, pull_all) | ||||
|  | ||||
|     def test_update_one_pop_generic_reference(self): | ||||
|  | ||||
|         class BlogTag(Document): | ||||
| @@ -2199,6 +2235,21 @@ class QuerySetTest(unittest.TestCase): | ||||
|             a.author.name for a in Author.objects.order_by('-author__age')] | ||||
|         self.assertEqual(names, ['User A', 'User B', 'User C']) | ||||
|  | ||||
|     def test_comment(self): | ||||
|         """Make sure adding a comment to the query works.""" | ||||
|         class User(Document): | ||||
|             age = IntField() | ||||
|  | ||||
|         with db_ops_tracker() as q: | ||||
|             adult = (User.objects.filter(age__gte=18) | ||||
|                 .comment('looking for an adult') | ||||
|                 .first()) | ||||
|             ops = q.get_ops() | ||||
|             self.assertEqual(len(ops), 1) | ||||
|             op = ops[0] | ||||
|             self.assertEqual(op['query']['$query'], {'age': {'$gte': 18}}) | ||||
|             self.assertEqual(op['query']['$comment'], 'looking for an adult') | ||||
|  | ||||
|     def test_map_reduce(self): | ||||
|         """Ensure map/reduce is both mapping and reducing. | ||||
|         """ | ||||
| @@ -2552,7 +2603,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|         BlogPost(hits=2, tags=['music', 'actors']).save() | ||||
|  | ||||
|         def test_assertions(f): | ||||
|             f = dict((key, int(val)) for key, val in f.items()) | ||||
|             f = {key: int(val) for key, val in f.items()} | ||||
|             self.assertEqual( | ||||
|                 set(['music', 'film', 'actors', 'watch']), set(f.keys())) | ||||
|             self.assertEqual(f['music'], 3) | ||||
| @@ -2567,7 +2618,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         # Ensure query is taken into account | ||||
|         def test_assertions(f): | ||||
|             f = dict((key, int(val)) for key, val in f.items()) | ||||
|             f = {key: int(val) for key, val in f.items()} | ||||
|             self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) | ||||
|             self.assertEqual(f['music'], 2) | ||||
|             self.assertEqual(f['actors'], 1) | ||||
| @@ -2631,7 +2682,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|         doc.save() | ||||
|  | ||||
|         def test_assertions(f): | ||||
|             f = dict((key, int(val)) for key, val in f.items()) | ||||
|             f = {key: int(val) for key, val in f.items()} | ||||
|             self.assertEqual( | ||||
|                 set(['62-3331-1656', '62-3332-1656']), set(f.keys())) | ||||
|             self.assertEqual(f['62-3331-1656'], 2) | ||||
| @@ -2645,7 +2696,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         # Ensure query is taken into account | ||||
|         def test_assertions(f): | ||||
|             f = dict((key, int(val)) for key, val in f.items()) | ||||
|             f = {key: int(val) for key, val in f.items()} | ||||
|             self.assertEqual(set(['62-3331-1656']), set(f.keys())) | ||||
|             self.assertEqual(f['62-3331-1656'], 2) | ||||
|  | ||||
| @@ -2752,10 +2803,10 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         Test.drop_collection() | ||||
|  | ||||
|         for i in xrange(50): | ||||
|         for i in range(50): | ||||
|             Test(val=1).save() | ||||
|  | ||||
|         for i in xrange(20): | ||||
|         for i in range(20): | ||||
|             Test(val=2).save() | ||||
|  | ||||
|         freqs = Test.objects.item_frequencies( | ||||
| @@ -3545,7 +3596,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         Post.drop_collection() | ||||
|  | ||||
|         for i in xrange(10): | ||||
|         for i in range(10): | ||||
|             Post(title="Post %s" % i).save() | ||||
|  | ||||
|         self.assertEqual(5, Post.objects.limit(5).skip(5).count(with_limit_and_skip=True)) | ||||
| @@ -3560,7 +3611,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             pass | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|         for i in xrange(0, 10): | ||||
|         for i in range(0, 10): | ||||
|             MyDoc().save() | ||||
|  | ||||
|         self.assertEqual(MyDoc.objects.count(), 10) | ||||
| @@ -3616,7 +3667,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         Number.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 101): | ||||
|         for i in range(1, 101): | ||||
|             t = Number(n=i) | ||||
|             t.save() | ||||
|  | ||||
| @@ -3763,11 +3814,9 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertTrue(a in results) | ||||
|         self.assertTrue(c in results) | ||||
|  | ||||
|         def invalid_where(): | ||||
|         with self.assertRaises(TypeError): | ||||
|             list(IntPair.objects.where(fielda__gte=3)) | ||||
|  | ||||
|         self.assertRaises(TypeError, invalid_where) | ||||
|  | ||||
|     def test_scalar(self): | ||||
|  | ||||
|         class Organization(Document): | ||||
| @@ -4023,7 +4072,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         # Test larger slice __repr__ | ||||
|         self.Person.objects.delete() | ||||
|         for i in xrange(55): | ||||
|         for i in range(55): | ||||
|             self.Person(name='A%s' % i, age=i).save() | ||||
|  | ||||
|         self.assertEqual(self.Person.objects.scalar('name').count(), 55) | ||||
| @@ -4031,7 +4080,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             "A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) | ||||
|         self.assertEqual( | ||||
|             "A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) | ||||
|         if PY3: | ||||
|         if six.PY3: | ||||
|             self.assertEqual("['A1', 'A2']", "%s" % self.Person.objects.order_by( | ||||
|                 'age').scalar('name')[1:3]) | ||||
|             self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by( | ||||
| @@ -4049,7 +4098,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         pks = self.Person.objects.order_by('age').scalar('pk')[1:3] | ||||
|         names = self.Person.objects.scalar('name').in_bulk(list(pks)).values() | ||||
|         if PY3: | ||||
|         if six.PY3: | ||||
|             expected = "['A1', 'A2']" | ||||
|         else: | ||||
|             expected = "[u'A1', u'A2']" | ||||
| @@ -4405,7 +4454,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             name = StringField() | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         for i in xrange(100): | ||||
|         for i in range(100): | ||||
|             Person(name="No: %s" % i).save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -4436,7 +4485,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             name = StringField() | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         for i in xrange(100): | ||||
|         for i in range(100): | ||||
|             Person(name="No: %s" % i).save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -4480,7 +4529,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             fields = DictField() | ||||
|  | ||||
|         Noddy.drop_collection() | ||||
|         for i in xrange(100): | ||||
|         for i in range(100): | ||||
|             noddy = Noddy() | ||||
|             for j in range(20): | ||||
|                 noddy.fields["key" + str(j)] = "value " + str(j) | ||||
| @@ -4492,7 +4541,9 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual(counter, 100) | ||||
|  | ||||
|         self.assertEqual(len(list(docs)), 100) | ||||
|         self.assertRaises(TypeError, lambda: len(docs)) | ||||
|  | ||||
|         with self.assertRaises(TypeError): | ||||
|             len(docs) | ||||
|  | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
| @@ -4681,7 +4732,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             name = StringField() | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         for i in xrange(100): | ||||
|         for i in range(100): | ||||
|             Person(name="No: %s" % i).save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -4805,10 +4856,10 @@ class QuerySetTest(unittest.TestCase): | ||||
|         ]) | ||||
|  | ||||
|     def test_delete_count(self): | ||||
|         [self.Person(name="User {0}".format(i), age=i * 10).save() for i in xrange(1, 4)] | ||||
|         [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] | ||||
|         self.assertEqual(self.Person.objects().delete(), 3)  # test ordinary QuerySet delete count | ||||
|  | ||||
|         [self.Person(name="User {0}".format(i), age=i * 10).save() for i in xrange(1, 4)] | ||||
|         [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] | ||||
|  | ||||
|         self.assertEqual(self.Person.objects().skip(1).delete(), 2)  # test Document delete with existing documents | ||||
|  | ||||
| @@ -4817,12 +4868,14 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|     def test_max_time_ms(self): | ||||
|         # 778: max_time_ms can get only int or None as input | ||||
|         self.assertRaises(TypeError, self.Person.objects(name="name").max_time_ms, "not a number") | ||||
|         self.assertRaises(TypeError, | ||||
|                           self.Person.objects(name="name").max_time_ms, | ||||
|                           'not a number') | ||||
|  | ||||
|     def test_subclass_field_query(self): | ||||
|         class Animal(Document): | ||||
|             is_mamal = BooleanField() | ||||
|             meta = dict(allow_inheritance=True) | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         class Cat(Animal): | ||||
|             whiskers_length = FloatField() | ||||
| @@ -4860,6 +4913,56 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         self.assertEqual(1, Doc.objects(item__type__="axe").count()) | ||||
|  | ||||
|     def test_len_during_iteration(self): | ||||
|         """Tests that calling len on a queyset during iteration doesn't | ||||
|         stop paging. | ||||
|         """ | ||||
|         class Data(Document): | ||||
|             pass | ||||
|  | ||||
|         for i in range(300): | ||||
|             Data().save() | ||||
|  | ||||
|         records = Data.objects.limit(250) | ||||
|  | ||||
|         # This should pull all 250 docs from mongo and populate the result | ||||
|         # cache | ||||
|         len(records) | ||||
|  | ||||
|         # Assert that iterating over documents in the qs touches every | ||||
|         # document even if we call len(qs) midway through the iteration. | ||||
|         for i, r in enumerate(records): | ||||
|             if i == 58: | ||||
|                 len(records) | ||||
|         self.assertEqual(i, 249) | ||||
|  | ||||
|         # Assert the same behavior is true even if we didn't pre-populate the | ||||
|         # result cache. | ||||
|         records = Data.objects.limit(250) | ||||
|         for i, r in enumerate(records): | ||||
|             if i == 58: | ||||
|                 len(records) | ||||
|         self.assertEqual(i, 249) | ||||
|  | ||||
|     def test_iteration_within_iteration(self): | ||||
|         """You should be able to reliably iterate over all the documents | ||||
|         in a given queryset even if there are multiple iterations of it | ||||
|         happening at the same time. | ||||
|         """ | ||||
|         class Data(Document): | ||||
|             pass | ||||
|  | ||||
|         for i in range(300): | ||||
|             Data().save() | ||||
|  | ||||
|         qs = Data.objects.limit(250) | ||||
|         for i, doc in enumerate(qs): | ||||
|             for j, doc2 in enumerate(qs): | ||||
|                 pass | ||||
|  | ||||
|         self.assertEqual(i, 249) | ||||
|         self.assertEqual(j, 249) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
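Of the tests added to this file, test_batch_size is the one introducing new public API: QuerySet.batch_size() controls how many documents each server round-trip fetches and chains with the other queryset modifiers. A short usage sketch under the same assumptions as the test (a local MongoDB; Item is an illustrative document class, not from the suite):

from mongoengine import Document, StringField, connect

connect(db='mongoenginetest')


class Item(Document):  # illustrative document, not from the suite
    s = StringField()


Item.drop_collection()
for i in range(100):
    Item.objects.create(s=str(i))

# Fetch in chunks of 10 documents per round-trip; limit()/skip()
# still apply as usual.
count = 0
for item in Item.objects.batch_size(10).limit(25):
    count += 1
assert count == 25

# Per the new test, a negative batch size raises ValueError when
# the queryset is evaluated.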
| @@ -238,7 +238,8 @@ class TransformTest(unittest.TestCase): | ||||
|         box = [(35.0, -125.0), (40.0, -100.0)] | ||||
|         # I *meant* to execute location__within_box=box | ||||
|         events = Event.objects(location__within=box) | ||||
|         self.assertRaises(InvalidQueryError, lambda: events.count()) | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             events.count() | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|   | ||||
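The lambda removed here, like the one-off helper functions removed throughout this changeset, gives way to assertRaises used as a context manager, which unittest has supported since Python 2.7. A standalone sketch of the two equivalent spellings:

import unittest


class EquivalentSpellings(unittest.TestCase):

    def test_callable_style(self):
        # Pre-2.7 spelling: pass a callable plus its arguments.
        self.assertRaises(ZeroDivisionError, lambda: 1 / 0)

    def test_context_manager_style(self):
        # 2.7+ spelling: reads better for multi-line statements.
        with self.assertRaises(ZeroDivisionError):
            1 / 0


if __name__ == '__main__':
    unittest.main()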
| @@ -185,7 +185,7 @@ class QTest(unittest.TestCase): | ||||
|             x = IntField() | ||||
|  | ||||
|         TestDoc.drop_collection() | ||||
|         for i in xrange(1, 101): | ||||
|         for i in range(1, 101): | ||||
|             t = TestDoc(x=i) | ||||
|             t.save() | ||||
|  | ||||
| @@ -268,14 +268,13 @@ class QTest(unittest.TestCase): | ||||
|         self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3) | ||||
|  | ||||
|         # Test invalid query objs | ||||
|         def wrong_query_objs(): | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             self.Person.objects('user1') | ||||
|  | ||||
|         def wrong_query_objs_filter(): | ||||
|             self.Person.objects('user1') | ||||
|         # filter should fail, too | ||||
|         with self.assertRaises(InvalidQueryError): | ||||
|             self.Person.objects.filter('user1') | ||||
|  | ||||
|         self.assertRaises(InvalidQueryError, wrong_query_objs) | ||||
|         self.assertRaises(InvalidQueryError, wrong_query_objs_filter) | ||||
|  | ||||
|     def test_q_regex(self): | ||||
|         """Ensure that Q objects can be queried using regexes. | ||||
|   | ||||
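The `xrange` → `range` changes here and in the files below are plain Python 3 fixes: `xrange` is gone on Python 3, while `range` exists on both interpreters (eager on 2, lazy on 3), which is harmless at these loop sizes. Codebases that do need a lazy range on both versions usually keep a shim along these lines (shown for context; this branch doesn't need one):

```python
# Bind a portable lazy-range name once, near the top of a compat module.
try:
    range_ = xrange    # Python 2: xrange is the lazy variant
except NameError:
    range_ = range     # Python 3: range is already lazy

total = sum(1 for _ in range_(1, 101))
assert total == 100
```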
| @@ -1,9 +1,6 @@ | ||||
| import sys | ||||
| import datetime | ||||
| from pymongo.errors import OperationFailure | ||||
|  | ||||
| sys.path[0:0] = [""] | ||||
|  | ||||
| try: | ||||
|     import unittest2 as unittest | ||||
| except ImportError: | ||||
| @@ -19,7 +16,8 @@ from mongoengine import ( | ||||
| ) | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
| import mongoengine.connection | ||||
| from mongoengine.connection import get_db, get_connection, ConnectionError | ||||
| from mongoengine.connection import (MongoEngineConnectionError, get_db, | ||||
|                                     get_connection) | ||||
|  | ||||
|  | ||||
| def get_tz_awareness(connection): | ||||
| @@ -159,7 +157,10 @@ class ConnectionTest(unittest.TestCase): | ||||
|         c.mongoenginetest.add_user("username", "password") | ||||
|  | ||||
|         if not IS_PYMONGO_3: | ||||
|             self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') | ||||
|             self.assertRaises( | ||||
|                 MongoEngineConnectionError, connect, 'testdb_uri_bad', | ||||
|                 host='mongodb://test:password@localhost' | ||||
|             ) | ||||
|  | ||||
|         connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') | ||||
|  | ||||
| @@ -174,19 +175,9 @@ class ConnectionTest(unittest.TestCase): | ||||
|         c.mongoenginetest.system.users.remove({}) | ||||
|  | ||||
|     def test_connect_uri_without_db(self): | ||||
|         """Ensure connect() method works properly with uri's without database_name | ||||
|         """Ensure connect() method works properly if the URI doesn't | ||||
|         include a database name. | ||||
|         """ | ||||
|         c = connect(db='mongoenginetest', alias='admin') | ||||
|         c.admin.system.users.remove({}) | ||||
|         c.mongoenginetest.system.users.remove({}) | ||||
|  | ||||
|         c.admin.add_user("admin", "password") | ||||
|         c.admin.authenticate("admin", "password") | ||||
|         c.mongoenginetest.add_user("username", "password") | ||||
|  | ||||
|         if not IS_PYMONGO_3: | ||||
|             self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') | ||||
|  | ||||
|         connect("mongoenginetest", host='mongodb://localhost/') | ||||
|  | ||||
|         conn = get_connection() | ||||
| @@ -196,8 +187,31 @@ class ConnectionTest(unittest.TestCase): | ||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|         self.assertEqual(db.name, 'mongoenginetest') | ||||
|  | ||||
|         c.admin.system.users.remove({}) | ||||
|         c.mongoenginetest.system.users.remove({}) | ||||
|     def test_connect_uri_default_db(self): | ||||
|         """Ensure connect() defaults to the right database name if | ||||
|         the URI and the database_name don't explicitly specify it. | ||||
|         """ | ||||
|         connect(host='mongodb://localhost/') | ||||
|  | ||||
|         conn = get_connection() | ||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) | ||||
|  | ||||
|         db = get_db() | ||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|         self.assertEqual(db.name, 'test') | ||||
|  | ||||
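The new test encodes the fallback order for the database name: with no path component in the URI and no `db` argument, the connection lands on MongoDB's conventional default database, `test`. In usage terms (assuming a local mongod, as these tests do):

```python
from mongoengine import connect
from mongoengine.connection import get_db

# No database in the URI and no db= argument: pymongo's default applies.
connect(host='mongodb://localhost/')
assert get_db().name == 'test'

# An explicit db= argument (or a path in the URI) takes precedence.
connect(db='mongoenginetest', alias='explicit')
assert get_db('explicit').name == 'mongoenginetest'
```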
|     def test_uri_without_credentials_doesnt_override_conn_settings(self): | ||||
|         """Ensure connect() uses the username & password params if the URI | ||||
|         doesn't explicitly specify them. | ||||
|         """ | ||||
|         c = connect(host='mongodb://localhost/mongoenginetest', | ||||
|                     username='user', | ||||
|                     password='pass') | ||||
|  | ||||
|         # OperationFailure means that mongoengine attempted authentication | ||||
|         # w/ the provided username/password and failed - that's the desired | ||||
|         # behavior. If the URI's empty credentials overrode the explicit | ||||
|         # username/password arguments, no authentication would be attempted | ||||
|         # and get_db() would succeed here instead. | ||||
|         self.assertRaises(OperationFailure, get_db) | ||||
|  | ||||
|     def test_connect_uri_with_authsource(self): | ||||
|         """Ensure that the connect() method works well with | ||||
| @@ -216,10 +230,11 @@ class ConnectionTest(unittest.TestCase): | ||||
|             self.assertRaises(OperationFailure, test_conn.server_info) | ||||
|         else: | ||||
|             self.assertRaises( | ||||
|                 ConnectionError, connect, 'mongoenginetest', alias='test1', | ||||
|                 MongoEngineConnectionError, connect, 'mongoenginetest', | ||||
|                 alias='test1', | ||||
|                 host='mongodb://username2:password@localhost/mongoenginetest' | ||||
|             ) | ||||
|             self.assertRaises(ConnectionError, get_db, 'test1') | ||||
|             self.assertRaises(MongoEngineConnectionError, get_db, 'test1') | ||||
|  | ||||
|         # Authentication succeeds with "authSource" | ||||
|         connect( | ||||
| @@ -240,7 +255,7 @@ class ConnectionTest(unittest.TestCase): | ||||
|         """ | ||||
|         register_connection('testdb', 'mongoenginetest2') | ||||
|  | ||||
|         self.assertRaises(ConnectionError, get_connection) | ||||
|         self.assertRaises(MongoEngineConnectionError, get_connection) | ||||
|         conn = get_connection('testdb') | ||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) | ||||
|  | ||||
|   | ||||
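The `ConnectionError` → `MongoEngineConnectionError` rename running through this file is more than cosmetic: Python 3.3+ ships a builtin `ConnectionError` (the base of `ConnectionRefusedError` and friends), and a library exception defined under the same name shadows the builtin wherever it is imported. A short illustration of the hazard:

```python
# Shadowing the builtin breaks handlers for real socket-level errors.
class ConnectionError(Exception):             # what the old mongoengine name did
    pass

try:
    raise ConnectionRefusedError('refused')   # subclass of the *builtin* ConnectionError
except ConnectionError:
    print('caught')                           # never runs: the local class doesn't match
except OSError:
    print('only the OSError handler sees it')
```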
| @@ -1,5 +1,3 @@ | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| @@ -79,7 +77,7 @@ class ContextManagersTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         user = User.objects.first() | ||||
| @@ -117,7 +115,7 @@ class ContextManagersTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         user = User.objects.first() | ||||
| @@ -195,7 +193,7 @@ class ContextManagersTest(unittest.TestCase): | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(0, q) | ||||
|  | ||||
|             for i in xrange(1, 51): | ||||
|             for i in range(1, 51): | ||||
|                 db.test.find({}).count() | ||||
|  | ||||
|             self.assertEqual(50, q) | ||||
|   | ||||
| @@ -1,5 +1,6 @@ | ||||
| import unittest | ||||
| from mongoengine.base.datastructures import StrictDict, SemiStrictDict  | ||||
|  | ||||
| from mongoengine.base.datastructures import StrictDict, SemiStrictDict | ||||
|  | ||||
|  | ||||
| class TestStrictDict(unittest.TestCase): | ||||
| @@ -13,9 +14,18 @@ class TestStrictDict(unittest.TestCase): | ||||
|         d = self.dtype(a=1, b=1, c=1) | ||||
|         self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) | ||||
|  | ||||
|     def test_repr(self): | ||||
|         d = self.dtype(a=1, b=2, c=3) | ||||
|         self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}') | ||||
|  | ||||
|         # make sure quotes are escaped properly | ||||
|         d = self.dtype(a='"', b="'", c="") | ||||
|         self.assertEqual(repr(d), '{"a": \'"\', "b": "\'", "c": \'\'}') | ||||
|  | ||||
|     def test_init_fails_on_nonexisting_attrs(self): | ||||
|         self.assertRaises(AttributeError, lambda: self.dtype(a=1, b=2, d=3)) | ||||
|          | ||||
|         with self.assertRaises(AttributeError): | ||||
|             self.dtype(a=1, b=2, d=3) | ||||
|  | ||||
|     def test_eq(self): | ||||
|         d = self.dtype(a=1, b=1, c=1) | ||||
|         dd = self.dtype(a=1, b=1, c=1) | ||||
| @@ -24,7 +34,7 @@ class TestStrictDict(unittest.TestCase): | ||||
|         g = self.strict_dict_class(("a", "b", "c", "d"))(a=1, b=1, c=1, d=1) | ||||
|         h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1) | ||||
|         i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2) | ||||
|          | ||||
|  | ||||
|         self.assertEqual(d, dd) | ||||
|         self.assertNotEqual(d, e) | ||||
|         self.assertNotEqual(d, f) | ||||
| @@ -37,20 +47,18 @@ class TestStrictDict(unittest.TestCase): | ||||
|         d = self.dtype() | ||||
|         d.a = 1 | ||||
|         self.assertEqual(d.a, 1) | ||||
|         self.assertRaises(AttributeError, lambda: d.b) | ||||
|      | ||||
|         self.assertRaises(AttributeError, getattr, d, 'b') | ||||
|  | ||||
|     def test_setattr_raises_on_nonexisting_attr(self): | ||||
|         d = self.dtype() | ||||
|  | ||||
|         def _f(): | ||||
|         with self.assertRaises(AttributeError): | ||||
|             d.x = 1 | ||||
|         self.assertRaises(AttributeError, _f) | ||||
|      | ||||
|  | ||||
|     def test_setattr_getattr_special(self): | ||||
|         d = self.strict_dict_class(["items"]) | ||||
|         d.items = 1 | ||||
|         self.assertEqual(d.items, 1) | ||||
|      | ||||
|  | ||||
|     def test_get(self): | ||||
|         d = self.dtype(a=1) | ||||
|         self.assertEqual(d.get('a'), 1) | ||||
| @@ -88,7 +96,7 @@ class TestSemiSrictDict(TestStrictDict): | ||||
|     def test_init_succeeds_with_nonexisting_attrs(self): | ||||
|         d = self.dtype(a=1, b=1, c=1, x=2) | ||||
|         self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2)) | ||||
|     | ||||
|  | ||||
|     def test_iter_with_nonexisting_attrs(self): | ||||
|         d = self.dtype(a=1, b=1, c=1, x=2) | ||||
|         self.assertEqual(list(d), ['a', 'b', 'c', 'x']) | ||||
|   | ||||
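The `StrictDict` behaviors exercised above (construction and attribute assignment failing for unknown names, plus the JSON-ish `repr`) are what a `__slots__`-based design gives you. A minimal sketch under that assumption (`make_strict` is hypothetical; mongoengine generates its real `StrictDict` classes differently):

```python
def make_strict(allowed):
    """Build a class that accepts only the given attribute names."""
    class Strict(object):
        __slots__ = tuple(allowed)     # no __dict__, so unknown attrs can't exist

        def __init__(self, **kwargs):
            for k, v in kwargs.items():
                setattr(self, k, v)    # AttributeError for names outside __slots__

        def __repr__(self):
            return '{%s}' % ', '.join(
                '"%s": %r' % (k, getattr(self, k))
                for k in self.__slots__ if hasattr(self, k))
    return Strict

D = make_strict(('a', 'b', 'c'))
d = D(a=1, b=2, c=3)
assert repr(d) == '{"a": 1, "b": 2, "c": 3}'
try:
    d.x = 1
except AttributeError:
    pass                               # exactly what the setattr test expects
```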
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from bson import DBRef, ObjectId | ||||
| @@ -32,7 +30,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
| @@ -90,7 +88,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
| @@ -162,7 +160,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 26): | ||||
|         for i in range(1, 26): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
| @@ -440,7 +438,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -531,7 +529,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -614,15 +612,15 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|             members.append(user) | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -687,7 +685,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -699,9 +697,9 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|             members += [a, b, c] | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -783,16 +781,16 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
|             members += [a] | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -866,7 +864,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i) | ||||
|             a.save() | ||||
|  | ||||
| @@ -878,9 +876,9 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|             members += [a, b, c] | ||||
|  | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) | ||||
|         group = Group(members={str(u.id): u for u in members}) | ||||
|         group.save() | ||||
|  | ||||
|         with query_counter() as q: | ||||
| @@ -1103,7 +1101,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         Group(name="Test", members=User.objects).save() | ||||
| @@ -1132,7 +1130,7 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             User(name='user %s' % i).save() | ||||
|  | ||||
|         Group(name="Test", members=User.objects).save() | ||||
| @@ -1169,7 +1167,7 @@ class FieldTest(unittest.TestCase): | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         members = [] | ||||
|         for i in xrange(1, 51): | ||||
|         for i in range(1, 51): | ||||
|             a = UserA(name='User A %s' % i).save() | ||||
|             b = UserB(name='User B %s' % i).save() | ||||
|             c = UserC(name='User C %s' % i).save() | ||||
|   | ||||
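The repeated `dict([(str(u.id), u) for u in members])` → `{str(u.id): u for u in members}` rewrites are safe on every interpreter this branch targets: dict comprehensions have existed since Python 2.7, and they skip the intermediate list of tuples. The same change in miniature:

```python
users = ['ann', 'bob', 'cid']

# Old spelling: build a list of pairs, then feed it to dict().
by_key_old = dict([(u[0], u) for u in users])

# New spelling: identical mapping, no throwaway list.
by_key_new = {u[0]: u for u in users}

assert by_key_old == by_key_new == {'a': 'ann', 'b': 'bob', 'c': 'cid'}
```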
| @@ -1,6 +1,3 @@ | ||||
| import sys | ||||
|  | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from pymongo import ReadPreference | ||||
| @@ -18,7 +15,7 @@ else: | ||||
|  | ||||
| import mongoengine | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import ConnectionError | ||||
| from mongoengine.connection import MongoEngineConnectionError | ||||
|  | ||||
|  | ||||
| class ConnectionTest(unittest.TestCase): | ||||
| @@ -41,7 +38,7 @@ class ConnectionTest(unittest.TestCase): | ||||
|             conn = connect(db='mongoenginetest', | ||||
|                            host="mongodb://localhost/mongoenginetest?replicaSet=rs", | ||||
|                            read_preference=READ_PREF) | ||||
|         except ConnectionError, e: | ||||
|         except MongoEngineConnectionError as e: | ||||
|             return | ||||
|  | ||||
|         if not isinstance(conn, CONN_CLASS): | ||||
|   | ||||
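`except ConnectionError, e:` is Python 2-only syntax and a `SyntaxError` on Python 3; `except ... as e:` parses on 2.6+ and 3.x alike, so this hunk fixes the exception name and the syntax in one step:

```python
# Python 2 only (SyntaxError on Python 3):
#     except MongoEngineConnectionError, e:
#
# Portable since Python 2.6:
try:
    raise ValueError('boom')
except ValueError as e:
    assert str(e) == 'boom'
```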
| @@ -1,6 +1,4 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
| sys.path[0:0] = [""] | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
|   | ||||