Compare commits


98 Commits

Author SHA1 Message Date
Omer Katz
b42b760393 Merge branch 'fix-reloading-strict' of https://github.com/paularmand/mongoengine into fix-reloading-strict and bumped version.
# Conflicts:
#	AUTHORS
2015-11-30 12:13:47 +02:00
Omer Katz
bf6f4c48c0 Merge pull request #1167 from BeardedSteve/upsert_one
Upsert one
2015-11-30 12:08:04 +02:00
Paul-Armand Verhaegen
6133f04841 Manual merge conflicts in AUTHORS 2015-11-27 23:55:55 +01:00
Paul-Armand Verhaegen
3c18f79ea4 Added test for reloading of strict with special fields #1156 2015-11-27 23:45:25 +01:00
srossiter
fc3db7942d updated changelog and version tuple 2015-11-24 12:56:59 +00:00
srossiter
164e2b2678 Docstring change and rename variable to avoid clash with kwargs 2015-11-24 12:53:09 +00:00
srossiter
b7b28390df Added upsert_one method on BaseQuerySet and modified test_upsert_one 2015-11-24 12:46:38 +00:00
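A minimal sketch of how the new queryset method can be used, assuming a simple Person document and a local MongoDB instance (names are illustrative, not taken from this changeset)::

    from mongoengine import Document, StringField, IntField, connect

    class Person(Document):
        name = StringField()
        age = IntField()

    connect('upsert_demo')  # assumed local test database

    # Inserts the document if no match exists, otherwise overwrites the
    # first match; either way the resulting document is returned.
    bob = Person.objects(name='Bob', age=30).upsert_one()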
Omer Katz
a6e996d921 Added #1165 to the changelog. 2015-11-24 07:06:54 +02:00
Omer Katz
07e666345d Merge pull request #1165 from touilleMan/bug-1164
Add SaveConditionError to __all__
2015-11-24 07:04:54 +02:00
Omer Katz
007f10d29d Merge pull request #1161 from AWhetter/docFix
Fixed a couple of documentation typos
2015-11-24 07:01:49 +02:00
Omer Katz
f9284d20ca Moved #1042 to the next version in the changelog. 2015-11-24 07:00:09 +02:00
Omer Katz
9050869781 Merge pull request #1042 from closeio/fix-read-preference
Fix read_preference
2015-11-24 06:58:51 +02:00
Stefan Wojcik
54975de0f3 fix read_preference for PyMongo 3+ 2015-11-23 10:46:52 -08:00
Stefan Wojcik
a7aead5138 re-create the cursor object whenever we apply read_preference 2015-11-23 10:46:52 -08:00
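A rough sketch of the usage these two commits address, assuming a replica set named 'rs0' is reachable (illustrative only)::

    from pymongo import ReadPreference
    from mongoengine import Document, StringField, connect

    class Bar(Document):
        txt = StringField()

    connect('rp_demo', replicaSet='rs0')  # assumed database and replica set names

    # The preference is stored on the queryset and the cursor is rebuilt with
    # it, so it still applies after chaining skip/limit/order_by on PyMongo 3+.
    bars = Bar.objects.skip(1).read_preference(ReadPreference.SECONDARY_PREFERRED)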
Omer Katz
6868f66f24 Merge pull request #1155 from AWhetter/fix837
ReferenceFields can now reference abstract Document types
2015-11-23 15:52:54 +02:00
Omer Katz
3c0b00e42d Added python 3.5 to the build. 2015-11-23 15:40:16 +02:00
Omer Katz
3327388f1f Merge pull request #1122 from larsbutler/improve-reverse_delete_rule-docs
fields.ReferenceField: add integer values to `reverse_delete_rule` docs
2015-11-23 15:29:20 +02:00
Ashley Whetter
04497aec36 Fixed setting dbref to True on abstract reference fields causing the reference to be stored incorrectly 2015-11-23 13:21:30 +00:00
Ashley Whetter
aa9d596930 Updated documentation for abstract reference changes 2015-11-23 13:21:30 +00:00
Ashley Whetter
f96e68cd11 Made type inheritance a validation check for abstract references 2015-11-23 13:20:35 +00:00
Ashley Whetter
013227323d ReferenceFields can now reference abstract Document types
A class that inherits from an abstract Document type is stored in the database
as a reference with a 'cls' field that is the class name of the document being
stored.

Fixes #837
2015-11-23 13:20:35 +00:00
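A short sketch of the behaviour described in this commit message, assuming a running local MongoDB instance (class names are illustrative)::

    from mongoengine import Document, StringField, ReferenceField, connect

    class Sibling(Document):
        name = StringField()
        meta = {'abstract': True}

    class Sister(Sibling):
        pass

    class Brother(Sibling):
        # References the abstract type; the stored DBRef carries a 'cls'
        # entry naming the concrete class of the referenced document.
        sibling = ReferenceField(Sibling)

    connect('abstract_ref_demo')  # assumed local test database
    Brother(name='Bob', sibling=Sister(name='Alice').save()).save()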
Omer Katz
19cbb442ee Added #1129 to the changelog. 2015-11-23 13:57:15 +02:00
Omer Katz
c0e7f341cb Merge pull request #1129 from illico/feature/arbitrary-metadata
Indirection-free optimized field metadata.
2015-11-23 12:49:48 +02:00
Emmanuel Leblond
0a1ba7c434 Add SaveConditionError to __all__ 2015-11-21 10:25:11 +01:00
Omer Katz
b708dabf98 Merge pull request #1158 from gmacon/551-shard-key-embedded
Allow shard key to be in an embedded document (#551)
2015-11-20 07:50:52 +02:00
George Macon
899e56e5b8 Add gmacon to AUTHORS 2015-11-19 17:15:43 -05:00
George Macon
f6d3bd8ccb Update changelog for #551 2015-11-19 17:15:27 -05:00
George Macon
deb5677a57 Allow shard key to be in an embedded document (#551) 2015-11-19 17:14:45 -05:00
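A condensed sketch of what #551 allows, mirroring the test added further down in this diff (database name is assumed)::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField, connect)

    class Foo(EmbeddedDocument):
        foo = StringField()

    class Bar(Document):
        foo = EmbeddedDocumentField(Foo)
        bar = StringField()
        # The shard key may now be a dotted path into an embedded document.
        meta = {'shard_key': ('foo.foo',)}

    connect('shard_demo')  # assumed local test database
    doc = Bar(foo=Foo(foo='hello'), bar='world').save()
    doc.bar = 'baz'
    doc.save()  # allowed; changing foo.foo instead would raise OperationError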
Omer Katz
5c464c3f5a Bumped version to 0.10.1 2015-11-18 14:07:39 +02:00
Ashley Whetter
cceef33fef Fixed a couple of documentation typos 2015-11-17 14:22:10 +00:00
Paul-Armand Verhaegen
ed8174fe36 Added Paul-Armand Verhaegen to contributor list 2015-11-15 15:32:26 +01:00
Paul-Armand Verhaegen
3c8906494f Added #1156 to changelog 2015-11-15 15:31:22 +01:00
Paul-Armand Verhaegen
6e745e9882 fixed wrong indentation style 2015-11-10 21:13:24 +01:00
Paul-Armand Verhaegen
fb4e9c3772 fix for reloading of strict with special fields 2015-11-10 20:43:49 +01:00
Omer Katz
2c282f9550 Added changelog entry for #1131. 2015-11-08 12:18:12 +02:00
Omer Katz
d92d41cb05 Merge pull request #1131 from noirbizarre/fix-instance-back-references
Fix instance back references
2015-11-08 12:14:37 +02:00
Omer Katz
82e7050561 Merge pull request #1134 from abonhomme/patch-1
docstring correction
2015-11-05 15:49:53 +02:00
abonhomme
44f92d4169 docstring correction
Corrected the docstring for `mongoengine.queryset.base.update_one()`
2015-10-23 11:07:26 -04:00
David Bordeynik
2f1fae38dd Merge pull request #1117 from reallistic/master
Enable ops in queries using elemMatch for EmbeddedDocuments
fixes #1130
2015-10-21 08:40:39 +03:00
Axel Haustant
9fe99979fe Fix tests on Python 2.6 (assertIsNotNone does not exists) 2015-10-19 18:04:15 +02:00
Axel Haustant
6399de0b51 Fix _instance on list of EmbeddedDocuments 2015-10-19 16:39:00 +02:00
Axel Haustant
959740a585 Fix false positive test on _instance 2015-10-19 16:33:40 +02:00
reallistic
159b082828 Recursively create mongo query for embeddeddocument elemMatch 2015-10-18 16:34:24 -07:00
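A minimal sketch of the query shape this enables, mirroring the queryset test added later in this diff (names and database are assumed)::

    from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                             ListField, StringField, connect)

    class Foo(EmbeddedDocument):
        shape = StringField()
        color = StringField()

    class Bar(Document):
        foo = ListField(EmbeddedDocumentField(Foo))

    connect('elemmatch_demo')  # assumed local test database
    Bar(foo=[Foo(shape='square', color='purple')]).save()

    # Operators inside the elemMatch dict (here __exists) are now transformed
    # recursively instead of being passed to MongoDB verbatim.
    matches = list(Bar.objects(foo__elemMatch={'shape': 'square',
                                               'color__exists': True}))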
Omer Katz
79a2d715b0 Merge pull request #1121 from larsbutler/simplify-install-deps
Move nose/rednose from install dependencies to test dependencies
2015-10-14 12:45:14 +03:00
Alice Bevan–McGregor
50b271c868 Arbitrary metadata documentation. 2015-10-13 22:51:03 -04:00
Alice Bevan–McGregor
a57f28ac83 Correction for local monkeypatch. 2015-10-13 22:41:58 -04:00
Alice Bevan–McGregor
3f3747a2fe Minor formatting tweaks and additional comments. 2015-10-13 21:59:46 -04:00
Alice Bevan–McGregor
d133913c3d Remove now superfluous special cases.
Removes `verbose_name`, `help_text`, and `custom_data`.  All three are
covered by the one metadata assignment and will continue working as
expected.
2015-10-13 21:59:29 -04:00
Alice Bevan–McGregor
e049cef00a Add arbitrary metadata capture to BaseField.
Includes ability to detect and report conflicts.
2015-10-13 21:54:58 -04:00
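A small sketch of the resulting field declaration; the extra keyword is an arbitrary illustrative name, not an attribute mongoengine defines::

    from mongoengine import Document, StringField

    class Shirt(Document):
        # Unknown keyword arguments are captured as field metadata and exposed
        # as attributes; clashing with an existing attribute raises TypeError.
        size = StringField(max_length=3, help_text='Shirt size',
                           wtf_placeholder='e.g. XL')  # hypothetical metadata key

    assert Shirt.size.wtf_placeholder == 'e.g. XL'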
Lars Butler
9b500cd867 docs/changelog.rst: fix #1079 to version 0.10.1 - DEV 2015-10-12 10:13:41 +02:00
Lars Butler
b52cae6575 AUTHORS: Add Lars Butler (that's me!) 2015-10-12 10:13:11 +02:00
Lars Butler
35a0142f9b setup.py, tox.ini: move nose/rednose from install deps to test deps
Remove nose/rednose from `setup_requires` and instead declare them in
`tests_require`. Also explicitly add `nose` and `rednose` to
dependencies list in tox.ini (to avoid breaking test runs).

`python setup.py nosetests` is the preferred way for running tests, and
this works, except that placing test deps (nose/rednose) in the
`setup_requires` also means that these dependencies are pulled in for
installs of mongoengine. These deps are not actually be required just
to run mongoengine, so setup.py should not force users to install these
dependencies.

This refactoring should not change any test run semantics.
2015-10-12 10:13:11 +02:00
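An abridged sketch of the packaging layout the commit describes, not the project's full setup.py::

    # setup.py (abridged, illustrative)
    from setuptools import setup, find_packages

    setup(
        name='mongoengine',
        packages=find_packages(exclude=['tests', 'tests.*']),
        install_requires=['pymongo>=2.7.1'],   # runtime dependencies only
        tests_require=['nose', 'rednose'],     # pulled in just for test runs
        test_suite='nose.collector',
        # note: nose/rednose no longer appear in setup_requires
    )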
David Bordeynik
d4f6ef4f1b Merge pull request #1113 from DavidBord/fix-1105
fix-#1105: StrictDict & SemiStrictDict are shadowed at init time
2015-10-11 21:14:05 +03:00
Lars Butler
5a038de1d5 fields.ReferenceField: add integer values to reverse_delete_rule docs
When I first tried to use the `reverse_delete_rule` feature of
`ReferenceField`, I had to dig through the source code to find what the
actual integer values were expected to be for DO_NOTHING, NULLIFY,
CASCADE, DENY, and PULL (or at least, where these constants were defined
so that I could import and use them). This patch adds the integer values
for those constants (which are defined in mongoengine.queryset.base) to
the docs so that users can easily choose the correct integer value.

Note: A possible improvement on this change would be to include
`mongoengine.queryset.base` module documentation in the generated docs,
and then update the `ReferenceField` docs to link to the documentation
of these constants (DO_NOTHING, NULLIFY, etc.).
2015-10-05 14:37:03 +02:00
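A brief sketch of the documented values in use; the named constants (defined in mongoengine.queryset.base) are preferred over the raw integers::

    from mongoengine import Document, StringField, ReferenceField
    from mongoengine.queryset import CASCADE  # DO_NOTHING=0, NULLIFY=1, CASCADE=2, DENY=3, PULL=4

    class Org(Document):
        name = StringField()

    class Employee(Document):
        name = StringField()
        # Deleting an Org also deletes its Employees; passing the integer 2
        # directly behaves the same way.
        org = ReferenceField(Org, reverse_delete_rule=CASCADE)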
Emmanuel Leblond
903982e896 Merge pull request #1088 from touilleMan/bug-1058
Fix DictField with '_cls' field is converted to Document on access
2015-09-21 12:10:23 +02:00
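A condensed sketch of the case #1058 covers, following the DictField test added later in this diff (database name is assumed)::

    from mongoengine import Document, DictField, IntField, connect

    class ToEmbedParent(Document):
        id = IntField(primary_key=True, default=1)
        meta = {'allow_inheritance': True}

    class ToEmbedChild(ToEmbedParent):
        pass

    class Doc(Document):
        field = DictField()

    connect('dictfield_demo')  # assumed local test database
    dumped = ToEmbedChild(id=2).save().to_mongo().to_dict()  # contains '_cls'
    Doc(field=dumped).save()

    # With the fix, the stored dump stays a plain dict on access instead of
    # being dereferenced back into a ToEmbedChild document.
    assert isinstance(Doc.objects.get().field, dict)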
David Bordeynik
6355c404cc fix-#1105: StrictDict & SemiStrictDict are shadowed at init time 2015-09-16 20:27:52 +03:00
Emmanuel Leblond
92b9cb5d43 Add drop_collection for test_subclass_field_query 2015-09-08 17:35:35 +02:00
Emmanuel Leblond
7580383d26 Add #1050 fix to changelog 2015-09-02 19:00:18 +02:00
Catstyle
ba0934e41e added DynamicTest.test_reload_dynamic_field 2015-09-02 18:42:30 +02:00
Catstyle
a6a1021521 use obj._data instead of self._fields_ordered since DynamicDocument missing some attributes 2015-09-02 18:42:30 +02:00
Emmanuel Leblond
33b4d83c73 Merge pull request #1084 from optik/patch-1
Bad property name for text search index meta
2015-09-02 18:27:10 +02:00
Emmanuel Leblond
6cf630c74a Merge pull request #1096 from vasion/save-condition-for-2.4
save_condition uses "n" instead of "nModified"
2015-09-02 18:23:37 +02:00
Emmanuel Leblond
736fe5b84e Fix unwanted dereference in DictField (issue #1058) 2015-08-30 10:01:24 +02:00
Omer Katz
4241bde6ea Merge pull request #1055 from zxhuang/master
comply to pymongo MongoClient constructor host
2015-08-16 11:52:27 +03:00
Momchil Rogelov
b4ce14d744 use n instead of nModified in save_condition 2015-08-13 10:11:42 +01:00
Momchil Rogelov
10832a2ccc save_condition falls back to "n" if "nModified" is not found to support mongo 2.4 2015-08-12 10:57:20 +01:00
Emmanuel Leblond
91aca44f67 Merge pull request #1093 from touilleMan/bug-1069
Replace disconnect with close method in pymongo
2015-08-10 18:40:59 +02:00
Emmanuel Leblond
96cfbb201a Replace disconnect with the close method in pymongo 2015-08-04 18:02:57 +02:00
David Bordeynik
b2bc155701 Merge pull request #1087 from marcoskv/patch-1
QuerySet count vs len #937
2015-08-01 11:22:35 +03:00
marcoskv
a70ef5594d QuerySet count vs len #937
Current documentation does not consider performance issues in using len instead of count.
https://github.com/MongoEngine/mongoengine/issues/937
2015-07-26 22:41:24 +02:00
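A two-line sketch of the difference the documentation change spells out (model and database names are assumed)::

    from mongoengine import Document, StringField, connect

    class User(Document):
        name = StringField()

    connect('count_demo')  # assumed local test database

    num_users = User.objects.count()   # single server-side count query
    num_users = len(User.objects)      # fetches and caches every result first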
optik
6d991586fd Bad property name for indices description in docs
The correct name for MongoDB index definition property is weights not weight. Using "weight" will cause "Index with name ... already exists with different options"
2015-07-24 15:26:10 +02:00
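The corrected declaration, mirroring the docs change further down in this diff::

    from mongoengine import Document, StringField

    class Article(Document):
        title = StringField()
        content = StringField()
        meta = {'indexes': [
            {'fields': ['$title', '$content'],
             'default_language': 'english',
             'weights': {'title': 10, 'content': 2}}   # 'weights', not 'weight'
        ]}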
Emmanuel Leblond
f8890ca841 Merge pull request #1070 from touilleMan/save-condition-error
Use SaveConditionError instead of OperationError in save_condition
2015-07-19 11:18:33 +02:00
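A short sketch of the new exception in use, assuming a local database (model name is illustrative)::

    from mongoengine import Document, IntField, connect
    from mongoengine.errors import SaveConditionError

    class Counter(Document):
        count = IntField(default=0)

    connect('save_condition_demo')  # assumed local test database
    c = Counter().save()
    c.count = 1
    try:
        # The stored document has count == 0, so the condition fails and the
        # conditional save now raises SaveConditionError (a subclass of the
        # OperationError it previously raised).
        c.save(save_condition={'count': 5})
    except SaveConditionError:
        pass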
Emmanuel Leblond
0752c6b24f Update changelog for #1070 2015-07-19 10:33:54 +02:00
Emmanuel Leblond
3ffaf2c0e1 Correct SaveConditionError involved tests 2015-07-15 11:59:29 +02:00
Emmanuel Leblond
a3e0fbd606 Add SaveConditionError exception 2015-07-15 11:15:40 +02:00
Emmanuel Leblond
9c8ceb6b4e Merge pull request #1060 from touilleMan/GenericReferenceField-choices
Fix GenericReferenceField choices parameter
2015-07-09 11:38:22 +02:00
Emmanuel Leblond
bebce2c053 Clean unused variables in iterations 2015-07-09 10:51:04 +02:00
Emmanuel Leblond
34c6790762 Simplify implementation of choices in GenericReferenceField 2015-07-06 10:10:05 +02:00
Emmanuel Leblond
a5fb009b62 Fix GenericReferenceField choices with DBRef and let it possible to set Document choice as string 2015-07-06 02:33:43 +02:00
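A minimal sketch of the accepted choices forms, mirroring the field tests added later in this diff (database name is assumed)::

    from mongoengine import Document, StringField, GenericReferenceField, connect

    class Link(Document):
        title = StringField()

    class Post(Document):
        title = StringField()

    class Bookmark(Document):
        # Choices may mix Document classes and class-name strings, and the
        # check also works while the stored value is still an undereferenced DBRef.
        bookmark_object = GenericReferenceField(choices=('Post', Link))

    connect('generic_ref_demo')  # assumed local test database
    Bookmark(bookmark_object=Post(title='Reunion').save()).save()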
David Bordeynik
9671ca5ebf Merge pull request #1049 from DavidBord/fix-842
fix-#842: Fix ignored chained options
2015-07-03 08:23:15 +03:00
David Bordeynik
5334ea393e fix-#842: Fix ignored chained options 2015-07-02 23:08:09 +03:00
Zeke Huang
2aaacc02e3 comply to pymongo MongoClient constructor host
Only MongoReplicaSetClient use hosts_or_uri param and it will be deprecated soon.
2015-07-01 12:39:30 -07:00
Matthieu Rigal
222e929b2d Merge pull request #1048 from amitlicht/amitlicht/1047_cached_reference_field_bugfix
Suggested fix for #1047: CachedReferenceField DBRefs bug
2015-07-01 08:53:03 +02:00
amitlicht
6f16d35a92 Adding a changelog line & adding myself to AUTHORS. 2015-06-30 15:08:20 +03:00
amitlicht
d7a2ccf5ac Adding a test case for #1047. 2015-06-30 15:03:06 +03:00
amitlicht
9ce605221a Suggested fix for #1047: CachedReferenceField creates DBRef on to_python, but can't save them on to_mongo.
Dereferencing DBRef to document type before returning it from to_python.
2015-06-28 17:53:20 +03:00
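A condensed sketch of the get-then-save round trip the fix restores, following the regression test added later in this diff (database name is assumed)::

    from mongoengine import Document, StringField, CachedReferenceField, connect

    class Animal(Document):
        name = StringField()

    class Ocorrence(Document):
        person = StringField()
        animal = CachedReferenceField(Animal)

    connect('cached_ref_demo')  # assumed local test database
    Ocorrence(person='testte', animal=Animal(name='Leopard').save()).save()

    p = Ocorrence.objects.get()   # to_python may hand the cached value back as a DBRef
    p.person = 'new_testte'
    p.save()                      # with the fix, re-saving no longer fails in to_mongo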
Matthieu Rigal
1e930fe950 Merge branch 'emilecaron-master' 2015-06-26 17:59:25 +02:00
Matthieu Rigal
4dc158589c Moved change to right place and added fancier test 2015-06-26 17:58:53 +02:00
emilecaron
4525eb457b update changelog 2015-06-26 14:23:42 +02:00
emilecaron
56a2e07dc2 always store docs in cascade_refs 2015-06-26 10:45:07 +00:00
emilecaron
9b7fe9ac31 restore broken behavior 2015-06-26 09:31:07 +00:00
emilecaron
c3da07ccf7 Merge branch 'master' of https://github.com/emilecaron/mongoengine 2015-06-26 08:54:12 +00:00
emilecaron
b691a56d51 late set instanciation 2015-06-26 08:52:30 +00:00
Emile Caron
13e0a1b5bb Merge pull request #1 from emilecaron/fix_delete_rule_cascade_cycle
Fix delete rule cascade cycle
2015-06-25 21:53:04 +02:00
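A minimal sketch of the cyclic case, mirroring the regression test added later in this diff (database name is assumed)::

    from mongoengine import Document, ReferenceField, connect
    from mongoengine.queryset import CASCADE

    class Dummy(Document):
        reference = ReferenceField('self', reverse_delete_rule=CASCADE)

    connect('cascade_cycle_demo')  # assumed local test database
    base = Dummy().save()
    other = Dummy(reference=base).save()
    base.reference = other
    base.save()

    # Previously the cascade followed the reference cycle forever; with the
    # fix both documents are deleted and the call returns.
    base.delete()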
emilecaron
646baddce4 fix cascade delete cycle issue 2015-06-25 18:27:22 +00:00
emilecaron
02f61c323d update test 2015-06-25 18:26:52 +00:00
emilecaron
1e3d2df9e7 fix illogicality 2015-06-25 15:40:12 +00:00
emilecaron
e43fae86f1 reproduce RuntimeError 2015-06-25 15:37:15 +00:00
23 changed files with 718 additions and 112 deletions

View File

@@ -5,6 +5,7 @@ python:
- '3.2'
- '3.3'
- '3.4'
- '3.5'
- pypy
- pypy3
env:

View File

@@ -226,4 +226,10 @@ that much better:
* Emmanuel Leblond (https://github.com/touilleMan)
* Breeze.Kay (https://github.com/9nix00)
* Vicki Donchenko (https://github.com/kivistein)
* Emile Caron (https://github.com/emilecaron)
* Amit Lichtenberg (https://github.com/amitlicht)
* Lars Butler (https://github.com/larsbutler)
* George Macon (https://github.com/gmacon)
* Ashley Whetter (https://github.com/AWhetter)
* Paul-Armand Verhaegen (https://github.com/paularmand)
* Steven Rossiter (https://github.com/BeardedSteve)

View File

@@ -2,8 +2,36 @@
Changelog
=========
Changes in 0.10.1 - DEV
=======================
Changes in 0.10.5
=================
- Fix for reloading of strict with special fields. #1156
Changes in 0.10.4
=================
- SaveConditionError is now importable from the top level package. #1165
- upsert_one method added. #1157
Changes in 0.10.3
=================
- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
Changes in 0.10.2
=================
- Allow shard key to point to a field in an embedded document. #551
- Allow arbitrary metadata in fields. #1129
- ReferenceFields now support abstract document types. #837
Changes in 0.10.1
=================
- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
- Fix ignored chained options #842
- Document save's save_condition error raises `SaveConditionError` exception #1070
- Fix Document.reload for DynamicDocument. #1050
- StrictDict & SemiStrictDict are shadowed at init time. #1105
- Remove test dependencies (nose and rednose) from install dependencies list. #1079
- Recursively build query when using elemMatch operator. #1130
- Fix instance back references for lists of embedded documents. #1131
Changes in 0.10.0
=================

View File

@@ -172,11 +172,11 @@ arguments can be set on all fields:
class Shirt(Document):
size = StringField(max_length=3, choices=SIZE)
:attr:`help_text` (Default: None)
Optional help text to output with the field -- used by form libraries
:attr:`verbose_name` (Default: None)
Optional human-readable name for the field -- used by form libraries
:attr:`**kwargs` (Optional)
You can supply additional metadata as arbitrary additional keyword
arguments. You can not override existing attributes, however. Common
choices include `help_text` and `verbose_name`, commonly used by form and
widget libraries.
List fields

View File

@@ -347,6 +347,8 @@ way of achieving this::
num_users = len(User.objects)
Even if len() is the Pythonic way of counting results, keep in mind that if you are concerned about performance, :meth:`~mongoengine.queryset.QuerySet.count` is the way to go since it only executes a server-side count query, while len() retrieves the results, places them in cache, and finally counts them. If we compare the performance of the two operations, len() is much slower than :meth:`~mongoengine.queryset.QuerySet.count`.
Further aggregation
-------------------
You may sum over the values of a specific field on documents using

View File

@@ -17,7 +17,7 @@ Use the *$* prefix to set a text index, Look the declaration::
meta = {'indexes': [
{'fields': ['$title', "$content"],
'default_language': 'english',
'weight': {'title': 10, 'content': 2}
'weights': {'title': 10, 'content': 2}
}
]}

View File

@@ -14,7 +14,7 @@ import errors
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
list(queryset.__all__) + signals.__all__ + list(errors.__all__))
VERSION = (0, 10, 0)
VERSION = (0, 10, 5)
def get_version():

View File

@@ -85,7 +85,6 @@ class BaseDocument(object):
self._data = SemiStrictDict.create(
allowed_keys=self._fields_ordered)()
self._data = {}
self._dynamic_fields = SON()
# Assign default values to instance

View File

@@ -41,8 +41,8 @@ class BaseField(object):
def __init__(self, db_field=None, name=None, required=False, default=None,
unique=False, unique_with=None, primary_key=False,
validation=None, choices=None, verbose_name=None,
help_text=None, null=False, sparse=False, custom_data=None):
validation=None, choices=None, null=False, sparse=False,
**kwargs):
"""
:param db_field: The database field to store this field in
(defaults to the name of the field)
@@ -60,16 +60,15 @@ class BaseField(object):
field. Generally this is deprecated in favour of the
`FIELD.validate` method
:param choices: (optional) The valid choices
:param verbose_name: (optional) The verbose name for the field.
Designed to be human readable and is often used when generating
model forms from the document model.
:param help_text: (optional) The help text for this field and is often
used when generating model forms from the document model.
:param null: (optional) Is the field value can be null. If no and there is a default value
then the default value is set
:param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
means that uniqueness won't be enforced for `None` values
:param custom_data: (optional) Custom metadata for this field.
:param **kwargs: (optional) Arbitrary indirection-free metadata for
this field can be supplied as additional keyword arguments and
accessed as attributes of the field. Must not conflict with any
existing attributes. Common metadata includes `verbose_name` and
`help_text`.
"""
self.db_field = (db_field or name) if not primary_key else '_id'
@@ -83,12 +82,19 @@ class BaseField(object):
self.primary_key = primary_key
self.validation = validation
self.choices = choices
self.verbose_name = verbose_name
self.help_text = help_text
self.null = null
self.sparse = sparse
self._owner_document = None
self.custom_data = custom_data
# Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs)
if conflicts:
raise TypeError("%s already has attribute(s): %s" % (
self.__class__.__name__, ', '.join(conflicts) ))
# Assign metadata to the instance
# This efficient method is available because no __slots__ are defined.
self.__dict__.update(kwargs)
# Adjust the appropriate creation counter, and save our local copy.
if self.db_field == '_id':
@@ -135,6 +141,10 @@ class BaseField(object):
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument):
value._instance = weakref.proxy(instance)
elif isinstance(value, (list, tuple)):
for v in value:
if isinstance(v, EmbeddedDocument):
v._instance = weakref.proxy(instance)
instance._data[self.name] = value
def error(self, message="", errors=None, field_name=None):
@@ -165,26 +175,29 @@ class BaseField(object):
"""
pass
def _validate(self, value, **kwargs):
def _validate_choices(self, value):
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
choice_list = self.choices
if isinstance(choice_list[0], (list, tuple)):
choice_list = [k for k, _ in choice_list]
# Choices which are other types of Documents
if isinstance(value, (Document, EmbeddedDocument)):
if not any(isinstance(value, c) for c in choice_list):
self.error(
'Value must be instance of %s' % unicode(choice_list)
)
# Choices which are types other than Documents
elif value not in choice_list:
self.error('Value must be one of %s' % unicode(choice_list))
def _validate(self, value, **kwargs):
# Check the Choices Constraint
if self.choices:
choice_list = self.choices
if isinstance(self.choices[0], (list, tuple)):
choice_list = [k for k, v in self.choices]
# Choices which are other types of Documents
if isinstance(value, (Document, EmbeddedDocument)):
if not any(isinstance(value, c) for c in choice_list):
self.error(
'Value must be instance of %s' % unicode(choice_list)
)
# Choices which are types other than Documents
elif value not in choice_list:
self.error('Value must be one of %s' % unicode(choice_list))
self._validate_choices(value)
# check validation argument
if self.validation is not None:
@@ -308,7 +321,7 @@ class ComplexBaseField(BaseField):
value_dict[k] = self.to_python(v)
if is_list: # Convert back to a list
return [v for k, v in sorted(value_dict.items(),
return [v for _, v in sorted(value_dict.items(),
key=operator.itemgetter(0))]
return value_dict
@@ -375,7 +388,7 @@ class ComplexBaseField(BaseField):
value_dict[k] = self.to_mongo(v)
if is_list: # Convert back to a list
return [v for k, v in sorted(value_dict.items(),
return [v for _, v in sorted(value_dict.items(),
key=operator.itemgetter(0))]
return value_dict

View File

@@ -81,7 +81,7 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME):
global _dbs
if alias in _connections:
get_connection(alias=alias).disconnect()
get_connection(alias=alias).close()
del _connections[alias]
if alias in _dbs:
del _dbs[alias]
@@ -108,7 +108,6 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
connection_class = MongoClient
if 'replicaSet' in conn_settings:
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# Discard port since it can't be used on MongoReplicaSetClient
conn_settings.pop('port', None)
# Discard replicaSet if not base string
@@ -116,6 +115,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
conn_settings.pop('replicaSet', None)
if not IS_PYMONGO_3:
connection_class = MongoReplicaSetClient
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
try:
connection = None

View File

@@ -16,7 +16,8 @@ from mongoengine.base import (
ALLOW_INHERITANCE,
get_document
)
from mongoengine.errors import InvalidQueryError, InvalidDocumentError
from mongoengine.errors import (InvalidQueryError, InvalidDocumentError,
SaveConditionError)
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.queryset import (OperationError, NotUniqueError,
QuerySet, transform)
@@ -216,7 +217,7 @@ class Document(BaseDocument):
Returns True if the document has been updated or False if the document
in the database doesn't match the query.
.. note:: All unsaved changes that has been made to the document are
.. note:: All unsaved changes that have been made to the document are
rejected if the method returns True.
:param query: the update will be performed only if the document in the
@@ -294,6 +295,8 @@ class Document(BaseDocument):
if the condition is satisfied in the current db record.
.. versionchanged:: 0.10
:class:`OperationError` exception raised if save_condition fails.
.. versionchanged:: 0.10.1
:class: save_condition failure now raises a `SaveConditionError`
"""
signals.pre_save.send(self.__class__, document=self)
@@ -338,8 +341,12 @@ class Document(BaseDocument):
select_dict['_id'] = object_id
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
actual_key = self._db_field_map.get(k, k)
select_dict[actual_key] = doc[actual_key]
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
def is_new_object(last_error):
if last_error is not None:
@@ -358,9 +365,9 @@ class Document(BaseDocument):
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error['nModified'] == 0:
raise OperationError('Race condition preventing'
' document update detected')
if not upsert and last_error["n"] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
created = is_new_object(last_error)
if cascade is None:
@@ -400,7 +407,7 @@ class Document(BaseDocument):
def cascade_save(self, *args, **kwargs):
"""Recursively saves any references /
generic references on an objects"""
generic references on the document"""
_refs = kwargs.get('_refs', []) or []
ReferenceField = _import_class('ReferenceField')
@@ -441,7 +448,12 @@ class Document(BaseDocument):
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
select_dict[k] = getattr(self, k)
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = self
for ak in actual_key:
val = getattr(val, ak)
select_dict['__'.join(actual_key)] = val
return select_dict
def update(self, **kwargs):
@@ -588,15 +600,20 @@ class Document(BaseDocument):
else:
raise self.DoesNotExist("Document does not exist")
for field in self._fields_ordered:
for field in obj._data:
if not fields or field in fields:
try:
setattr(self, field, self._reload(field, obj[field]))
except KeyError:
# If field is removed from the database while the object
# is in memory, a reload would cause a KeyError
# i.e. obj.update(unset__field=1) followed by obj.reload()
delattr(self, field)
except (KeyError, AttributeError):
try:
# If field is a special field, e.g. items is stored as _reserved_items,
# a KeyError is thrown. So try to retrieve the field from _data
setattr(self, field, self._reload(field, obj._data.get(field)))
except KeyError:
# If field is removed from the database while the object
# is in memory, a reload would cause a KeyError
# i.e. obj.update(unset__field=1) followed by obj.reload()
delattr(self, field)
self._changed_fields = obj._changed_fields
self._created = False

View File

@@ -6,7 +6,7 @@ from mongoengine.python_support import txt_type
__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
'ValidationError')
'ValidationError', 'SaveConditionError')
class NotRegistered(Exception):
@@ -41,6 +41,10 @@ class NotUniqueError(OperationError):
pass
class SaveConditionError(OperationError):
pass
class FieldDoesNotExist(Exception):
"""Raised when trying to set a field
not declared in a :class:`~mongoengine.Document`

View File

@@ -794,6 +794,7 @@ class DictField(ComplexBaseField):
def __init__(self, basecls=None, field=None, *args, **kwargs):
self.field = field
self._auto_dereference = False
self.basecls = basecls or BaseField
if not issubclass(self.basecls, BaseField):
self.error('DictField only accepts dict values')
@@ -862,12 +863,11 @@ class ReferenceField(BaseField):
The options are:
* DO_NOTHING - don't do anything (default).
* NULLIFY - Updates the reference to null.
* CASCADE - Deletes the documents associated with the reference.
* DENY - Prevent the deletion of the reference object.
* PULL - Pull the reference from a :class:`~mongoengine.fields.ListField`
of references
* DO_NOTHING (0) - don't do anything (default).
* NULLIFY (1) - Updates the reference to null.
* CASCADE (2) - Deletes the documents associated with the reference.
* DENY (3) - Prevent the deletion of the reference object.
* PULL (4) - Pull the reference from a :class:`~mongoengine.fields.ListField` of references
Alternative syntax for registering delete rules (useful when implementing
bi-directional delete rules)
@@ -895,6 +895,10 @@ class ReferenceField(BaseField):
or as the :class:`~pymongo.objectid.ObjectId`.id .
:param reverse_delete_rule: Determines what to do when the referring
object is deleted
.. note ::
A reference to an abstract document type is always stored as a
:class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`.
"""
if not isinstance(document_type, basestring):
if not issubclass(document_type, (Document, basestring)):
@@ -927,9 +931,14 @@ class ReferenceField(BaseField):
self._auto_dereference = instance._fields[self.name]._auto_dereference
# Dereference DBRefs
if self._auto_dereference and isinstance(value, DBRef):
value = self.document_type._get_db().dereference(value)
if hasattr(value, 'cls'):
# Dereference using the class type specified in the reference
cls = get_document(value.cls)
else:
cls = self.document_type
value = cls._get_db().dereference(value)
if value is not None:
instance._data[self.name] = self.document_type._from_son(value)
instance._data[self.name] = cls._from_son(value)
return super(ReferenceField, self).__get__(instance, owner)
@@ -939,21 +948,29 @@ class ReferenceField(BaseField):
return document.id
return document
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.pk
if id_ is None:
self.error('You can only reference documents once they have'
' been saved to the database')
# Use the attributes from the document instance, so that they
# override the attributes of this field's document type
cls = document
else:
id_ = document
cls = self.document_type
id_field_name = cls._meta['id_field']
id_field = cls._fields[id_field_name]
id_ = id_field.to_mongo(id_)
if self.dbref:
collection = self.document_type._get_collection_name()
if self.document_type._meta.get('abstract'):
collection = cls._get_collection_name()
return DBRef(collection, id_, cls=cls._class_name)
elif self.dbref:
collection = cls._get_collection_name()
return DBRef(collection, id_)
return id_
@@ -982,6 +999,14 @@ class ReferenceField(BaseField):
self.error('You can only reference documents once they have been '
'saved to the database')
if self.document_type._meta.get('abstract') and \
not isinstance(value, self.document_type):
self.error('%s is not an instance of abstract reference'
' type %s' % (value._class_name,
self.document_type._class_name)
)
def lookup_member(self, member_name):
return self.document_type._fields.get(member_name)
@@ -1034,6 +1059,7 @@ class CachedReferenceField(BaseField):
collection = self.document_type._get_collection_name()
value = DBRef(
collection, self.document_type.id.to_python(value['_id']))
return self.document_type._from_son(self.document_type._get_db().dereference(value))
return value
@@ -1139,6 +1165,30 @@ class GenericReferenceField(BaseField):
.. versionadded:: 0.3
"""
def __init__(self, *args, **kwargs):
choices = kwargs.pop('choices', None)
super(GenericReferenceField, self).__init__(*args, **kwargs)
self.choices = []
# Keep the choices as a list of allowed Document class names
if choices:
for choice in choices:
if isinstance(choice, basestring):
self.choices.append(choice)
elif isinstance(choice, type) and issubclass(choice, Document):
self.choices.append(choice._class_name)
else:
self.error('Invalid choices provided: must be a list of '
'Document subclasses and/or basestrings')
def _validate_choices(self, value):
if isinstance(value, dict):
# If the field has not been dereferenced, it is still a dict
# of class and DBRef
value = value.get('_cls')
elif isinstance(value, Document):
value = value._class_name
super(GenericReferenceField, self)._validate_choices(value)
def __get__(self, instance, owner):
if instance is None:
return self

View File

@@ -346,7 +346,7 @@ class BaseQuerySet(object):
return 0
return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
def delete(self, write_concern=None, _from_doc_delete=False):
def delete(self, write_concern=None, _from_doc_delete=False, cascade_refs=None):
"""Delete the documents matched by the query.
:param write_concern: Extra keyword arguments are passed down which
@@ -402,11 +402,13 @@ class BaseQuerySet(object):
continue
rule = doc._meta['delete_rules'][rule_entry]
if rule == CASCADE:
ref_q = document_cls.objects(**{field_name + '__in': self})
cascade_refs = set() if cascade_refs is None else cascade_refs
for ref in queryset:
cascade_refs.add(ref.id)
ref_q = document_cls.objects(**{field_name + '__in': self, 'id__nin': cascade_refs})
ref_q_count = ref_q.count()
if (doc != document_cls and ref_q_count > 0 or
(doc == document_cls and ref_q_count > 0)):
ref_q.delete(write_concern=write_concern)
if ref_q_count > 0:
ref_q.delete(write_concern=write_concern, cascade_refs=cascade_refs)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
write_concern=write_concern, **{'unset__%s' % field_name: 1})
@@ -469,8 +471,35 @@ class BaseQuerySet(object):
raise OperationError(message)
raise OperationError(u'Update failed (%s)' % unicode(err))
def upsert_one(self, write_concern=None, **update):
"""Overwrite or add the first document matched by the query.
:param write_concern: Extra keyword arguments are passed down which
will be used as options for the resultant
``getLastError`` command. For example,
``save(..., write_concern={w: 2, fsync: True}, ...)`` will
wait until at least two servers have recorded the write and
will force an fsync on the primary server.
:param update: Django-style update keyword arguments
:returns the new or overwritten document
.. versionadded:: 0.10.2
"""
atomic_update = self.update(multi=False, upsert=True, write_concern=write_concern,
full_result=True,**update)
if atomic_update['updatedExisting']:
document = self.get()
else:
document = self._document.objects.with_id(atomic_update['upserted'])
return document
def update_one(self, upsert=False, write_concern=None, **update):
"""Perform an atomic update on first field matched by the query.
"""Perform an atomic update on the fields of the first document
matched by the query.
:param upsert: Any existing document with that "_id" is overwritten.
:param write_concern: Extra keyword arguments are passed down which
@@ -682,11 +711,7 @@ class BaseQuerySet(object):
:param n: the maximum number of objects to return
"""
queryset = self.clone()
if n == 0:
queryset._cursor.limit(1)
else:
queryset._cursor.limit(n)
queryset._limit = n
queryset._limit = n if n != 0 else 1
# Return self to allow chaining
return queryset
@@ -697,7 +722,6 @@ class BaseQuerySet(object):
:param n: the number of objects to skip before returning results
"""
queryset = self.clone()
queryset._cursor.skip(n)
queryset._skip = n
return queryset
@@ -715,7 +739,6 @@ class BaseQuerySet(object):
.. versionadded:: 0.5
"""
queryset = self.clone()
queryset._cursor.hint(index)
queryset._hint = index
return queryset
@@ -933,6 +956,7 @@ class BaseQuerySet(object):
validate_read_preference('read_preference', read_preference)
queryset = self.clone()
queryset._read_preference = read_preference
queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference
return queryset
def scalar(self, *fields):
@@ -1446,8 +1470,16 @@ class BaseQuerySet(object):
def _cursor(self):
if self._cursor_obj is None:
self._cursor_obj = self._collection.find(self._query,
**self._cursor_args)
# In PyMongo 3+, we define the read preference on a collection
# level, not a cursor level. Thus, we need to get a cloned
# collection object using `with_options` first.
if IS_PYMONGO_3 and self._read_preference is not None:
self._cursor_obj = self._collection\
.with_options(read_preference=self._read_preference)\
.find(self._query, **self._cursor_args)
else:
self._cursor_obj = self._collection.find(self._query,
**self._cursor_args)
# Apply where clauses to cursor
if self._where_clause:
where_clause = self._sub_js_fields(self._where_clause)

View File

@@ -26,12 +26,12 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS)
def query(_doc_cls=None, **query):
def query(_doc_cls=None, **kwargs):
"""Transform a query from Django-style format to Mongo format.
"""
mongo_query = {}
merge_query = defaultdict(list)
for key, value in sorted(query.items()):
for key, value in sorted(kwargs.items()):
if key == "__raw__":
mongo_query.update(value)
continue
@@ -105,13 +105,18 @@ def query(_doc_cls=None, **query):
if op:
if op in GEO_OPERATORS:
value = _geo_operator(field, op, value)
elif op in CUSTOM_OPERATORS:
if op in ('elem_match', 'match'):
value = field.prepare_query_value(op, value)
value = {"$elemMatch": value}
elif op in ('match', 'elemMatch'):
ListField = _import_class('ListField')
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
if (isinstance(value, dict) and isinstance(field, ListField) and
isinstance(field.field, EmbeddedDocumentField)):
value = query(field.field.document_type, **value)
else:
NotImplementedError("Custom method '%s' has not "
"been implemented" % op)
value = field.prepare_query_value(op, value)
value = {"$elemMatch": value}
elif op in CUSTOM_OPERATORS:
NotImplementedError("Custom method '%s' has not "
"been implemented" % op)
elif op not in STRING_OPERATORS:
value = {'$' + op: value}

View File

@@ -52,13 +52,13 @@ CLASSIFIERS = [
extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0']
extra_opts['tests_require'] = ['nose', 'rednose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0']
if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'] = find_packages()
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
else:
# coverage 4 does not support Python 3.2 anymore
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
extra_opts['tests_require'] = ['nose', 'rednose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
extra_opts['tests_require'].append('unittest2')
@@ -79,6 +79,5 @@ setup(name='mongoengine',
classifiers=CLASSIFIERS,
install_requires=['pymongo>=2.7.1'],
test_suite='nose.collector',
setup_requires=['nose', 'rednose'], # Allow proper nose usage with setuptols and tox
**extra_opts
)

View File

@@ -88,6 +88,18 @@ class DynamicTest(unittest.TestCase):
p.update(unset__misc=1)
p.reload()
def test_reload_dynamic_field(self):
self.Person.objects.delete()
p = self.Person.objects.create()
p.update(age=1)
self.assertEqual(len(p._data), 3)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
p.reload()
self.assertEqual(len(p._data), 4)
self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields"""
p = self.Person()

View File

@@ -577,11 +577,11 @@ class IndexesTest(unittest.TestCase):
self.assertEqual(BlogPost.objects.hint('tags').count(), 10)
else:
def invalid_index():
BlogPost.objects.hint('tags')
BlogPost.objects.hint('tags').next()
self.assertRaises(TypeError, invalid_index)
def invalid_index_2():
return BlogPost.objects.hint(('tags', 1))
return BlogPost.objects.hint(('tags', 1)).next()
self.assertRaises(Exception, invalid_index_2)
def test_unique(self):

View File

@@ -7,6 +7,7 @@ import os
import pickle
import unittest
import uuid
import weakref
from datetime import datetime
from bson import DBRef, ObjectId
@@ -17,7 +18,7 @@ from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
from mongoengine import *
from mongoengine.errors import (NotRegistered, InvalidDocumentError,
InvalidQueryError, NotUniqueError,
FieldDoesNotExist)
FieldDoesNotExist, SaveConditionError)
from mongoengine.queryset import NULLIFY, Q
from mongoengine.connection import get_db
from mongoengine.base import get_document
@@ -30,6 +31,8 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
__all__ = ("InstanceTest",)
class InstanceTest(unittest.TestCase):
def setUp(self):
@@ -63,6 +66,14 @@ class InstanceTest(unittest.TestCase):
list(self.Person._get_collection().find().sort("id")),
sorted(docs, key=lambda doc: doc["_id"]))
def assertHasInstance(self, field, instance):
self.assertTrue(hasattr(field, "_instance"))
self.assertTrue(field._instance is not None)
if isinstance(field._instance, weakref.ProxyType):
self.assertTrue(field._instance.__eq__(instance))
else:
self.assertEqual(field._instance, instance)
def test_capped_collection(self):
"""Ensure that capped collections work properly.
"""
@@ -473,6 +484,20 @@ class InstanceTest(unittest.TestCase):
doc.reload()
Animal.drop_collection()
def test_reload_sharded_nested(self):
class SuperPhylum(EmbeddedDocument):
name = StringField()
class Animal(Document):
superphylum = EmbeddedDocumentField(SuperPhylum)
meta = {'shard_key': ('superphylum.name',)}
Animal.drop_collection()
doc = Animal(superphylum=SuperPhylum(name='Deuterostomia'))
doc.save()
doc.reload()
Animal.drop_collection()
def test_reload_referencing(self):
"""Ensures reloading updates weakrefs correctly
"""
@@ -546,6 +571,28 @@ class InstanceTest(unittest.TestCase):
except Exception:
self.assertFalse("Threw wrong exception")
def test_reload_of_non_strict_with_special_field_name(self):
"""Ensures reloading works for documents with meta strict == False
"""
class Post(Document):
meta = {
'strict': False
}
title = StringField()
items = ListField()
Post.drop_collection()
Post._get_collection().insert_one({
"title": "Items eclipse",
"items": ["more lorem", "even more ipsum"]
})
post = Post.objects.first()
post.reload()
self.assertEqual(post.title, "Items eclipse")
self.assertEqual(post.items, ["more lorem", "even more ipsum"])
def test_dictionary_access(self):
"""Ensure that dictionary-style field access works properly.
"""
@@ -608,10 +655,12 @@ class InstanceTest(unittest.TestCase):
embedded_field = EmbeddedDocumentField(Embedded)
Doc.drop_collection()
Doc(embedded_field=Embedded(string="Hi")).save()
doc = Doc(embedded_field=Embedded(string="Hi"))
self.assertHasInstance(doc.embedded_field, doc)
doc.save()
doc = Doc.objects.get()
self.assertEqual(doc, doc.embedded_field._instance)
self.assertHasInstance(doc.embedded_field, doc)
def test_embedded_document_complex_instance(self):
"""Ensure that embedded documents in complex fields can reference
@@ -623,10 +672,12 @@ class InstanceTest(unittest.TestCase):
embedded_field = ListField(EmbeddedDocumentField(Embedded))
Doc.drop_collection()
Doc(embedded_field=[Embedded(string="Hi")]).save()
doc = Doc(embedded_field=[Embedded(string="Hi")])
self.assertHasInstance(doc.embedded_field[0], doc)
doc.save()
doc = Doc.objects.get()
self.assertEqual(doc, doc.embedded_field[0]._instance)
self.assertHasInstance(doc.embedded_field[0], doc)
def test_instance_is_set_on_setattr(self):
@@ -639,11 +690,28 @@ class InstanceTest(unittest.TestCase):
Account.drop_collection()
acc = Account()
acc.email = Email(email='test@example.com')
self.assertTrue(hasattr(acc._data["email"], "_instance"))
self.assertHasInstance(acc._data["email"], acc)
acc.save()
acc1 = Account.objects.first()
self.assertTrue(hasattr(acc1._data["email"], "_instance"))
self.assertHasInstance(acc1._data["email"], acc1)
def test_instance_is_set_on_setattr_on_embedded_document_list(self):
class Email(EmbeddedDocument):
email = EmailField()
class Account(Document):
emails = EmbeddedDocumentListField(Email)
Account.drop_collection()
acc = Account()
acc.emails = [Email(email='test@example.com')]
self.assertHasInstance(acc._data["emails"][0], acc)
acc.save()
acc1 = Account.objects.first()
self.assertHasInstance(acc1._data["emails"][0], acc1)
def test_document_clean(self):
class TestDocument(Document):
@@ -1021,7 +1089,7 @@ class InstanceTest(unittest.TestCase):
flip(w1)
self.assertTrue(w1.toggle)
self.assertEqual(w1.count, 1)
self.assertRaises(OperationError,
self.assertRaises(SaveConditionError,
w1.save, save_condition={'save_id': UUID(42)})
w1.reload()
self.assertFalse(w1.toggle)
@@ -1050,7 +1118,7 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(w1.count, 2)
flip(w2)
flip(w2)
self.assertRaises(OperationError,
self.assertRaises(SaveConditionError,
w2.save, save_condition={'save_id': old_id})
w2.reload()
self.assertFalse(w2.toggle)
@@ -1063,7 +1131,7 @@ class InstanceTest(unittest.TestCase):
self.assertTrue(w1.toggle)
self.assertEqual(w1.count, 3)
flip(w1)
self.assertRaises(OperationError,
self.assertRaises(SaveConditionError,
w1.save, save_condition={'count__gte': w1.count})
w1.reload()
self.assertTrue(w1.toggle)
@@ -2683,6 +2751,32 @@ class InstanceTest(unittest.TestCase):
self.assertRaises(OperationError, change_shard_key)
def test_shard_key_in_embedded_document(self):
class Foo(EmbeddedDocument):
foo = StringField()
class Bar(Document):
meta = {
'shard_key': ('foo.foo',)
}
foo = EmbeddedDocumentField(Foo)
bar = StringField()
foo_doc = Foo(foo='hello')
bar_doc = Bar(foo=foo_doc, bar='world')
bar_doc.save()
self.assertTrue(bar_doc.id is not None)
bar_doc.bar = 'baz'
bar_doc.save()
def change_shard_key():
bar_doc.foo.foo = 'something'
bar_doc.save()
self.assertRaises(OperationError, change_shard_key)
def test_shard_key_primary(self):
class LogEntry(Document):
machine = StringField(primary_key=True)

View File

@@ -1257,6 +1257,44 @@ class FieldTest(unittest.TestCase):
BlogPost.drop_collection()
def test_dictfield_dump_document(self):
"""Ensure a DictField can handle another document's dump
"""
class Doc(Document):
field = DictField()
class ToEmbed(Document):
id = IntField(primary_key=True, default=1)
recursive = DictField()
class ToEmbedParent(Document):
id = IntField(primary_key=True, default=1)
recursive = DictField()
meta = {'allow_inheritance': True}
class ToEmbedChild(ToEmbedParent):
pass
to_embed_recursive = ToEmbed(id=1).save()
to_embed = ToEmbed(
id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
doc = Doc(field=to_embed.to_mongo().to_dict())
doc.save()
assert isinstance(doc.field, dict)
assert doc.field == {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}}
# Same thing with a Document with a _cls field
to_embed_recursive = ToEmbedChild(id=1).save()
to_embed_child = ToEmbedChild(
id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
doc = Doc(field=to_embed_child.to_mongo().to_dict())
doc.save()
assert isinstance(doc.field, dict)
assert doc.field == {
'_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild',
'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}}
}
def test_dictfield_strict(self):
"""Ensure that dict field handles validation if provided a strict field type."""
@@ -1617,6 +1655,27 @@ class FieldTest(unittest.TestCase):
'parent': "50a234ea469ac1eda42d347d"})
mongoed = p1.to_mongo()
self.assertTrue(isinstance(mongoed['parent'], ObjectId))
def test_cached_reference_field_get_and_save(self):
"""
Tests #1047: CachedReferenceField creates DBRefs on to_python, but can't save them on to_mongo
"""
class Animal(Document):
name = StringField()
tag = StringField()
class Ocorrence(Document):
person = StringField()
animal = CachedReferenceField(Animal)
Animal.drop_collection()
Ocorrence.drop_collection()
Ocorrence(person="testte",
animal=Animal(name="Leopard", tag="heavy").save()).save()
p = Ocorrence.objects.get()
p.person = 'new_testte'
p.save()
def test_cached_reference_fields(self):
class Animal(Document):
@@ -2222,6 +2281,81 @@ class FieldTest(unittest.TestCase):
Member.drop_collection()
BlogPost.drop_collection()
def test_reference_class_with_abstract_parent(self):
"""Ensure that a class with an abstract parent can be referenced.
"""
class Sibling(Document):
name = StringField()
meta = {"abstract": True}
class Sister(Sibling):
pass
class Brother(Sibling):
sibling = ReferenceField(Sibling)
Sister.drop_collection()
Brother.drop_collection()
sister = Sister(name="Alice")
sister.save()
brother = Brother(name="Bob", sibling=sister)
brother.save()
self.assertEquals(Brother.objects[0].sibling.name, sister.name)
Sister.drop_collection()
Brother.drop_collection()
def test_reference_abstract_class(self):
"""Ensure that an abstract class instance cannot be used in the
reference of that abstract class.
"""
class Sibling(Document):
name = StringField()
meta = {"abstract": True}
class Sister(Sibling):
pass
class Brother(Sibling):
sibling = ReferenceField(Sibling)
Sister.drop_collection()
Brother.drop_collection()
sister = Sibling(name="Alice")
brother = Brother(name="Bob", sibling=sister)
self.assertRaises(ValidationError, brother.save)
Sister.drop_collection()
Brother.drop_collection()
def test_abstract_reference_base_type(self):
"""Ensure that an an abstract reference fails validation when given a
Document that does not inherit from the abstract type.
"""
class Sibling(Document):
name = StringField()
meta = {"abstract": True}
class Brother(Sibling):
sibling = ReferenceField(Sibling)
class Mother(Document):
name = StringField()
Brother.drop_collection()
Mother.drop_collection()
mother = Mother(name="Carol")
mother.save()
brother = Brother(name="Bob", sibling=mother)
self.assertRaises(ValidationError, brother.save)
Brother.drop_collection()
Mother.drop_collection()
def test_generic_reference(self):
"""Ensure that a GenericReferenceField properly dereferences items.
"""
@@ -2375,6 +2509,62 @@ class FieldTest(unittest.TestCase):
bm = Bookmark.objects.first()
self.assertEqual(bm.bookmark_object, post_1)
def test_generic_reference_string_choices(self):
"""Ensure that a GenericReferenceField can handle choices as strings
"""
class Link(Document):
title = StringField()
class Post(Document):
title = StringField()
class Bookmark(Document):
bookmark_object = GenericReferenceField(choices=('Post', Link))
Link.drop_collection()
Post.drop_collection()
Bookmark.drop_collection()
link_1 = Link(title="Pitchfork")
link_1.save()
post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
post_1.save()
bm = Bookmark(bookmark_object=link_1)
bm.save()
bm = Bookmark(bookmark_object=post_1)
bm.save()
bm = Bookmark(bookmark_object=bm)
self.assertRaises(ValidationError, bm.validate)
def test_generic_reference_choices_no_dereference(self):
"""Ensure that a GenericReferenceField can handle choices on
non-dereferenced (i.e. DBRef) elements
"""
class Post(Document):
title = StringField()
class Bookmark(Document):
bookmark_object = GenericReferenceField(choices=(Post, ))
other_field = StringField()
Post.drop_collection()
Bookmark.drop_collection()
post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
post_1.save()
bm = Bookmark(bookmark_object=post_1)
bm.save()
bm = Bookmark.objects.get(id=bm.id)
# bookmark_object is now a DBRef
bm.other_field = 'dummy_change'
bm.save()
def test_generic_reference_list_choices(self):
"""Ensure that a ListField properly dereferences generic references and
respects choices.
@@ -3250,6 +3440,39 @@ class FieldTest(unittest.TestCase):
doc = Doc.objects.get()
self.assertEqual(doc.embed_me.field_1, "hello")
def test_dynamicfield_dump_document(self):
"""Ensure a DynamicField can handle another document's dump
"""
class Doc(Document):
field = DynamicField()
class ToEmbed(Document):
id = IntField(primary_key=True, default=1)
recursive = DynamicField()
class ToEmbedParent(Document):
id = IntField(primary_key=True, default=1)
recursive = DynamicField()
meta = {'allow_inheritance': True}
class ToEmbedChild(ToEmbedParent):
pass
to_embed_recursive = ToEmbed(id=1).save()
to_embed = ToEmbed(id=2, recursive=to_embed_recursive).save()
doc = Doc(field=to_embed)
doc.save()
assert isinstance(doc.field, ToEmbed)
assert doc.field == to_embed
# Same thing with a Document with a _cls field
to_embed_recursive = ToEmbedChild(id=1).save()
to_embed_child = ToEmbedChild(id=2, recursive=to_embed_recursive).save()
doc = Doc(field=to_embed_child)
doc.save()
assert isinstance(doc.field, ToEmbedChild)
assert doc.field == to_embed_child
def test_invalid_dict_value(self):
class DictFieldTest(Document):
dictionary = DictField(required=True)

View File

@@ -188,6 +188,10 @@ class QuerySetTest(unittest.TestCase):
"[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[1:3])
self.assertEqual(
"[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[51:53])
# Test only after limit
self.assertEqual(self.Person.objects().limit(2).only('name')[0].age, None)
# Test only after skip
self.assertEqual(self.Person.objects().skip(2).only('name')[0].age, None)
def test_find_one(self):
"""Ensure that a query using find_one returns a valid result.
@@ -676,12 +680,21 @@ class QuerySetTest(unittest.TestCase):
def test_upsert_one(self):
self.Person.drop_collection()
self.Person.objects(name="Bob", age=30).update_one(upsert=True)
bob = self.Person.objects(name="Bob", age=30).upsert_one()
bob = self.Person.objects.first()
self.assertEqual("Bob", bob.name)
self.assertEqual(30, bob.age)
bob.name = "Bobby"
bob.save()
bobby = self.Person.objects(name="Bobby", age=30).upsert_one()
self.assertEqual("Bobby", bobby.name)
self.assertEqual(30, bobby.age)
self.assertEqual(bob.id, bobby.id)
def test_set_on_insert(self):
self.Person.drop_collection()
@@ -1413,6 +1426,47 @@ class QuerySetTest(unittest.TestCase):
self.Person.objects(name='Test User').delete()
self.assertEqual(1, BlogPost.objects.count())
def test_reverse_delete_rule_cascade_cycle(self):
"""Ensure reference cascading doesn't loop if reference graph isn't
a tree
"""
class Dummy(Document):
reference = ReferenceField('self', reverse_delete_rule=CASCADE)
base = Dummy().save()
other = Dummy(reference=base).save()
base.reference = other
base.save()
base.delete()
self.assertRaises(DoesNotExist, base.reload)
self.assertRaises(DoesNotExist, other.reload)
def test_reverse_delete_rule_cascade_complex_cycle(self):
"""Ensure reference cascading doesn't loop if reference graph isn't
a tree
"""
class Category(Document):
name = StringField()
class Dummy(Document):
reference = ReferenceField('self', reverse_delete_rule=CASCADE)
cat = ReferenceField(Category, reverse_delete_rule=CASCADE)
cat = Category(name='cat').save()
base = Dummy(cat=cat).save()
other = Dummy(reference=base).save()
other2 = Dummy(reference=other).save()
base.reference = other
base.save()
cat.delete()
self.assertRaises(DoesNotExist, base.reload)
self.assertRaises(DoesNotExist, other.reload)
self.assertRaises(DoesNotExist, other2.reload)
def test_reverse_delete_rule_cascade_self_referencing(self):
"""Ensure self-referencing CASCADE deletes do not result in infinite
loop
@@ -4071,6 +4125,15 @@ class QuerySetTest(unittest.TestCase):
ak = list(Bar.objects(foo__match=Foo(shape="square", color="purple")))
self.assertEqual([b1], ak)
ak = list(
Bar.objects(foo__elemMatch={'shape': "square", "color__exists": True}))
self.assertEqual([b1, b2], ak)
ak = list(
Bar.objects(foo__match={'shape': "square", "color__exists": True}))
self.assertEqual([b1, b2], ak)
def test_upsert_includes_cls(self):
"""Upserts should include _cls information for inheritable classes
"""
@@ -4111,7 +4174,11 @@ class QuerySetTest(unittest.TestCase):
def test_read_preference(self):
class Bar(Document):
pass
txt = StringField()
meta = {
'indexes': [ 'txt' ]
}
Bar.drop_collection()
bars = list(Bar.objects(read_preference=ReadPreference.PRIMARY))
@@ -4123,9 +4190,51 @@ class QuerySetTest(unittest.TestCase):
error_class = TypeError
self.assertRaises(error_class, Bar.objects, read_preference='Primary')
# read_preference as a kwarg
bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
# read_preference as a query set method
bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
# read_preference after skip
bars = Bar.objects.skip(1) \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
# read_preference after limit
bars = Bar.objects.limit(1) \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
# read_preference after order_by
bars = Bar.objects.order_by('txt') \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
# read_preference after hint
bars = Bar.objects.hint([('txt', 1)]) \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
def test_json_simple(self):
@@ -4749,6 +4858,8 @@ class QuerySetTest(unittest.TestCase):
class ScottishCat(Cat):
folded_ears = BooleanField()
Animal.drop_collection()
Animal(is_mamal=False).save()
Cat(is_mamal=True, whiskers_length=5.1).save()
ScottishCat(is_mamal=True, folded_ears=True).save()

View File

@@ -51,6 +51,14 @@ class ConnectionTest(unittest.TestCase):
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
def test_disconnect(self):
"""Ensure that the disconnect() method works properly
"""
conn1 = connect('mongoenginetest')
mongoengine.connection.disconnect()
conn2 = connect('mongoenginetest')
self.assertTrue(conn1 is not conn2)
def test_sharing_connections(self):
"""Ensure that connections are shared when the connection settings are exactly the same
"""

View File

@@ -1,11 +1,13 @@
[tox]
envlist = {py26,py27,py32,py33,py34,pypy,pypy3}-{mg27,mg28}
envlist = {py26,py27,py32,py33,py34,py35,pypy,pypy3}-{mg27,mg28}
#envlist = {py26,py27,py32,py33,py34,pypy,pypy3}-{mg27,mg28,mg30,mgdev}
[testenv]
commands =
python setup.py nosetests {posargs}
deps =
nose
rednose
mg27: PyMongo<2.8
mg28: PyMongo>=2.8,<3.0
mg30: PyMongo>=3.0