Compare commits

237 Commits

*(The per-commit table — Author | SHA1 | Date — did not survive extraction; only bare SHA1 hashes remained, running from 88c4aa2d87 through 54d8c64ad5. The file diffs follow.)*
.install_mongodb_on_travis.sh

```diff
@@ -3,12 +3,7 @@
 sudo apt-get remove mongodb-org-server
 sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
 
-if [ "$MONGODB" = "2.4" ]; then
-  echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
-  sudo apt-get update
-  sudo apt-get install mongodb-10gen=2.4.14
-  sudo service mongodb start
-elif [ "$MONGODB" = "2.6" ]; then
+if [ "$MONGODB" = "2.6" ]; then
   echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
   sudo apt-get update
   sudo apt-get install mongodb-org-server=2.6.12
@@ -18,8 +13,20 @@ elif [ "$MONGODB" = "3.0" ]; then
   sudo apt-get update
   sudo apt-get install mongodb-org-server=3.0.14
   # service should be started automatically
+elif [ "$MONGODB" = "3.2" ]; then
+  sudo apt-key adv --keyserver keyserver.ubuntu.com --recv EA312927
+  echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list
+  sudo apt-get update
+  sudo apt-get install mongodb-org-server=3.2.20
+  # service should be started automatically
+elif [ "$MONGODB" = "3.4" ]; then
+  sudo apt-key adv --keyserver keyserver.ubuntu.com:80 --recv 0C49F3730359A14518585931BC711F9BA15703C6
+  echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list
+  sudo apt-get update
+  sudo apt-get install mongodb-org-server=3.4.17
+  # service should be started automatically
 else
-  echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
+  echo "Invalid MongoDB version, expected 2.6, 3.0, 3.2 or 3.4."
   exit 1
 fi;
```
.travis.yml (23 changed lines)

```diff
@@ -2,12 +2,11 @@
 # PyMongo combinations. However, that would result in an overly long build
 # with a very large number of jobs, hence we only test a subset of all the
 # combinations:
-# * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5.
-# * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x.
-# * MongoDB v3.0 is tested against PyMongo v3.x.
 # * MongoDB v2.6 is currently the "main" version tested against Python v2.7,
-#   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x.
-#
+#   v3.5, v3.6, PyPy, and PyMongo v3.x.
+# * MongoDB v3.0 & v3.2 are tested against Python v2.7, v3.5 & v3.6
+#   and Pymongo v3.5 & v3.x
+# * MongoDB v3.4 is tested against v3.6 and Pymongo v3.x
 # Reminder: Update README.rst if you change MongoDB versions we test.
 
 language: python
@@ -27,17 +26,15 @@ matrix:
 
   include:
   - python: 2.7
-    env: MONGODB=2.4 PYMONGO=3.5
-  - python: 2.7
-    env: MONGODB=3.0 PYMONGO=3.x
+    env: MONGODB=3.0 PYMONGO=3.5
   - python: 3.5
-    env: MONGODB=2.4 PYMONGO=3.5
-  - python: 3.5
-    env: MONGODB=3.0 PYMONGO=3.x
+    env: MONGODB=3.2 PYMONGO=3.x
   - python: 3.6
-    env: MONGODB=2.4 PYMONGO=3.5
+    env: MONGODB=3.0 PYMONGO=3.5
   - python: 3.6
-    env: MONGODB=3.0 PYMONGO=3.x
+    env: MONGODB=3.2 PYMONGO=3.x
+  - python: 3.6
+    env: MONGODB=3.4 PYMONGO=3.x
 
 before_install:
 - bash .install_mongodb_on_travis.sh
```
AUTHORS (3 changed lines)

```diff
@@ -246,3 +246,6 @@ that much better:
 * Renjianxin (https://github.com/Davidrjx)
 * Erdenezul Batmunkh (https://github.com/erdenezul)
 * Andy Yankovsky (https://github.com/werat)
+* Bastien Gérard (https://github.com/bagerard)
+* Trevor Hall (https://github.com/tjhall13)
+* Gleb Voropaev (https://github.com/buggyspace)
```
```diff
@@ -22,8 +22,11 @@ Supported Interpreters
 
 MongoEngine supports CPython 2.7 and newer. Language
 features not supported by all interpreters can not be used.
-Please also ensure that your code is properly converted by
-`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
+The codebase is written in python 2 so you must be using python 2
+when developing new features. Compatibility of the library with Python 3
+relies on the 2to3 package that gets executed as part of the installation
+build. You should ensure that your code is properly converted by
+`2to3 <http://docs.python.org/library/2to3.html>`_.
 
 Style Guide
 -----------
```
README.rst (12 changed lines)

```diff
@@ -26,19 +26,21 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.
 
 Supported MongoDB Versions
 ==========================
-MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future
+MongoEngine is currently tested against MongoDB v2.6, v3.0, v3.2 and v3.4. Future
 versions should be supported as well, but aren't actively tested at the moment.
 Make sure to open an issue or submit a pull request if you experience any
-problems with MongoDB v3.2+.
+problems with MongoDB v3.4+.
 
 Installation
 ============
 We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
 `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
 You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
-and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the
-source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
-setup.py install``.
+and thus you can use ``easy_install -U mongoengine``. Another option is
+`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
+to both create the virtual environment and install the package. Otherwise, you can
+download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
+run ``python setup.py install``.
 
 Dependencies
 ============
```
```diff
@@ -2,8 +2,77 @@
 Changelog
 =========
 
-dev
-===
+Development
+===========
 - (Fill this out as you fix issues and develop your features).
 
+Changes in 0.17.0
+=================
+- Fix .only() working improperly after using .count() of the same instance of QuerySet
+- Fix batch_size that was not copied when cloning a queryset object #2011
+- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (_cls, _id) when using `QuerySet.as_pymongo` #1976
+- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time (#1995)
+- Fix InvalidStringData error when using modify on a BinaryField #1127
+- DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of mongoengine #1552
+- Fix test suite and CI to support MongoDB 3.4 #1445
+
+=================
+Changes in 0.16.3
+=================
+- Fix $push with $position operator not working with lists in embedded document #1965
+
+=================
+Changes in 0.16.2
+=================
+- Fix .save() that fails when called with write_concern=None (regression of 0.16.1) #1958
+
+=================
+Changes in 0.16.1
+=================
+- Fix `_cls` that is not set properly in Document constructor (regression) #1950
+- Fix bug in _delta method - Update of a ListField depends on an unrelated dynamic field update #1733
+- Remove deprecated `save()` method and used `insert_one()` #1899
+
+=================
+Changes in 0.16.0
+=================
+- Various improvements to the doc
+- Improvement to code quality
+- POTENTIAL BREAKING CHANGES:
+    - EmbeddedDocumentField will no longer accept references to Document classes in its constructor #1661
+    - Get rid of the `basecls` parameter from the DictField constructor (dead code) #1876
+    - default value of ComplexDateTime is now None (and no longer the current datetime) #1368
+- Fix unhashable TypeError when referencing a Document with a compound key in an EmbeddedDocument #1685
+- Fix bug where an EmbeddedDocument with the same id as its parent would not be tracked for changes #1768
+- Fix the fact that bulk `insert()` was not setting primary keys of inserted documents instances #1919
+- Fix bug when referencing the abstract class in a ReferenceField #1920
+- Allow modification to the document made in pre_save_post_validation to be taken into account #1202
+- Replaced MongoDB 2.4 tests in CI by MongoDB 3.2 #1903
+- Fix side effects of using queryset.`no_dereference` on other documents #1677
+- Fix TypeError when using lazy django translation objects as translated choices #1879
+- Improve 2-3 codebase compatibility #1889
+- Fix the support for changing the default value of ComplexDateTime #1368
+- Improves error message in case an EmbeddedDocumentListField receives an EmbeddedDocument instance
+  instead of a list #1877
+- Fix the Decimal operator inc/dec #1517 #1320
+- Ignore killcursors queries in `query_counter` context manager #1869
+- Fix the fact that `query_counter` was modifying the initial profiling_level in case it was != 0 #1870
+- Repaired the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions #1865
+- Fix index creation error that was swallowed by hasattr under python2 #1688
+- QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611
+- bulk insert updates the ids of the input documents instances #1919
+- Fix an harmless bug related to GenericReferenceField where modifications in the generic-referenced document
+  were tracked in the parent #1934
+- Improve validator of BinaryField #273
+- Implemented lazy regex compiling in Field classes to improve 'import mongoengine' performance #1806
+- Updated GridFSProxy.__str__ so that it would always print both the filename and grid_id #710
+- Add __repr__ to Q and QCombination #1843
+- fix bug in BaseList.__iter__ operator (was occuring when modifying a BaseList while iterating over it) #1676
+- Added field `DateField`#513
+
 Changes in 0.15.3
 =================
 - BREAKING CHANGES: `Queryset.update/update_one` methods now returns an UpdateResult when `full_result=True` is provided and no longer a dict (relates to #1491)
 - Subfield resolve error in generic_emdedded_document query #1651 #1652
 - use each modifier only with $position #1673 #1675
 - Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704
```
```diff
@@ -45,27 +45,27 @@ post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs'
 post2.tags = ['mongoengine']
 post2.save()
 
-print 'ALL POSTS'
-print
+print('ALL POSTS')
+print()
 for post in Post.objects:
-    print post.title
+    print(post.title)
     #print '=' * post.title.count()
-    print "=" * 20
+    print("=" * 20)
 
     if isinstance(post, TextPost):
-        print post.content
+        print(post.content)
 
     if isinstance(post, LinkPost):
-        print 'Link:', post.link_url
+        print('Link:', post.link_url)
 
-    print
-    print
+    print()
+    print()
 
-print 'POSTS TAGGED \'MONGODB\''
-print
+print('POSTS TAGGED \'MONGODB\'')
+print()
 for post in Post.objects(tags='mongodb'):
-    print post.title
-    print
+    print(post.title)
+    print()
 
 num_posts = Post.objects(tags='mongodb').count()
-print 'Found %d posts with tag "mongodb"' % num_posts
+print('Found %d posts with tag "mongodb"' % num_posts)
```
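This hunk is a straight Python 2 → 3 conversion of the tutorial's `print` statements. As a side note, examples like this can also stay runnable under both interpreters (relevant because the contributing hunk above says the codebase itself is Python 2 run through 2to3) — a minimal sketch, not part of the PR:

```python
# Opt in to the print() function on Python 2.7 so print-function code
# like the converted tutorial above runs unchanged on either interpreter.
from __future__ import print_function

titles = ['Fun with MongoEngine', 'MongoEngine Documentation']
for title in titles:
    print(title)
    print('=' * 20)
```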
```diff
@@ -18,10 +18,10 @@ provide the :attr:`host` and :attr:`port` arguments to
 
     connect('project1', host='192.168.1.35', port=12345)
 
-If the database requires authentication, :attr:`username` and :attr:`password`
-arguments should be provided::
+If the database requires authentication, :attr:`username`, :attr:`password`
+and :attr:`authentication_source` arguments should be provided::
 
-    connect('project1', username='webapp', password='pwd123')
+    connect('project1', username='webapp', password='pwd123', authentication_source='admin')
 
 URI style connections are also supported -- just supply the URI as
 the :attr:`host` to
```
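A slightly fuller sketch of the documented `authentication_source` flow — the account lives in the `admin` database while the data lives in `project1`. Names and credentials are illustrative, and a local mongod with auth enabled is assumed:

```python
from mongoengine import Document, StringField, connect

# Authenticate against 'admin' (where the user was created with
# db.createUser), while reading and writing data in 'project1'.
connect('project1', username='webapp', password='pwd123',
        authentication_source='admin')

class Greeting(Document):
    text = StringField()

Greeting(text='hello').save()
```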
```diff
@@ -85,6 +85,7 @@ are as follows:
 * :class:`~mongoengine.fields.ImageField`
 * :class:`~mongoengine.fields.IntField`
 * :class:`~mongoengine.fields.ListField`
+* :class:`~mongoengine.fields.LongField`
 * :class:`~mongoengine.fields.MapField`
 * :class:`~mongoengine.fields.ObjectIdField`
 * :class:`~mongoengine.fields.ReferenceField`
@@ -155,7 +156,7 @@ arguments can be set on all fields:
 An iterable (e.g. list, tuple or set) of choices to which the value of this
 field should be limited.
 
-Can be either be a nested tuples of value (stored in mongo) and a
+Can either be nested tuples of value (stored in mongo) and a
 human readable key ::
 
     SIZE = (('S', 'Small'),
```
```diff
@@ -492,7 +493,9 @@ the field name with a **#**::
         ]
     }
 
-If a dictionary is passed then the following options are available:
+If a dictionary is passed then additional options become available. Valid options include,
+but are not limited to:
+
 
 :attr:`fields` (Default: None)
     The fields to index. Specified in the same format as described above.
@@ -513,8 +516,15 @@ If a dictionary is passed then the following options are available:
     Allows you to automatically expire data from a collection by setting the
     time in seconds to expire the a field.
 
+:attr:`name` (Optional)
+    Allows you to specify a name for the index
+
+:attr:`collation` (Optional)
+    Allows to create case insensitive indexes (MongoDB v3.4+ only)
+
 .. note::
 
+    Additional options are forwarded as **kwargs to pymongo's create_index method.
     Inheritance adds extra fields indices see: :ref:`document-inheritance`.
 
 Global index default options
@@ -526,7 +536,7 @@ There are a few top level defaults for all indexes that can be set::
         title = StringField()
         rating = StringField()
         meta = {
-            'index_options': {},
+            'index_opts': {},
             'index_background': True,
             'index_cls': False,
             'auto_create_index': True,
@@ -534,8 +544,8 @@ There are a few top level defaults for all indexes that can be set::
         }
 
 
-:attr:`index_options` (Optional)
-    Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_
+:attr:`index_opts` (Optional)
+    Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_
 
 :attr:`index_background` (Optional)
     Set the default value for if an index should be indexed in the background
@@ -551,8 +561,7 @@ There are a few top level defaults for all indexes that can be set::
 
 :attr:`index_drop_dups` (Optional)
     Set the default value for if an index should drop duplicates
-
-    .. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning
+    Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning
     and has no effect
```
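Taken together, the new options look roughly like this in a document's meta. This is a sketch, not code from the PR; the `{'locale': ..., 'strength': 2}` collation dict is an assumption based on pymongo's collation format, to which the note above says these options are forwarded:

```python
from mongoengine import Document, StringField

class Book(Document):
    title = StringField()

    meta = {
        'indexes': [
            {
                'fields': ['title'],
                'name': 'title_case_insensitive_idx',  # new `name` option
                # Case-insensitive index (MongoDB v3.4+): forwarded as a
                # **kwarg to pymongo's create_index.
                'collation': {'locale': 'en', 'strength': 2},
            },
        ],
    }
```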
```diff
@@ -734,6 +743,9 @@ document.::
 .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
     to False, meaning you must set it to True to use inheritance.
 
+    Setting :attr:`allow_inheritance` to True should also be used in
+    :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it
+
 Working with existing data
 --------------------------
 As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
```
```diff
@@ -57,7 +57,8 @@ document values for example::
 
     def clean(self):
         """Ensures that only published essays have a `pub_date` and
-        automatically sets the pub_date if published and not set"""
+        automatically sets `pub_date` if essay is published and `pub_date`
+        is not set"""
         if self.status == 'Draft' and self.pub_date is not None:
             msg = 'Draft entries should not have a publication date.'
             raise ValidationError(msg)
```
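For context, the `clean` hook in this hunk belongs to an Essay document along these lines (reconstructed from the guide; the field definitions are assumptions, not shown in the diff):

```python
import datetime

from mongoengine import DateTimeField, Document, StringField, ValidationError

class Essay(Document):
    status = StringField(choices=('Published', 'Draft'), required=True)
    pub_date = DateTimeField()

    def clean(self):
        """Ensures that only published essays have a `pub_date` and
        automatically sets `pub_date` if essay is published and `pub_date`
        is not set"""
        if self.status == 'Draft' and self.pub_date is not None:
            msg = 'Draft entries should not have a publication date.'
            raise ValidationError(msg)
        # Set the pub_date for published items if not set.
        if self.status == 'Published' and self.pub_date is None:
            self.pub_date = datetime.datetime.now()
```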
```diff
@@ -53,7 +53,8 @@ Deletion
 
 Deleting stored files is achieved with the :func:`delete` method::
 
-    marmot.photo.delete()
+    marmot.photo.delete()  # Deletes the GridFS document
+    marmot.save()          # Saves the GridFS reference (being None) contained in the marmot instance
 
 .. warning::
 
@@ -71,4 +72,5 @@ Files can be replaced with the :func:`replace` method. This works just like
 the :func:`put` method so even metadata can (and should) be replaced::
 
     another_marmot = open('another_marmot.png', 'rb')
-    marmot.photo.replace(another_marmot, content_type='image/png')
+    marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document
+    marmot.save()  # Replaces the GridFS reference contained in marmot instance
```
```diff
@@ -456,14 +456,14 @@ data. To turn off dereferencing of the results of a query use
 :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::
 
     post = Post.objects.no_dereference().first()
-    assert(isinstance(post.author, ObjectId))
+    assert(isinstance(post.author, DBRef))
 
 You can also turn off all dereferencing for a fixed period by using the
 :class:`~mongoengine.context_managers.no_dereference` context manager::
 
     with no_dereference(Post) as Post:
         post = Post.objects.first()
-        assert(isinstance(post.author, ObjectId))
+        assert(isinstance(post.author, DBRef))
 
         # Outside the context manager dereferencing occurs.
         assert(isinstance(post.author, User))
```
```diff
@@ -113,6 +113,10 @@ handlers within your subclass::
     signals.pre_save.connect(Author.pre_save, sender=Author)
     signals.post_save.connect(Author.post_save, sender=Author)
 
+.. warning::
+
+    Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently.
+
 Finally, you can also use this small decorator to quickly create a number of
 signals and attach them to your :class:`~mongoengine.Document` or
 :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
```
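A sketch of what the new warning allows versus forbids (assumes the optional blinker dependency that mongoengine's signals require; the Comment class is made up):

```python
from mongoengine import EmbeddedDocument, StringField, signals

class Comment(EmbeddedDocument):
    content = StringField()

    @classmethod
    def post_init_handler(cls, sender, document, **kwargs):
        print('initialised comment: %s' % document.content)

# Supported: EmbeddedDocument fires pre/post_init.
signals.post_init.connect(Comment.post_init_handler, sender=Comment)
Comment(content='hi')  # handler runs

# Not supported: connecting pre_save here would be silently ignored --
# attach save-related signals to Document classes only.
```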
```diff
@@ -6,6 +6,11 @@ Development
 ***********
 (Fill this out whenever you introduce breaking changes to MongoEngine)
 
+URLField's constructor no longer takes `verify_exists`
+
+0.15.0
+******
+
 0.14.0
 ******
 This release includes a few bug fixes and a significant code cleanup. The most
```
```diff
@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
            list(signals.__all__) + list(errors.__all__))
 
 
-VERSION = (0, 15, 0)
+VERSION = (0, 17, 0)
 
 
 def get_version():
```
```diff
@@ -3,10 +3,10 @@ from mongoengine.errors import NotRegistered
 __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
 
 
-UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'mul',
-                        'pop', 'push', 'push_all', 'pull',
-                        'pull_all', 'add_to_set', 'set_on_insert',
-                        'min', 'max', 'rename'])
+UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul',
+                    'pop', 'push', 'push_all', 'pull',
+                    'pull_all', 'add_to_set', 'set_on_insert',
+                    'min', 'max', 'rename'}
 
 
 _document_registry = {}
@@ -19,7 +19,7 @@ def get_document(name):
     # Possible old style name
     single_end = name.split('.')[-1]
     compound_end = '.%s' % single_end
-    possible_match = [k for k in _document_registry.keys()
+    possible_match = [k for k in _document_registry
                       if k.endswith(compound_end) or k == single_end]
     if len(possible_match) == 1:
         doc = _document_registry.get(possible_match.pop(), None)
```
```diff
@@ -1,13 +1,31 @@
 import itertools
 import weakref
 
 from bson import DBRef
 import six
+from six import iteritems
 
 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
 
-__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')
+__all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')
+
+
+def mark_as_changed_wrapper(parent_method):
+    """Decorators that ensures _mark_as_changed method gets called"""
+    def wrapper(self, *args, **kwargs):
+        result = parent_method(self, *args, **kwargs)  # Can't use super() in the decorator
+        self._mark_as_changed()
+        return result
+    return wrapper
+
+
+def mark_key_as_changed_wrapper(parent_method):
+    """Decorators that ensures _mark_as_changed method gets called with the key argument"""
+    def wrapper(self, key, *args, **kwargs):
+        result = parent_method(self, key, *args, **kwargs)  # Can't use super() in the decorator
+        self._mark_as_changed(key)
+        return result
+    return wrapper
 
 
 class BaseDict(dict):
```
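The two decorators above replace roughly a dozen hand-written `self._mark_as_changed(); return super(...)` overrides in the BaseDict/BaseList hunks below. The pattern in isolation, on a toy class (not mongoengine code):

```python
def mark_as_changed_wrapper(parent_method):
    """Call the wrapped mutator, then record that a change happened."""
    def wrapper(self, *args, **kwargs):
        result = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
        return result
    return wrapper


class TrackedList(list):
    changed = False

    def _mark_as_changed(self):
        self.changed = True

    # One line per mutator instead of a full def for each.
    append = mark_as_changed_wrapper(list.append)
    pop = mark_as_changed_wrapper(list.pop)


items = TrackedList([1, 2])
items.append(3)
assert items.changed  # the mutation was recorded
```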
```diff
@@ -18,46 +36,36 @@ class BaseDict(dict):
     _name = None
 
     def __init__(self, dict_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class('BaseDocument')
 
-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
         super(BaseDict, self).__init__(dict_items)
 
-    def __getitem__(self, key, *args, **kwargs):
+    def get(self, key, default=None):
+        # get does not use __getitem__ by default so we must override it as well
+        try:
+            return self.__getitem__(key)
+        except KeyError:
+            return default
+
+    def __getitem__(self, key):
         value = super(BaseDict, self).__getitem__(key)
 
         EmbeddedDocument = _import_class('EmbeddedDocument')
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
             value = BaseDict(value, None, '%s.%s' % (self._name, key))
             super(BaseDict, self).__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
+        elif isinstance(value, list) and not isinstance(value, BaseList):
             value = BaseList(value, None, '%s.%s' % (self._name, key))
             super(BaseDict, self).__setitem__(key, value)
             value._instance = self._instance
         return value
 
-    def __setitem__(self, key, value, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__setitem__(key, value)
-
-    def __delete__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).__delete__(*args, **kwargs)
-
-    def __delitem__(self, key, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__delitem__(key)
-
-    def __delattr__(self, key, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__delattr__(key)
-
     def __getstate__(self):
         self.instance = None
         self._dereferenced = False
@@ -67,25 +75,14 @@ class BaseDict(dict):
         self = state
         return self
 
-    def clear(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).clear()
-
-    def pop(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).pop(*args, **kwargs)
-
-    def popitem(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).popitem()
-
-    def setdefault(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).setdefault(*args, **kwargs)
-
-    def update(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).update(*args, **kwargs)
+    __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__)
+    __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__)
+    __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__)
+    pop = mark_as_changed_wrapper(dict.pop)
+    clear = mark_as_changed_wrapper(dict.clear)
+    update = mark_as_changed_wrapper(dict.update)
+    popitem = mark_as_changed_wrapper(dict.popitem)
+    setdefault = mark_as_changed_wrapper(dict.setdefault)
 
     def _mark_as_changed(self, key=None):
         if hasattr(self._instance, '_mark_as_changed'):
```
```diff
@@ -103,52 +100,39 @@ class BaseList(list):
     _name = None
 
     def __init__(self, list_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class('BaseDocument')
 
-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
         super(BaseList, self).__init__(list_items)
 
-    def __getitem__(self, key, *args, **kwargs):
+    def __getitem__(self, key):
         value = super(BaseList, self).__getitem__(key)
 
+        if isinstance(key, slice):
+            # When receiving a slice operator, we don't convert the structure and bind
+            # to parent's instance. This is buggy for now but would require more work to be handled properly
+            return value
+
         EmbeddedDocument = _import_class('EmbeddedDocument')
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            # Replace dict by BaseDict
             value = BaseDict(value, None, '%s.%s' % (self._name, key))
             super(BaseList, self).__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
+        elif isinstance(value, list) and not isinstance(value, BaseList):
+            # Replace list by BaseList
            value = BaseList(value, None, '%s.%s' % (self._name, key))
             super(BaseList, self).__setitem__(key, value)
             value._instance = self._instance
         return value
 
     def __iter__(self):
-        for i in six.moves.range(self.__len__()):
-            yield self[i]
-
-    def __setitem__(self, key, value, *args, **kwargs):
-        if isinstance(key, slice):
-            self._mark_as_changed()
-        else:
-            self._mark_as_changed(key)
-        return super(BaseList, self).__setitem__(key, value)
-
-    def __delitem__(self, key, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__delitem__(key)
-
-    def __setslice__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__setslice__(*args, **kwargs)
-
-    def __delslice__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__delslice__(*args, **kwargs)
+        for v in super(BaseList, self).__iter__():
+            yield v
 
     def __getstate__(self):
         self.instance = None
@@ -159,41 +143,40 @@ class BaseList(list):
         self = state
         return self
 
-    def __iadd__(self, other):
-        self._mark_as_changed()
-        return super(BaseList, self).__iadd__(other)
+    def __setitem__(self, key, value):
+        changed_key = key
+        if isinstance(key, slice):
+            # In case of slice, we don't bother to identify the exact elements being updated
+            # instead, we simply marks the whole list as changed
+            changed_key = None
 
-    def __imul__(self, other):
-        self._mark_as_changed()
-        return super(BaseList, self).__imul__(other)
+        result = super(BaseList, self).__setitem__(key, value)
+        self._mark_as_changed(changed_key)
+        return result
 
-    def append(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).append(*args, **kwargs)
-
-    def extend(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).extend(*args, **kwargs)
-
-    def insert(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).insert(*args, **kwargs)
-
-    def pop(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).pop(*args, **kwargs)
-
-    def remove(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).remove(*args, **kwargs)
-
-    def reverse(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).reverse()
-
-    def sort(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).sort(*args, **kwargs)
+    append = mark_as_changed_wrapper(list.append)
+    extend = mark_as_changed_wrapper(list.extend)
+    insert = mark_as_changed_wrapper(list.insert)
+    pop = mark_as_changed_wrapper(list.pop)
+    remove = mark_as_changed_wrapper(list.remove)
+    reverse = mark_as_changed_wrapper(list.reverse)
+    sort = mark_as_changed_wrapper(list.sort)
+    __delitem__ = mark_as_changed_wrapper(list.__delitem__)
+    __iadd__ = mark_as_changed_wrapper(list.__iadd__)
+    __imul__ = mark_as_changed_wrapper(list.__imul__)
+
+    if six.PY2:
+        # Under py3 __setslice__, __delslice__ and __getslice__
+        # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter
+        # so we mimic this under python 2
+        def __setslice__(self, i, j, sequence):
+            return self.__setitem__(slice(i, j), sequence)
+
+        def __delslice__(self, i, j):
+            return self.__delitem__(slice(i, j))
+
+        def __getslice__(self, i, j):
+            return self.__getitem__(slice(i, j))
 
     def _mark_as_changed(self, key=None):
         if hasattr(self._instance, '_mark_as_changed'):
```
```diff
@@ -207,6 +190,10 @@ class BaseList(list):
 
 class EmbeddedDocumentList(BaseList):
 
+    def __init__(self, list_items, instance, name):
+        super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
+        self._instance = instance
+
     @classmethod
     def __match_all(cls, embedded_doc, kwargs):
         """Return True if a given embedded doc matches all the filter
@@ -225,15 +212,14 @@ class EmbeddedDocumentList(BaseList):
             return embedded_docs
         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]
 
-    def __init__(self, list_items, instance, name):
-        super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
-        self._instance = instance
-
     def filter(self, **kwargs):
         """
         Filters the list by only including embedded documents with the
         given keyword arguments.
 
+        This method only supports simple comparison (e.g: .filter(name='John Doe'))
+        and does not support operators like __gte, __lte, __icontains like queryset.filter does
+
         :param kwargs: The keyword arguments corresponding to the fields to
             filter on. *Multiple arguments are treated as if they are ANDed
             together.*
@@ -374,11 +360,11 @@ class EmbeddedDocumentList(BaseList):
 
 class StrictDict(object):
     __slots__ = ()
-    _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
+    _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'}
    _classes = {}
 
     def __init__(self, **kwargs):
-        for k, v in kwargs.iteritems():
+        for k, v in iteritems(kwargs):
             setattr(self, k, v)
 
     def __getitem__(self, key):
@@ -426,7 +412,7 @@ class StrictDict(object):
         return (key for key in self.__slots__ if hasattr(self, key))
 
     def __len__(self):
-        return len(list(self.iteritems()))
+        return len(list(iteritems(self)))
 
     def __eq__(self, other):
         return self.items() == other.items()
```
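The `kwargs.iteritems()` → `iteritems(kwargs)` substitutions in this file (and throughout `base/document.py` below) are the portability half of the change: `dict.iteritems()` only exists on Python 2. The six helper in one line:

```python
from six import iteritems

data = {'a': 1, 'b': 2}

# Dispatches to d.iteritems() on Python 2 and d.items() on Python 3,
# so the same source runs on both interpreters.
for key, value in iteritems(data):
    print(key, value)
```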
```diff
@@ -1,13 +1,11 @@
 import copy
 import numbers
-from collections import Hashable
 from functools import partial
 
-from bson import ObjectId, json_util
-from bson.dbref import DBRef
-from bson.son import SON
+from bson import DBRef, ObjectId, SON, json_util
 import pymongo
 import six
+from six import iteritems
 
 from mongoengine import signals
 from mongoengine.base.common import get_document
@@ -19,6 +17,7 @@ from mongoengine.base.fields import ComplexBaseField
 from mongoengine.common import _import_class
 from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
                                 LookUpError, OperationError, ValidationError)
+from mongoengine.python_support import Hashable
 
 __all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
```
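Moving `Hashable` from a direct `collections` import into `mongoengine.python_support` suggests a compatibility shim: the ABCs live under `collections.abc` since Python 3.3, and direct access through `collections` was deprecated and later removed. A plausible sketch of such a shim — the actual contents of `python_support` are not shown in this diff:

```python
try:
    # Python 3.3+ location of the abstract base classes.
    from collections.abc import Hashable
except ImportError:
    # Python 2 fallback.
    from collections import Hashable
```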
```diff
@@ -85,7 +84,7 @@ class BaseDocument(object):
             self._dynamic_fields = SON()
 
         # Assign default values to instance
-        for key, field in self._fields.iteritems():
+        for key, field in iteritems(self._fields):
             if self._db_field_map.get(key, key) in __only_fields:
                 continue
             value = getattr(self, key, None)
@@ -97,16 +96,14 @@ class BaseDocument(object):
         # Set passed values after initialisation
         if self._dynamic:
             dynamic_data = {}
-            for key, value in values.iteritems():
+            for key, value in iteritems(values):
                 if key in self._fields or key == '_id':
                     setattr(self, key, value)
-                elif self._dynamic:
+                else:
                     dynamic_data[key] = value
         else:
             FileField = _import_class('FileField')
-            for key, value in values.iteritems():
-                if key == '__auto_convert':
-                    continue
+            for key, value in iteritems(values):
                 key = self._reverse_db_field_map.get(key, key)
                 if key in self._fields or key in ('id', 'pk', '_cls'):
                     if __auto_convert and value is not None:
@@ -122,7 +119,7 @@ class BaseDocument(object):
 
         if self._dynamic:
             self._dynamic_lock = False
-            for key, value in dynamic_data.iteritems():
+            for key, value in iteritems(dynamic_data):
                 setattr(self, key, value)
 
         # Flag initialised
```
```diff
@@ -304,7 +301,7 @@ class BaseDocument(object):
             data['_cls'] = self._class_name
 
         # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
-        root_fields = set([f.split('.')[0] for f in fields])
+        root_fields = {f.split('.')[0] for f in fields}
 
         for field_name in self:
             if root_fields and field_name not in root_fields:
```
```diff
@@ -406,7 +403,15 @@ class BaseDocument(object):
 
     @classmethod
     def from_json(cls, json_data, created=False):
-        """Converts json data to an unsaved document instance"""
+        """Converts json data to a Document instance
+
+        :param json_data: The json data to load into the Document
+        :param created: If True, the document will be considered as a brand new document
+                        If False and an id is provided, it will consider that the data being
+                        loaded corresponds to what's already in the database (This has an impact of subsequent call to .save())
+                        If False and no id is provided, it will consider the data as a new document
+                        (default ``False``)
+        """
         return cls._from_son(json_util.loads(json_data), created=created)
 
     def __expand_dynamic_values(self, name, value):
```
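A usage sketch of the clarified `created` semantics (the Person document and the ObjectId are made up for illustration):

```python
from mongoengine import Document, StringField

class Person(Document):
    name = StringField()

json_data = '{"_id": {"$oid": "5f2f0ab9e6a4b0aa0fb4d1d8"}, "name": "Bob"}'

# created=False (default) with an id present: the instance is treated as
# already persisted, so a later .save() updates that id in the database.
existing = Person.from_json(json_data)

# created=True: the instance is treated as brand new; .save() inserts it.
fresh = Person.from_json(json_data, created=True)
```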
```diff
@@ -497,76 +502,74 @@ class BaseDocument(object):
 
         self._changed_fields = []
 
-    def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
-        # Loop list / dict fields as they contain documents
+    def _nestable_types_changed_fields(self, changed_fields, base_key, data):
+        """Inspect nested data for changed fields
+
+        :param changed_fields: Previously collected changed fields
+        :param base_key: The base key that must be used to prepend changes to this data
+        :param data: data to inspect for changes
+        """
         # Determine the iterator to use
         if not hasattr(data, 'items'):
             iterator = enumerate(data)
         else:
-            iterator = data.iteritems()
+            iterator = iteritems(data)
 
-        for index, value in iterator:
-            list_key = '%s%s.' % (key, index)
+        for index_or_key, value in iterator:
+            item_key = '%s%s.' % (base_key, index_or_key)
             # don't check anything lower if this key is already marked
             # as changed.
-            if list_key[:-1] in changed_fields:
+            if item_key[:-1] in changed_fields:
                 continue
 
             if hasattr(value, '_get_changed_fields'):
-                changed = value._get_changed_fields(inspected)
-                changed_fields += ['%s%s' % (list_key, k)
-                                   for k in changed if k]
+                changed = value._get_changed_fields()
+                changed_fields += ['%s%s' % (item_key, k) for k in changed if k]
             elif isinstance(value, (list, tuple, dict)):
                 self._nestable_types_changed_fields(
-                    changed_fields, list_key, value, inspected)
+                    changed_fields, item_key, value)
 
-    def _get_changed_fields(self, inspected=None):
+    def _get_changed_fields(self):
         """Return a list of all fields that have explicitly been changed.
         """
         EmbeddedDocument = _import_class('EmbeddedDocument')
-        DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument')
         ReferenceField = _import_class('ReferenceField')
+        GenericReferenceField = _import_class('GenericReferenceField')
         SortedListField = _import_class('SortedListField')
 
         changed_fields = []
         changed_fields += getattr(self, '_changed_fields', [])
 
-        inspected = inspected or set()
-        if hasattr(self, 'id') and isinstance(self.id, Hashable):
-            if self.id in inspected:
-                return changed_fields
-            inspected.add(self.id)
-
         for field_name in self._fields_ordered:
             db_field_name = self._db_field_map.get(field_name, field_name)
             key = '%s.' % db_field_name
             data = self._data.get(field_name, None)
             field = self._fields.get(field_name)
 
-            if hasattr(data, 'id'):
-                if data.id in inspected:
-                    continue
-            if isinstance(field, ReferenceField):
+            if db_field_name in changed_fields:
+                # Whole field already marked as changed, no need to go further
                 continue
-            elif (
-                isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and
-                db_field_name not in changed_fields
-            ):
+
+            if isinstance(field, ReferenceField):  # Don't follow referenced documents
+                continue
+
+            if isinstance(data, EmbeddedDocument):
                 # Find all embedded fields that have been changed
-                changed = data._get_changed_fields(inspected)
+                changed = data._get_changed_fields()
                 changed_fields += ['%s%s' % (key, k) for k in changed if k]
-            elif (isinstance(data, (list, tuple, dict)) and
-                    db_field_name not in changed_fields):
+            elif isinstance(data, (list, tuple, dict)):
                 if (hasattr(field, 'field') and
-                        isinstance(field.field, ReferenceField)):
+                        isinstance(field.field, (ReferenceField, GenericReferenceField))):
                     continue
                 elif isinstance(field, SortedListField) and field._ordering:
                     # if ordering is affected whole list is changed
-                    if any(map(lambda d: field._ordering in d._changed_fields, data)):
+                    if any(field._ordering in d._changed_fields for d in data):
                         changed_fields.append(db_field_name)
                         continue
 
                 self._nestable_types_changed_fields(
-                    changed_fields, key, data, inspected)
+                    changed_fields, key, data)
         return changed_fields
 
     def _delta(self):
```
```diff
@@ -578,7 +581,6 @@ class BaseDocument(object):
 
         set_fields = self._get_changed_fields()
         unset_data = {}
-        parts = []
         if hasattr(self, '_changed_fields'):
             set_data = {}
             # Fetch each set item from its path
@@ -588,15 +590,13 @@ class BaseDocument(object):
                 new_path = []
                 for p in parts:
                     if isinstance(d, (ObjectId, DBRef)):
+                        # Don't dig in the references
                         break
-                    elif isinstance(d, list) and p.lstrip('-').isdigit():
-                        if p[0] == '-':
-                            p = str(len(d) + int(p))
-                        try:
-                            d = d[int(p)]
-                        except IndexError:
-                            d = None
+                    elif isinstance(d, list) and p.isdigit():
+                        # An item of a list (identified by its index) is updated
+                        d = d[int(p)]
                     elif hasattr(d, 'get'):
+                        # dict-like (dict, embedded document)
                         d = d.get(p)
                     new_path.append(p)
                 path = '.'.join(new_path)
@@ -608,26 +608,26 @@ class BaseDocument(object):
 
         # Determine if any changed items were actually unset.
         for path, value in set_data.items():
-            if value or isinstance(value, (numbers.Number, bool)):
+            if value or isinstance(value, (numbers.Number, bool)):  # Account for 0 and True that are truthy
                 continue
 
-            # If we've set a value that ain't the default value don't unset it.
-            default = None
+            parts = path.split('.')
+
             if (self._dynamic and len(parts) and parts[0] in
                     self._dynamic_fields):
                 del set_data[path]
                 unset_data[path] = 1
                 continue
-            elif path in self._fields:
+
+            # If we've set a value that ain't the default value don't unset it.
+            default = None
+            if path in self._fields:
                 default = self._fields[path].default
             else:  # Perform a full lookup for lists / embedded lookups
                 d = self
                 parts = path.split('.')
                 db_field_name = parts.pop()
                 for p in parts:
-                    if isinstance(d, list) and p.lstrip('-').isdigit():
-                        if p[0] == '-':
-                            p = str(len(d) + int(p))
+                    if isinstance(d, list) and p.isdigit():
                         d = d[int(p)]
                     elif (hasattr(d, '__getattribute__') and
                           not isinstance(d, dict)):
@@ -645,10 +645,9 @@ class BaseDocument(object):
                 default = None
 
             if default is not None:
-                if callable(default):
-                    default = default()
+                default = default() if callable(default) else default
 
-            if default != value:
+            if value != default:
                 continue
 
             del set_data[path]
```
```diff
@@ -680,7 +679,7 @@ class BaseDocument(object):
         # Convert SON to a data dict, making sure each key is a string and
         # corresponds to the right db field.
         data = {}
-        for key, value in son.iteritems():
+        for key, value in iteritems(son):
             key = str(key)
             key = cls._db_field_map.get(key, key)
             data[key] = value
@@ -694,9 +693,9 @@ class BaseDocument(object):
 
         fields = cls._fields
         if not _auto_dereference:
-            fields = copy.copy(fields)
+            fields = copy.deepcopy(fields)
 
-        for field_name, field in fields.iteritems():
+        for field_name, field in iteritems(fields):
             field._auto_dereference = _auto_dereference
             if field.db_field in data:
                 value = data[field.db_field]
@@ -717,7 +716,7 @@ class BaseDocument(object):
 
         # In STRICT documents, remove any keys that aren't in cls._fields
         if cls.STRICT:
-            data = {k: v for k, v in data.iteritems() if k in cls._fields}
+            data = {k: v for k, v in iteritems(data) if k in cls._fields}
 
         obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
         obj._changed_fields = changed_fields
@@ -1085,6 +1084,6 @@ class BaseDocument(object):
             sep = getattr(field, 'display_sep', ' ')
             values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value]
             return sep.join([
-                dict(field.choices).get(val, val)
+                six.text_type(dict(field.choices).get(val, val))
                 for val in values or []])
         return value
```
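The `copy.copy` → `copy.deepcopy` switch in `_from_son` matters because `_auto_dereference` is mutated on each field object right after the copy; a shallow copy shares the field instances with the class-level `cls._fields`, so the flag would leak into unrelated queries. A toy illustration of the difference (not mongoengine code):

```python
import copy

class Field(object):
    _auto_dereference = True

fields = {'author': Field()}

shallow = copy.copy(fields)           # new dict, same Field instance
shallow['author']._auto_dereference = False
assert fields['author']._auto_dereference is False  # leaked to the original

fields['author']._auto_dereference = True
deep = copy.deepcopy(fields)          # new dict, new Field instance
deep['author']._auto_dereference = False
assert fields['author']._auto_dereference is True   # original intact
```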
```diff
@@ -5,6 +5,7 @@ import weakref
 from bson import DBRef, ObjectId, SON
 import pymongo
 import six
+from six import iteritems
 
 from mongoengine.base.common import UPDATE_OPERATORS
 from mongoengine.base.datastructures import (BaseDict, BaseList,
@@ -55,7 +56,7 @@ class BaseField(object):
         field. Generally this is deprecated in favour of the
         `FIELD.validate` method
     :param choices: (optional) The valid choices
-    :param null: (optional) Is the field value can be null. If no and there is a default value
+    :param null: (optional) If the field value can be null. If no and there is a default value
         then the default value is set
     :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
         means that uniqueness won't be enforced for `None` values
@@ -130,7 +131,6 @@ class BaseField(object):
     def __set__(self, instance, value):
         """Descriptor for assigning a value to a field in a document.
         """
-
         # If setting to None and there is a default
         # Then set the value to the default value
         if value is None:
```
```diff
@@ -267,13 +267,15 @@ class ComplexBaseField(BaseField):
         ReferenceField = _import_class('ReferenceField')
         GenericReferenceField = _import_class('GenericReferenceField')
         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
-        dereference = (self._auto_dereference and
+
+        auto_dereference = instance._fields[self.name]._auto_dereference
+
+        dereference = (auto_dereference and
                        (self.field is None or isinstance(self.field,
                                                          (GenericReferenceField, ReferenceField))))
 
         _dereference = _import_class('DeReference')()
 
-        self._auto_dereference = instance._fields[self.name]._auto_dereference
         if instance._initialised and dereference and instance._data.get(self.name):
             instance._data[self.name] = _dereference(
                 instance._data.get(self.name), max_depth=1, instance=instance,
@@ -294,7 +296,7 @@ class ComplexBaseField(BaseField):
             value = BaseDict(value, instance, self.name)
             instance._data[self.name] = value
 
-        if (self._auto_dereference and instance._initialised and
+        if (auto_dereference and instance._initialised and
                 isinstance(value, (BaseList, BaseDict)) and
                 not value._dereferenced):
             value = _dereference(
@@ -313,11 +315,16 @@ class ComplexBaseField(BaseField):
         if hasattr(value, 'to_python'):
             return value.to_python()
 
+        BaseDocument = _import_class('BaseDocument')
+        if isinstance(value, BaseDocument):
+            # Something is wrong, return the value as it is
+            return value
+
         is_list = False
         if not hasattr(value, 'items'):
             try:
                 is_list = True
-                value = {k: v for k, v in enumerate(value)}
+                value = {idx: v for idx, v in enumerate(value)}
             except TypeError:  # Not iterable return the value
                 return value
 
```
```diff
@@ -376,11 +383,11 @@ class ComplexBaseField(BaseField):
         if self.field:
             value_dict = {
                 key: self.field._to_mongo_safe_call(item, use_db_field, fields)
-                for key, item in value.iteritems()
+                for key, item in iteritems(value)
             }
         else:
             value_dict = {}
-            for k, v in value.iteritems():
+            for k, v in iteritems(value):
                 if isinstance(v, Document):
                     # We need the id from the saved object to create the DBRef
                     if v.pk is None:
@@ -417,7 +424,7 @@ class ComplexBaseField(BaseField):
         errors = {}
         if self.field:
             if hasattr(value, 'iteritems') or hasattr(value, 'items'):
-                sequence = value.iteritems()
+                sequence = iteritems(value)
             else:
                 sequence = enumerate(value)
             for k, v in sequence:
@@ -502,7 +509,7 @@ class GeoJsonBaseField(BaseField):
     def validate(self, value):
         """Validate the GeoJson object based on its type."""
         if isinstance(value, dict):
-            if set(value.keys()) == set(['type', 'coordinates']):
+            if set(value.keys()) == {'type', 'coordinates'}:
                 if value['type'] != self._type:
                     self.error('%s type must be "%s"' %
                                (self._name, self._type))
```
```diff
@@ -1,6 +1,7 @@
 import warnings
 
 import six
+from six import iteritems, itervalues
 
 from mongoengine.base.common import _document_registry
 from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
@@ -18,14 +19,14 @@ class DocumentMetaclass(type):
     """Metaclass for all documents."""
 
     # TODO lower complexity of this method
-    def __new__(cls, name, bases, attrs):
-        flattened_bases = cls._get_bases(bases)
-        super_new = super(DocumentMetaclass, cls).__new__
+    def __new__(mcs, name, bases, attrs):
+        flattened_bases = mcs._get_bases(bases)
+        super_new = super(DocumentMetaclass, mcs).__new__
 
         # If a base class just call super
         metaclass = attrs.get('my_metaclass')
         if metaclass and issubclass(metaclass, DocumentMetaclass):
-            return super_new(cls, name, bases, attrs)
+            return super_new(mcs, name, bases, attrs)
 
         attrs['_is_document'] = attrs.get('_is_document', False)
         attrs['_cached_reference_fields'] = []
@@ -62,7 +63,7 @@ class DocumentMetaclass(type):
         # Standard object mixin - merge in any Fields
         if not hasattr(base, '_meta'):
             base_fields = {}
-            for attr_name, attr_value in base.__dict__.iteritems():
+            for attr_name, attr_value in iteritems(base.__dict__):
                 if not isinstance(attr_value, BaseField):
                     continue
                 attr_value.name = attr_name
@@ -74,7 +75,7 @@ class DocumentMetaclass(type):
 
         # Discover any document fields
         field_names = {}
-        for attr_name, attr_value in attrs.iteritems():
+        for attr_name, attr_value in iteritems(attrs):
             if not isinstance(attr_value, BaseField):
                 continue
             attr_value.name = attr_name
@@ -103,7 +104,7 @@ class DocumentMetaclass(type):
 
         attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
             (v.creation_counter, v.name)
-            for v in doc_fields.itervalues()))
+            for v in itervalues(doc_fields)))
 
         #
         # Set document hierarchy
@@ -121,7 +122,8 @@ class DocumentMetaclass(type):
             # inheritance of classes where inheritance is set to False
             allow_inheritance = base._meta.get('allow_inheritance')
             if not allow_inheritance and not base._meta.get('abstract'):
-                raise ValueError('Document %s may not be subclassed' %
+                raise ValueError('Document %s may not be subclassed. '
+                                 'To enable inheritance, use the "allow_inheritance" meta attribute.' %
                                  base.__name__)
 
         # Get superclasses from last base superclass
@@ -138,7 +140,7 @@ class DocumentMetaclass(type):
             attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types
 
         # Create the new_class
-        new_class = super_new(cls, name, bases, attrs)
+        new_class = super_new(mcs, name, bases, attrs)
 
         # Set _subclasses
         for base in document_bases:
@@ -147,7 +149,7 @@ class DocumentMetaclass(type):
             base._types = base._subclasses  # TODO depreciate _types
 
         (Document, EmbeddedDocument, DictField,
-         CachedReferenceField) = cls._import_classes()
+         CachedReferenceField) = mcs._import_classes()
 
         if issubclass(new_class, Document):
             new_class._collection = None
@@ -172,7 +174,7 @@ class DocumentMetaclass(type):
                 f.__dict__.update({'im_self': getattr(f, '__self__')})
 
         # Handle delete rules
-        for field in new_class._fields.itervalues():
+        for field in itervalues(new_class._fields):
             f = field
             if f.owner_document is None:
                 f.owner_document = new_class
```
@@ -219,29 +221,26 @@ class DocumentMetaclass(type):
|
||||
|
||||
return new_class
|
||||
|
||||
def add_to_class(self, name, value):
|
||||
setattr(self, name, value)
|
||||
|
||||
@classmethod
|
||||
def _get_bases(cls, bases):
|
||||
def _get_bases(mcs, bases):
|
||||
if isinstance(bases, BasesTuple):
|
||||
return bases
|
||||
seen = []
|
||||
bases = cls.__get_bases(bases)
|
||||
bases = mcs.__get_bases(bases)
|
||||
unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
|
||||
return BasesTuple(unique_bases)
|
||||
|
||||
@classmethod
|
||||
def __get_bases(cls, bases):
|
||||
def __get_bases(mcs, bases):
|
||||
for base in bases:
|
||||
if base is object:
|
||||
continue
|
||||
yield base
|
||||
for child_base in cls.__get_bases(base.__bases__):
|
||||
for child_base in mcs.__get_bases(base.__bases__):
|
||||
yield child_base
|
||||
|
||||
@classmethod
|
||||
def _import_classes(cls):
|
||||
def _import_classes(mcs):
|
||||
Document = _import_class('Document')
|
||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||
DictField = _import_class('DictField')
|
||||
@@ -254,9 +253,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
collection in the database.
|
||||
"""
|
||||
|
||||
def __new__(cls, name, bases, attrs):
|
||||
flattened_bases = cls._get_bases(bases)
|
||||
super_new = super(TopLevelDocumentMetaclass, cls).__new__
|
||||
def __new__(mcs, name, bases, attrs):
|
||||
flattened_bases = mcs._get_bases(bases)
|
||||
super_new = super(TopLevelDocumentMetaclass, mcs).__new__
|
||||
|
||||
# Set default _meta data if base class, otherwise get user defined meta
|
||||
if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
|
||||
@@ -319,7 +318,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
not parent_doc_cls._meta.get('abstract', False)):
|
||||
msg = 'Abstract document cannot have non-abstract base'
|
||||
raise ValueError(msg)
|
||||
return super_new(cls, name, bases, attrs)
|
||||
return super_new(mcs, name, bases, attrs)
|
||||
|
||||
# Merge base class metas.
|
||||
# Uses a special MetaDict that handles various merging rules
|
||||
@@ -360,7 +359,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
attrs['_meta'] = meta
|
||||
|
||||
# Call super and get the new class
|
||||
new_class = super_new(cls, name, bases, attrs)
|
||||
new_class = super_new(mcs, name, bases, attrs)
|
||||
|
||||
meta = new_class._meta
|
||||
|
||||
@@ -377,7 +376,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
new_class.objects = QuerySetManager()
|
||||
|
||||
# Validate the fields and set primary key if needed
|
||||
for field_name, field in new_class._fields.iteritems():
|
||||
for field_name, field in iteritems(new_class._fields):
|
||||
if field.primary_key:
|
||||
# Ensure only one primary key is set
|
||||
current_pk = new_class._meta.get('id_field')
|
||||
@@ -394,7 +393,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
'_auto_id_field', False)
|
||||
if not new_class._meta.get('id_field'):
|
||||
# After 0.10, find not existing names, instead of overwriting
|
||||
id_name, id_db_name = cls.get_auto_id_names(new_class)
|
||||
id_name, id_db_name = mcs.get_auto_id_names(new_class)
|
||||
new_class._auto_id_field = True
|
||||
new_class._meta['id_field'] = id_name
|
||||
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
|
||||
@@ -419,7 +418,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
return new_class
|
||||
|
||||
@classmethod
|
||||
def get_auto_id_names(cls, new_class):
|
||||
def get_auto_id_names(mcs, new_class):
|
||||
id_name, id_db_name = ('id', '_id')
|
||||
if id_name not in new_class._fields and \
|
||||
id_db_name not in (v.db_field for v in new_class._fields.values()):
|
||||
@@ -440,7 +439,7 @@ class MetaDict(dict):
|
||||
_merge_options = ('indexes',)
|
||||
|
||||
def merge(self, new_options):
|
||||
for k, v in new_options.iteritems():
|
||||
for k, v in iteritems(new_options):
|
||||
if k in self._merge_options:
|
||||
self[k] = self.get(k, []) + v
|
||||
else:
|
||||
|
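The `cls` → `mcs` renames follow the common convention that the first argument of a metaclass's `__new__` is the metaclass itself, mirroring `cls` in classmethods and avoiding confusion with the class being created. A minimal standalone illustration (not mongoengine code):

    class Meta(type):
        def __new__(mcs, name, bases, attrs):
            # `mcs` is the metaclass (Meta); the class being built is
            # whatever super().__new__ returns.
            attrs['created_by_meta'] = True
            return super(Meta, mcs).__new__(mcs, name, bases, attrs)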
mongoengine/base/utils.py  (new file, 22 additions)

@@ -0,0 +1,22 @@
+import re
+
+
+class LazyRegexCompiler(object):
+    """Descriptor to allow lazy compilation of regex"""
+
+    def __init__(self, pattern, flags=0):
+        self._pattern = pattern
+        self._flags = flags
+        self._compiled_regex = None
+
+    @property
+    def compiled_regex(self):
+        if self._compiled_regex is None:
+            self._compiled_regex = re.compile(self._pattern, self._flags)
+        return self._compiled_regex
+
+    def __get__(self, instance, owner):
+        return self.compiled_regex
+
+    def __set__(self, instance, value):
+        raise AttributeError("Can not set attribute LazyRegexCompiler")
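A sketch of how the descriptor defers the `re.compile` cost from import time to first use (the `Validator` class and pattern here are illustrative, not part of the library):

    import re

    from mongoengine.base.utils import LazyRegexCompiler


    class Validator(object):
        # Nothing is compiled when this class (or its module) is imported;
        # the descriptor only stores the pattern and flags.
        EMAIL_LIKE = LazyRegexCompiler(r'[^@]+@[^@]+', flags=re.IGNORECASE)


    # First attribute access triggers re.compile and caches the result;
    # __get__ works for both class and instance access.
    print(Validator.EMAIL_LIKE.match('User@Example.com') is not None)  # True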
mongoengine/connection.py

@@ -1,10 +1,10 @@
 from pymongo import MongoClient, ReadPreference, uri_parser
 import six

-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine.pymongo_support import IS_PYMONGO_3

 __all__ = ['MongoEngineConnectionError', 'connect', 'register_connection',
-           'DEFAULT_CONNECTION_NAME']
+           'DEFAULT_CONNECTION_NAME', 'get_db']


 DEFAULT_CONNECTION_NAME = 'default'

@@ -104,6 +104,18 @@ def register_connection(alias, db=None, name=None, host=None, port=None,
             conn_settings['authentication_source'] = uri_options['authsource']
         if 'authmechanism' in uri_options:
             conn_settings['authentication_mechanism'] = uri_options['authmechanism']
+        if IS_PYMONGO_3 and 'readpreference' in uri_options:
+            read_preferences = (
+                ReadPreference.NEAREST,
+                ReadPreference.PRIMARY,
+                ReadPreference.PRIMARY_PREFERRED,
+                ReadPreference.SECONDARY,
+                ReadPreference.SECONDARY_PREFERRED)
+            read_pf_mode = uri_options['readpreference'].lower()
+            for preference in read_preferences:
+                if preference.name.lower() == read_pf_mode:
+                    conn_settings['read_preference'] = preference
+                    break
     else:
         resolved_hosts.append(entity)
     conn_settings['host'] = resolved_hosts
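With this change, a `readPreference` option embedded in the connection URI is matched case-insensitively against the names of the `pymongo.ReadPreference` constants and stored in the connection settings. A usage sketch (host and database name are placeholders):

    from mongoengine import connect

    # 'secondaryPreferred' resolves to ReadPreference.SECONDARY_PREFERRED
    # and ends up in conn_settings['read_preference'].
    connect(host='mongodb://localhost:27017/mydb?readPreference=secondaryPreferred')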
mongoengine/context_managers.py

@@ -1,8 +1,11 @@
 from contextlib import contextmanager

 from pymongo.write_concern import WriteConcern
+from six import iteritems

 from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
+from mongoengine.pymongo_support import count_documents

 __all__ = ('switch_db', 'switch_collection', 'no_dereference',
            'no_sub_classes', 'query_counter', 'set_write_concern')

@@ -112,7 +115,7 @@ class no_dereference(object):
         GenericReferenceField = _import_class('GenericReferenceField')
         ComplexBaseField = _import_class('ComplexBaseField')

-        self.deref_fields = [k for k, v in self.cls._fields.iteritems()
+        self.deref_fields = [k for k, v in iteritems(self.cls._fields)
                              if isinstance(v, (ReferenceField,
                                                GenericReferenceField,
                                                ComplexBaseField))]

@@ -145,66 +148,85 @@ class no_sub_classes(object):
         :param cls: the class to turn querying sub classes on
         """
         self.cls = cls
+        self.cls_initial_subclasses = None

     def __enter__(self):
         """Change the objects default and _auto_dereference values."""
-        self.cls._all_subclasses = self.cls._subclasses
-        self.cls._subclasses = (self.cls,)
+        self.cls_initial_subclasses = self.cls._subclasses
+        self.cls._subclasses = (self.cls._class_name,)
         return self.cls

     def __exit__(self, t, value, traceback):
         """Reset the default and _auto_dereference values."""
-        self.cls._subclasses = self.cls._all_subclasses
-        delattr(self.cls, '_all_subclasses')
-        return self.cls
+        self.cls._subclasses = self.cls_initial_subclasses


 class query_counter(object):
-    """Query_counter context manager to get the number of queries."""
+    """Query_counter context manager to get the number of queries.
+    This works by updating the `profiling_level` of the database so that all queries get logged,
+    resetting the db.system.profile collection at the beginning of the context and counting the new entries.
+
+    This was designed for debugging purposes. In fact it is a global counter, so queries issued by other
+    threads/processes can interfere with it.
+
+    Be aware that:
+    - Iterating over large amounts of documents (>101) makes pymongo issue `getmore` queries to fetch the next
+      batch of documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
+    - Some queries are ignored by default by the counter (killcursors, db.system.indexes)
+    """

     def __init__(self):
-        """Construct the query_counter."""
-        self.counter = 0
+        """Construct the query_counter
+        """
         self.db = get_db()
+        self.initial_profiling_level = None
+        self._ctx_query_counter = 0  # number of queries issued by the context
+
+        self._ignored_query = {
+            'ns':
+                {'$ne': '%s.system.indexes' % self.db.name},
+            'op':  # MONGODB < 3.2
+                {'$ne': 'killcursors'},
+            'command.killCursors':  # MONGODB >= 3.2
+                {'$exists': False}
+        }
+
+    def _turn_on_profiling(self):
+        self.initial_profiling_level = self.db.profiling_level()
+        self.db.set_profiling_level(0)
+        self.db.system.profile.drop()
+        self.db.set_profiling_level(2)
+
+    def _resets_profiling(self):
+        self.db.set_profiling_level(self.initial_profiling_level)

     def __enter__(self):
-        """On every with block we need to drop the profile collection."""
-        self.db.set_profiling_level(0)
-        self.db.system.profile.drop()
-        self.db.set_profiling_level(2)
+        self._turn_on_profiling()
         return self

     def __exit__(self, t, value, traceback):
-        """Reset the profiling level."""
-        self.db.set_profiling_level(0)
+        self._resets_profiling()

     def __eq__(self, value):
         """== Compare querycounter."""
         counter = self._get_count()
         return value == counter

     def __ne__(self, value):
         """!= Compare querycounter."""
         return not self.__eq__(value)

     def __lt__(self, value):
         """< Compare querycounter."""
         return self._get_count() < value

     def __le__(self, value):
         """<= Compare querycounter."""
         return self._get_count() <= value

     def __gt__(self, value):
         """> Compare querycounter."""
         return self._get_count() > value

     def __ge__(self, value):
         """>= Compare querycounter."""
         return self._get_count() >= value

     def __int__(self):
         """int representation."""
         return self._get_count()

     def __repr__(self):

@@ -212,10 +234,12 @@ class query_counter(object):
         return u"%s" % self._get_count()

     def _get_count(self):
-        """Get the number of queries."""
-        ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
-        count = self.db.system.profile.find(ignore_query).count() - self.counter
-        self.counter += 1
+        """Get the number of queries by counting the current number of entries in db.system.profile
+        and subtracting the queries issued by this context. In fact, every time this is called one query
+        is issued, so we need to balance that.
+        """
+        count = count_documents(self.db.system.profile, self._ignored_query) - self._ctx_query_counter
+        self._ctx_query_counter += 1  # Account for the query we just issued to gather the information
        return count
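A usage sketch of the reworked counter (assumes a default connection is registered; `Person` is a placeholder document, and exact counts can vary with driver behavior such as `getmore` batching):

    from mongoengine.context_managers import query_counter

    with query_counter() as q:
        assert q == 0
        Person(name='a').save()   # one insert logged in db.system.profile
        Person.objects.first()    # one find logged
        assert q == 2             # each comparison calls _get_count(), which
                                  # compensates for the count query it issues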
mongoengine/dereference.py

@@ -1,5 +1,6 @@
 from bson import DBRef, SON
 import six
+from six import iteritems

 from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
                               TopLevelDocumentMetaclass, get_document)

@@ -52,26 +53,40 @@ class DeReference(object):
                 [i.__class__ == doc_type for i in items.values()]):
             return items
         elif not field.dbref:
             # We must turn the ObjectIds into DBRefs
+
+            # Recursively dig into the sub items of a list/dict
+            # to turn the ObjectIds into DBRefs
+            def _get_items_from_list(items):
+                new_items = []
+                for v in items:
+                    value = v
+                    if isinstance(v, dict):
+                        value = _get_items_from_dict(v)
+                    elif isinstance(v, list):
+                        value = _get_items_from_list(v)
+                    elif not isinstance(v, (DBRef, Document)):
+                        value = field.to_python(v)
+                    new_items.append(value)
+                return new_items
+
+            def _get_items_from_dict(items):
+                new_items = {}
+                for k, v in iteritems(items):
+                    value = v
+                    if isinstance(v, list):
+                        value = _get_items_from_list(v)
+                    elif isinstance(v, dict):
+                        value = _get_items_from_dict(v)
+                    elif not isinstance(v, (DBRef, Document)):
+                        value = field.to_python(v)
+                    new_items[k] = value
+                return new_items
+
             if not hasattr(items, 'items'):
-
-                def _get_items(items):
-                    new_items = []
-                    for v in items:
-                        if isinstance(v, list):
-                            new_items.append(_get_items(v))
-                        elif not isinstance(v, (DBRef, Document)):
-                            new_items.append(field.to_python(v))
-                        else:
-                            new_items.append(v)
-                    return new_items
-
-                items = _get_items(items)
+                items = _get_items_from_list(items)
             else:
-                items = {
-                    k: (v if isinstance(v, (DBRef, Document))
-                        else field.to_python(v))
-                    for k, v in items.iteritems()
-                }
+                items = _get_items_from_dict(items)

         self.reference_map = self._find_references(items)
         self.object_map = self._fetch_objects(doc_type=doc_type)

@@ -98,7 +113,7 @@ class DeReference(object):
         depth += 1
         for item in iterator:
             if isinstance(item, (Document, EmbeddedDocument)):
-                for field_name, field in item._fields.iteritems():
+                for field_name, field in iteritems(item._fields):
                     v = item._data.get(field_name, None)
                     if isinstance(v, LazyReference):
                         # LazyReference inherits DBRef but should not be dereferenced here !

@@ -110,7 +125,7 @@ class DeReference(object):
                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                         references = self._find_references(v, depth)
-                        for key, refs in references.iteritems():
+                        for key, refs in iteritems(references):
                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                 key = field_cls
                             reference_map.setdefault(key, set()).update(refs)

@@ -123,7 +138,7 @@ class DeReference(object):
                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                 references = self._find_references(item, depth - 1)
-                for key, refs in references.iteritems():
+                for key, refs in iteritems(references):
                     reference_map.setdefault(key, set()).update(refs)

         return reference_map

@@ -132,16 +147,21 @@ class DeReference(object):
         """Fetch all references and convert to their document objects
         """
         object_map = {}
-        for collection, dbrefs in self.reference_map.iteritems():
-            if hasattr(collection, 'objects'):  # We have a document class for the refs
+        for collection, dbrefs in iteritems(self.reference_map):
+
+            # we use getattr instead of hasattr because hasattr swallows any exception under python2
+            # so it could hide nasty things without raising exceptions (cf. bug #1688)
+            ref_document_cls_exists = (getattr(collection, 'objects', None) is not None)
+
+            if ref_document_cls_exists:
                 col_name = collection._get_collection_name()
                 refs = [dbref for dbref in dbrefs
                         if (col_name, dbref) not in object_map]
                 references = collection.objects.in_bulk(refs)
-                for key, doc in references.iteritems():
+                for key, doc in iteritems(references):
                     object_map[(col_name, key)] = doc
             else:  # Generic reference: use the refs data to convert to document
-                if isinstance(doc_type, (ListField, DictField, MapField,)):
+                if isinstance(doc_type, (ListField, DictField, MapField)):
                     continue

                 refs = [dbref for dbref in dbrefs

@@ -210,7 +230,7 @@ class DeReference(object):
             data = []
         else:
             is_list = False
-            iterator = items.iteritems()
+            iterator = iteritems(items)
             data = {}

         depth += 1
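The `hasattr` → `getattr` switch above matters on Python 2, where `hasattr` swallows *any* exception raised during attribute lookup, not just `AttributeError`. A minimal standalone illustration:

    class Flaky(object):
        @property
        def objects(self):
            raise RuntimeError('boom')


    f = Flaky()
    # Python 2: hasattr() swallows the RuntimeError and returns False,
    # silently hiding the bug. getattr() with a default only swallows
    # AttributeError, so the real error surfaces.
    print(hasattr(f, 'objects'))        # False on Python 2 (raises on Python 3)
    print(getattr(f, 'objects', None))  # raises RuntimeError on both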
mongoengine/document.py

@@ -5,6 +5,7 @@ from bson.dbref import DBRef
 import pymongo
 from pymongo.read_preferences import ReadPreference
 import six
+from six import iteritems

 from mongoengine import signals
 from mongoengine.base import (BaseDict, BaseDocument, BaseList,

@@ -12,10 +13,12 @@ from mongoengine.base import (BaseDict, BaseDocument, BaseList,
                               TopLevelDocumentMetaclass, get_document)
 from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
-from mongoengine.context_managers import switch_collection, switch_db
+from mongoengine.context_managers import (set_write_concern,
+                                          switch_collection,
+                                          switch_db)
 from mongoengine.errors import (InvalidDocumentError, InvalidQueryError,
                                 SaveConditionError)
-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine.pymongo_support import IS_PYMONGO_3, list_collection_names
 from mongoengine.queryset import (NotUniqueError, OperationError,
                                   QuerySet, transform)

@@ -39,7 +42,7 @@ class InvalidCollectionError(Exception):
     pass


-class EmbeddedDocument(BaseDocument):
+class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
     """A :class:`~mongoengine.Document` that isn't stored in its own
     collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
     fields on :class:`~mongoengine.Document`\ s through the

@@ -58,7 +61,6 @@ class EmbeddedDocument(BaseDocument):
     # The __metaclass__ attribute is removed by 2to3 when running with Python3
     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
     my_metaclass = DocumentMetaclass
-    __metaclass__ = DocumentMetaclass

     # A generic embedded document doesn't have any immutable properties
     # that describe it uniquely, hence it shouldn't be hashable. You can

@@ -89,13 +91,19 @@ class EmbeddedDocument(BaseDocument):
         return data

     def save(self, *args, **kwargs):
+        warnings.warn("EmbeddedDocument.save is deprecated and will be removed in a next version of mongoengine. "
+                      "Use the parent document's .save() or ._instance.save()",
+                      DeprecationWarning, stacklevel=2)
         self._instance.save(*args, **kwargs)

     def reload(self, *args, **kwargs):
+        warnings.warn("EmbeddedDocument.reload is deprecated and will be removed in a next version of mongoengine. "
+                      "Use the parent document's .reload() or ._instance.reload()",
+                      DeprecationWarning, stacklevel=2)
         self._instance.reload(*args, **kwargs)


-class Document(BaseDocument):
+class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
     """The base class used for defining the structure and properties of
     collections of documents stored in MongoDB. Inherit from this class, and
     add fields as class attributes to define a document's structure.

@@ -150,7 +158,6 @@ class Document(BaseDocument):
     # The __metaclass__ attribute is removed by 2to3 when running with Python3
     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
     my_metaclass = TopLevelDocumentMetaclass
-    __metaclass__ = TopLevelDocumentMetaclass

     __slots__ = ('__objects',)

@@ -172,8 +179,8 @@ class Document(BaseDocument):
         """
         if self.pk is None:
             return super(BaseDocument, self).__hash__()
-        else:
-            return hash(self.pk)
+
+        return hash(self.pk)

     @classmethod
     def _get_db(cls):

@@ -221,7 +228,7 @@ class Document(BaseDocument):
         # If the collection already exists and has different options
         # (i.e. isn't capped or has different max/size), raise an error.
-        if collection_name in db.collection_names():
+        if collection_name in list_collection_names(db, include_system_collections=True):
             collection = db[collection_name]
             options = collection.options()
             if (

@@ -370,6 +377,8 @@ class Document(BaseDocument):
         signals.pre_save_post_validation.send(self.__class__, document=self,
                                               created=created, **signal_kwargs)
+        # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation
+        doc = self.to_mongo()

         if self._meta.get('auto_create_index', True):
             self.ensure_indexes()

@@ -429,11 +438,18 @@ class Document(BaseDocument):
         Helper method, should only be used inside save().
         """
         collection = self._get_collection()
+        with set_write_concern(collection, write_concern) as wc_collection:
+            if force_insert:
+                return wc_collection.insert_one(doc).inserted_id
+            # insert_one will provoke UniqueError whereas save() does not,
+            # therefore we need to catch it and call replace_one.
+            if '_id' in doc:
+                raw_object = wc_collection.find_one_and_replace(
+                    {'_id': doc['_id']}, doc)
+                if raw_object:
+                    return doc['_id']

-        if force_insert:
-            return collection.insert(doc, **write_concern)
-
-        object_id = collection.save(doc, **write_concern)
+            object_id = wc_collection.insert_one(doc).inserted_id

         # In PyMongo 3.0, the save() call calls internally the _update() call
         # but they forget to return the _id value passed back, therefore getting it back here

@@ -585,9 +601,8 @@ class Document(BaseDocument):
         :param signal_kwargs: (optional) kwargs dictionary to be passed to
             the signal calls.
         :param write_concern: Extra keyword arguments are passed down which
-            will be used as options for the resultant
-            ``getLastError`` command.  For example,
-            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
+            will be used as options for the resultant ``getLastError`` command.
+            For example, ``save(..., w: 2, fsync: True)`` will
             wait until at least two servers have recorded the write and
             will force an fsync on the primary server.

@@ -599,7 +614,7 @@ class Document(BaseDocument):
         # Delete FileFields separately
         FileField = _import_class('FileField')
-        for name, field in self._fields.iteritems():
+        for name, field in iteritems(self._fields):
             if isinstance(field, FileField):
                 getattr(self, name).delete()

@@ -997,10 +1012,10 @@ class Document(BaseDocument):
         return {'missing': missing, 'extra': extra}


-class DynamicDocument(Document):
+class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
     """A Dynamic Document class allowing flexible, expandable and uncontrolled
     schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
-    way as an ordinary document but has expando style properties. Any data
+    way as an ordinary document but has expanded style properties. Any data
     passed or set against the :class:`~mongoengine.DynamicDocument` that is
     not a field is automatically converted into a
     :class:`~mongoengine.fields.DynamicField` and data can be attributed to that

@@ -1008,13 +1023,12 @@ class DynamicDocument(Document):
     .. note::

-        There is one caveat on Dynamic Documents: fields cannot start with `_`
+        There is one caveat on Dynamic Documents: undeclared fields cannot start with `_`
     """

     # The __metaclass__ attribute is removed by 2to3 when running with Python3
     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
     my_metaclass = TopLevelDocumentMetaclass
-    __metaclass__ = TopLevelDocumentMetaclass

     _dynamic = True

@@ -1030,7 +1044,7 @@ class DynamicDocument(Document):
         super(DynamicDocument, self).__delattr__(*args, **kwargs)


-class DynamicEmbeddedDocument(EmbeddedDocument):
+class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)):
     """A Dynamic Embedded Document class allowing flexible, expandable and
     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
     information about dynamic documents.

@@ -1039,7 +1053,6 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
     # The __metaclass__ attribute is removed by 2to3 when running with Python3
     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
     my_metaclass = DocumentMetaclass
-    __metaclass__ = DocumentMetaclass

     _dynamic = True
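The `six.with_metaclass` changes are the portable replacement for Python 2's `__metaclass__` attribute (which 2to3 strips) and Python 3's `metaclass=` keyword. A sketch of the idiom:

    import six


    class Meta(type):
        pass


    # Equivalent to `class Base(object, metaclass=Meta)` on Python 3 and to
    # setting `__metaclass__ = Meta` inside the class body on Python 2, but
    # works unchanged on both and survives 2to3.
    class Base(six.with_metaclass(Meta, object)):
        pass


    print(type(Base) is Meta)  # True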
mongoengine/errors.py

@@ -1,6 +1,7 @@
 from collections import defaultdict

 import six
+from six import iteritems

 __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',

@@ -71,6 +72,7 @@ class ValidationError(AssertionError):
     _message = None

     def __init__(self, message='', **kwargs):
+        super(ValidationError, self).__init__(message)
         self.errors = kwargs.get('errors', {})
         self.field_name = kwargs.get('field_name')
         self.message = message

@@ -112,7 +114,7 @@ class ValidationError(AssertionError):
             return errors_dict

         if isinstance(source, dict):
-            for field_name, error in source.iteritems():
+            for field_name, error in iteritems(source):
                 errors_dict[field_name] = build_dict(error)
         elif isinstance(source, ValidationError) and source.errors:
             return build_dict(source.errors)

@@ -134,12 +136,12 @@ class ValidationError(AssertionError):
                 value = ' '.join([generate_key(k) for k in value])
             elif isinstance(value, dict):
                 value = ' '.join(
-                    [generate_key(v, k) for k, v in value.iteritems()])
+                    [generate_key(v, k) for k, v in iteritems(value)])

             results = '%s.%s' % (prefix, value) if prefix else value
             return results

         error_dict = defaultdict(list)
-        for k, v in self.to_dict().iteritems():
+        for k, v in iteritems(self.to_dict()):
             error_dict[generate_key(v)].append(k)
-        return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])
+        return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)])
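Calling `super().__init__(message)` means the message is also stored on the underlying `Exception`, which `str()`, `repr()` and pickling rely on. A quick illustration (the field name is hypothetical):

    from mongoengine.errors import ValidationError

    exc = ValidationError('value is too long', field_name='name')
    # Now that AssertionError.__init__ has run, the message lands in exc.args.
    print(exc.args)  # ('value is too long',)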
mongoengine/fields.py

@@ -5,13 +5,13 @@ import re
 import socket
 import time
 import uuid
 import warnings
 from operator import itemgetter

 from bson import Binary, DBRef, ObjectId, SON
 import gridfs
 import pymongo
 import six
+from six import iteritems

 try:
     import dateutil

@@ -25,15 +25,18 @@
 except ImportError:
     Int64 = long


 from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField,
                               GeoJsonBaseField, LazyReference, ObjectIdField,
                               get_document)
+from mongoengine.base.utils import LazyRegexCompiler
 from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
 from mongoengine.document import Document, EmbeddedDocument
 from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError
 from mongoengine.python_support import StringIO
-from mongoengine.queryset import DO_NOTHING, QuerySet
+from mongoengine.queryset import DO_NOTHING
+from mongoengine.queryset.base import BaseQuerySet

 try:
     from PIL import Image, ImageOps

@@ -41,9 +44,15 @@
     Image = None
     ImageOps = None

+if six.PY3:
+    # Useless as long as 2to3 gets executed
+    # as it turns `long` into `int` blindly
+    long = int
+
+
 __all__ = (
     'StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
-    'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
+    'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'DateField',
     'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
     'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
     'SortedListField', 'EmbeddedDocumentListField', 'DictField',

@@ -123,9 +132,9 @@ class URLField(StringField):
     .. versionadded:: 0.3
     """

-    _URL_REGEX = re.compile(
+    _URL_REGEX = LazyRegexCompiler(
         r'^(?:[a-z0-9\.\-]*)://'  # scheme is validated separately
-        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?<!-)\.?)|'  # domain...
+        r'(?:(?:[A-Z0-9](?:[A-Z0-9-_]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?<!-)\.?)|'  # domain...
         r'localhost|'  # localhost...
         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4
         r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6

@@ -133,8 +142,7 @@ class URLField(StringField):
         r'(?:/?|[/?]\S+)$', re.IGNORECASE)
     _URL_SCHEMES = ['http', 'https', 'ftp', 'ftps']

-    def __init__(self, verify_exists=False, url_regex=None, schemes=None, **kwargs):
-        self.verify_exists = verify_exists
+    def __init__(self, url_regex=None, schemes=None, **kwargs):
         self.url_regex = url_regex or self._URL_REGEX
         self.schemes = schemes or self._URL_SCHEMES
         super(URLField, self).__init__(**kwargs)

@@ -157,7 +165,7 @@ class EmailField(StringField):
     .. versionadded:: 0.4
     """
-    USER_REGEX = re.compile(
+    USER_REGEX = LazyRegexCompiler(
         # `dot-atom` defined in RFC 5322 Section 3.2.3.
         r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"
         # `quoted-string` defined in RFC 5322 Section 3.2.4.

@@ -165,7 +173,7 @@ class EmailField(StringField):
         re.IGNORECASE
     )

-    UTF8_USER_REGEX = re.compile(
+    UTF8_USER_REGEX = LazyRegexCompiler(
         six.u(
             # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to
             # include `UTF8-non-ascii`.

@@ -175,7 +183,7 @@ class EmailField(StringField):
         ), re.IGNORECASE | re.UNICODE
     )

-    DOMAIN_REGEX = re.compile(
+    DOMAIN_REGEX = LazyRegexCompiler(
         r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z',
         re.IGNORECASE
     )
@@ -267,14 +275,14 @@ class IntField(BaseField):
     def to_python(self, value):
         try:
             value = int(value)
-        except ValueError:
+        except (TypeError, ValueError):
             pass
         return value

     def validate(self, value):
         try:
             value = int(value)
-        except Exception:
+        except (TypeError, ValueError):
             self.error('%s could not be converted to int' % value)

         if self.min_value is not None and value < self.min_value:

@@ -300,7 +308,7 @@ class LongField(BaseField):
     def to_python(self, value):
         try:
             value = long(value)
-        except ValueError:
+        except (TypeError, ValueError):
             pass
         return value

@@ -310,7 +318,7 @@ class LongField(BaseField):
     def validate(self, value):
         try:
             value = long(value)
-        except Exception:
+        except (TypeError, ValueError):
             self.error('%s could not be converted to long' % value)

         if self.min_value is not None and value < self.min_value:

@@ -364,7 +372,8 @@ class FloatField(BaseField):


 class DecimalField(BaseField):
-    """Fixed-point decimal number field.
+    """Fixed-point decimal number field. Stores the value as a float by default unless `force_string` is used.
+    If using floats, beware of Decimal-to-float conversion (potential precision loss).

     .. versionchanged:: 0.8
     .. versionadded:: 0.3

@@ -375,7 +384,9 @@ class DecimalField(BaseField):
         """
         :param min_value: Validation rule for the minimum acceptable value.
         :param max_value: Validation rule for the maximum acceptable value.
-        :param force_string: Store as a string.
+        :param force_string: Store the value as a string (instead of a float).
+            Be aware that this affects query sorting and operations like lte, gte (as string comparison is applied)
+            and some query operators won't work (e.g. inc, dec)
         :param precision: Number of decimal places to store.
         :param rounding: The rounding rule from the python decimal library:

@@ -406,7 +417,7 @@ class DecimalField(BaseField):
         # Convert to string for python 2.6 before casting to Decimal
         try:
             value = decimal.Decimal('%s' % value)
-        except decimal.InvalidOperation:
+        except (TypeError, ValueError, decimal.InvalidOperation):
             return value
         return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding)

@@ -423,7 +434,7 @@ class DecimalField(BaseField):
             value = six.text_type(value)
         try:
             value = decimal.Decimal(value)
-        except Exception as exc:
+        except (TypeError, ValueError, decimal.InvalidOperation) as exc:
             self.error('Could not convert value to decimal: %s' % exc)

         if self.min_value is not None and value < self.min_value:

@@ -462,6 +473,8 @@ class DateTimeField(BaseField):
     installed you can utilise it to convert varying types of date formats into valid
     python datetime objects.

+    Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow)
+
     Note: Microseconds are rounded to the nearest millisecond.
         Pre UTC microsecond support is effectively broken.
         Use :class:`~mongoengine.fields.ComplexDateTimeField` if you
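The `force_string` caveat documented above is worth seeing concretely; a hedged sketch (the `Price` document is a placeholder):

    from decimal import Decimal

    from mongoengine import Document, DecimalField


    class Price(Document):  # placeholder document
        amount = DecimalField(precision=2)                         # stored as a float
        amount_str = DecimalField(precision=2, force_string=True)  # stored as a string


    p = Price(amount=Decimal('9.99'), amount_str=Decimal('9.99'))
    # With force_string=True, range queries such as amount_str__gte=... compare
    # *strings* lexicographically, and server-side operators like inc/dec
    # cannot be applied to the stored value.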
@@ -525,6 +538,22 @@
         return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value))


+class DateField(DateTimeField):
+    def to_mongo(self, value):
+        value = super(DateField, self).to_mongo(value)
+        # drop hours, minutes, seconds
+        if isinstance(value, datetime.datetime):
+            value = datetime.datetime(value.year, value.month, value.day)
+        return value
+
+    def to_python(self, value):
+        value = super(DateField, self).to_python(value)
+        # convert datetime to date
+        if isinstance(value, datetime.datetime):
+            value = datetime.date(value.year, value.month, value.day)
+        return value
+
+
 class ComplexDateTimeField(StringField):
     """
     ComplexDateTimeField handles microseconds exactly instead of rounding
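A hedged usage sketch of the new `DateField` (the `Event` document is a placeholder): `to_mongo` truncates any datetime to midnight for storage, and `to_python` hands back a `datetime.date`, so the time component is intentionally lost across a save/reload round-trip.

    import datetime

    from mongoengine import Document, DateField


    class Event(Document):  # placeholder document
        day = DateField()


    e = Event(day=datetime.datetime(2018, 7, 1, 13, 45, 59))
    e.save()
    e.reload()
    # e.day is now datetime.date(2018, 7, 1); 13:45:59 was dropped on storage.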
|
||||
The `,` as the separator can be easily modified by passing the `separator`
|
||||
keyword when initializing the field.
|
||||
|
||||
Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow)
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
|
||||
def __init__(self, separator=',', **kwargs):
|
||||
self.names = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']
|
||||
"""
|
||||
:param separator: Allows to customize the separator used for storage (default ``,``)
|
||||
"""
|
||||
self.separator = separator
|
||||
self.format = separator.join(['%Y', '%m', '%d', '%H', '%M', '%S', '%f'])
|
||||
super(ComplexDateTimeField, self).__init__(**kwargs)
|
||||
@@ -572,20 +605,24 @@ class ComplexDateTimeField(StringField):
|
||||
>>> ComplexDateTimeField()._convert_from_string(a)
|
||||
datetime.datetime(2011, 6, 8, 20, 26, 24, 92284)
|
||||
"""
|
||||
values = map(int, data.split(self.separator))
|
||||
values = [int(d) for d in data.split(self.separator)]
|
||||
return datetime.datetime(*values)
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
if instance is None:
|
||||
return self
|
||||
|
||||
data = super(ComplexDateTimeField, self).__get__(instance, owner)
|
||||
if data is None:
|
||||
return None if self.null else datetime.datetime.now()
|
||||
if isinstance(data, datetime.datetime):
|
||||
|
||||
if isinstance(data, datetime.datetime) or data is None:
|
||||
return data
|
||||
return self._convert_from_string(data)
|
||||
|
||||
def __set__(self, instance, value):
|
||||
value = self._convert_from_datetime(value) if value else value
|
||||
return super(ComplexDateTimeField, self).__set__(instance, value)
|
||||
super(ComplexDateTimeField, self).__set__(instance, value)
|
||||
value = instance._data[self.name]
|
||||
if value is not None:
|
||||
instance._data[self.name] = self._convert_from_datetime(value)
|
||||
|
||||
def validate(self, value):
|
||||
value = self.to_python(value)
|
||||
@@ -629,9 +666,17 @@ class EmbeddedDocumentField(BaseField):
     def document_type(self):
         if isinstance(self.document_type_obj, six.string_types):
             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
-                self.document_type_obj = self.owner_document
+                resolved_document_type = self.owner_document
             else:
-                self.document_type_obj = get_document(self.document_type_obj)
+                resolved_document_type = get_document(self.document_type_obj)
+
+            if not issubclass(resolved_document_type, EmbeddedDocument):
+                # Due to the late resolution of the document_type
+                # there is a chance that it won't be an EmbeddedDocument (#1661)
+                self.error('Invalid embedded document class provided to an '
+                           'EmbeddedDocumentField')
+            self.document_type_obj = resolved_document_type

         return self.document_type_obj

     def to_python(self, value):

@@ -750,12 +795,12 @@ class DynamicField(BaseField):
             value = {k: v for k, v in enumerate(value)}

         data = {}
-        for k, v in value.iteritems():
+        for k, v in iteritems(value):
             data[k] = self.to_mongo(v, use_db_field, fields)

         value = data
         if is_list:  # Convert back to a list
-            value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))]
+            value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))]
         return value

     def to_python(self, value):

@@ -808,8 +853,7 @@ class ListField(ComplexBaseField):
     def validate(self, value):
         """Make sure that a list of valid fields is being used."""
-        if (not isinstance(value, (list, tuple, QuerySet)) or
-                isinstance(value, six.string_types)):
+        if not isinstance(value, (list, tuple, BaseQuerySet)):
             self.error('Only lists and tuples may be used in a list field')
         super(ListField, self).validate(value)

@@ -901,7 +945,7 @@ def key_has_dot_or_dollar(d):
     dictionary contains a dot or a dollar sign.
     """
     for k, v in d.items():
-        if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)):
+        if ('.' in k or k.startswith('$')) or (isinstance(v, dict) and key_has_dot_or_dollar(v)):
             return True
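The relaxed check above now rejects only keys that *start* with `$` (matching MongoDB, which tolerates `$` in the middle of a key), while dots are still rejected anywhere. Behavior sketch, assuming the helper is imported from `mongoengine.fields`:

    from mongoengine.fields import key_has_dot_or_dollar

    print(key_has_dot_or_dollar({'a$b': 1}))        # was True, now False
    print(key_has_dot_or_dollar({'$set': 1}))       # still True
    print(key_has_dot_or_dollar({'a.b': 1}))        # still True (dots rejected)
    print(key_has_dot_or_dollar({'a': {'$x': 1}}))  # True (recurses into nested dicts)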
@@ -916,14 +960,9 @@ class DictField(ComplexBaseField):
     .. versionchanged:: 0.5 - Can now handle complex / varying types of data
     """

-    def __init__(self, basecls=None, field=None, *args, **kwargs):
+    def __init__(self, field=None, *args, **kwargs):
         self.field = field
         self._auto_dereference = False
-        self.basecls = basecls or BaseField
-
-        # XXX ValidationError raised outside of the "validate" method.
-        if not issubclass(self.basecls, BaseField):
-            self.error('DictField only accepts dict values')

         kwargs.setdefault('default', lambda: {})
         super(DictField, self).__init__(*args, **kwargs)

@@ -939,11 +978,11 @@ class DictField(ComplexBaseField):
             self.error(msg)
         if key_has_dot_or_dollar(value):
             self.error('Invalid dictionary key name - keys may not contain "."'
-                       ' or "$" characters')
+                       ' or startswith "$" characters')
         super(DictField, self).validate(value)

     def lookup_member(self, member_name):
-        return DictField(basecls=self.basecls, db_field=member_name)
+        return DictField(db_field=member_name)

     def prepare_query_value(self, op, value):
         match_operators = ['contains', 'icontains', 'startswith',

@@ -953,7 +992,7 @@ class DictField(ComplexBaseField):
         if op in match_operators and isinstance(value, six.string_types):
             return StringField().prepare_query_value(op, value)

-        if hasattr(self.field, 'field'):
+        if hasattr(self.field, 'field'):  # Used for instance when using DictField(ListField(IntField()))
             if op in ('set', 'unset') and isinstance(value, dict):
                 return {
                     k: self.field.prepare_query_value(op, v)

@@ -1011,11 +1050,13 @@ class ReferenceField(BaseField):
     .. code-block:: python

-        class Bar(Document):
-            content = StringField()
-            foo = ReferenceField('Foo')
+        class Org(Document):
+            owner = ReferenceField('User')

-        Foo.register_delete_rule(Bar, 'foo', NULLIFY)
+        class User(Document):
+            org = ReferenceField('Org', reverse_delete_rule=CASCADE)
+
+        User.register_delete_rule(Org, 'owner', DENY)

     .. versionchanged:: 0.5 added `reverse_delete_rule`
     """

@@ -1063,9 +1104,9 @@ class ReferenceField(BaseField):
         # Get value from document instance if available
         value = instance._data.get(self.name)
-        self._auto_dereference = instance._fields[self.name]._auto_dereference
+        auto_dereference = instance._fields[self.name]._auto_dereference
         # Dereference DBRefs
-        if self._auto_dereference and isinstance(value, DBRef):
+        if auto_dereference and isinstance(value, DBRef):
             if hasattr(value, 'cls'):
                 # Dereference using the class type specified in the reference
                 cls = get_document(value.cls)

@@ -1136,16 +1177,6 @@ class ReferenceField(BaseField):
             self.error('You can only reference documents once they have been '
                        'saved to the database')

-        if (
-            self.document_type._meta.get('abstract') and
-            not isinstance(value, self.document_type)
-        ):
-            self.error(
-                '%s is not an instance of abstract reference type %s' % (
-                    self.document_type._class_name
-                )
-            )
-
     def lookup_member(self, member_name):
         return self.document_type._fields.get(member_name)

@@ -1226,9 +1257,10 @@ class CachedReferenceField(BaseField):
         # Get value from document instance if available
         value = instance._data.get(self.name)
-        self._auto_dereference = instance._fields[self.name]._auto_dereference
+        auto_dereference = instance._fields[self.name]._auto_dereference

         # Dereference DBRefs
-        if self._auto_dereference and isinstance(value, DBRef):
+        if auto_dereference and isinstance(value, DBRef):
             dereferenced = self.document_type._get_db().dereference(value)
             if dereferenced is None:
                 raise DoesNotExist('Trying to dereference unknown document %s' % value)

@@ -1361,8 +1393,8 @@ class GenericReferenceField(BaseField):
         value = instance._data.get(self.name)

-        self._auto_dereference = instance._fields[self.name]._auto_dereference
-        if self._auto_dereference and isinstance(value, (dict, SON)):
+        auto_dereference = instance._fields[self.name]._auto_dereference
+        if auto_dereference and isinstance(value, (dict, SON)):
             dereferenced = self.dereference(value)
             if dereferenced is None:
                 raise DoesNotExist('Trying to dereference unknown document %s' % value)

@@ -1444,14 +1476,20 @@ class BinaryField(BaseField):
         return Binary(value)

     def validate(self, value):
-        if not isinstance(value, (six.binary_type, six.text_type, Binary)):
+        if not isinstance(value, (six.binary_type, Binary)):
             self.error('BinaryField only accepts instances of '
                        '(%s, %s, Binary)' % (
-                           six.binary_type.__name__, six.text_type.__name__))
+                           six.binary_type.__name__, Binary.__name__))

         if self.max_bytes is not None and len(value) > self.max_bytes:
             self.error('Binary value is too long')

+    def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+        return super(BinaryField, self).prepare_query_value(
+            op, self.to_mongo(value))
+

 class GridFSError(Exception):
     pass

@@ -1492,9 +1530,11 @@ class GridFSProxy(object):
     def __get__(self, instance, value):
         return self

-    def __nonzero__(self):
+    def __bool__(self):
         return bool(self.grid_id)

+    __nonzero__ = __bool__  # For Py2 support
+
     def __getstate__(self):
         self_dict = self.__dict__
         self_dict['_fs'] = None

@@ -1512,9 +1552,9 @@ class GridFSProxy(object):
         return '<%s: %s>' % (self.__class__.__name__, self.grid_id)

     def __str__(self):
-        name = getattr(
-            self.get(), 'filename', self.grid_id) if self.get() else '(no file)'
-        return '<%s: %s>' % (self.__class__.__name__, name)
+        gridout = self.get()
+        filename = getattr(gridout, 'filename') if gridout else '<no file>'
+        return '<%s: %s (%s)>' % (self.__class__.__name__, filename, self.grid_id)

     def __eq__(self, other):
         if isinstance(other, GridFSProxy):

@@ -1834,12 +1874,9 @@ class ImageField(FileField):
     """
     A Image File storage field.

-    @size (width, height, force):
-        max size to store images, if larger will be automatically resized
-        ex: size=(800, 600, True)
-
-    @thumbnail (width, height, force):
-        size to generate a thumbnail
+    :param size: max size to store images, provided as (width, height, force)
+        if larger, it will be automatically resized (ex: size=(800, 600, True))
+    :param thumbnail_size: size to generate a thumbnail, provided as (width, height, force)

     .. versionadded:: 0.6
     """

@@ -1910,8 +1947,7 @@ class SequenceField(BaseField):
         self.collection_name = collection_name or self.COLLECTION_NAME
         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
         self.sequence_name = sequence_name
-        self.value_decorator = (callable(value_decorator) and
-                                value_decorator or self.VALUE_DECORATOR)
+        self.value_decorator = value_decorator if callable(value_decorator) else self.VALUE_DECORATOR
         super(SequenceField, self).__init__(*args, **kwargs)

     def generate(self):

@@ -2020,7 +2056,7 @@ class UUIDField(BaseField):
             if not isinstance(value, six.string_types):
                 value = six.text_type(value)
             return uuid.UUID(value)
-        except Exception:
+        except (ValueError, TypeError, AttributeError):
             return original_value
         return value

@@ -2042,7 +2078,7 @@ class UUIDField(BaseField):
             value = str(value)
         try:
             uuid.UUID(value)
-        except Exception as exc:
+        except (ValueError, TypeError, AttributeError) as exc:
             self.error('Could not convert to UUID: %s' % exc)

@@ -2222,9 +2258,9 @@ class LazyReferenceField(BaseField):
     :param reverse_delete_rule: Determines what to do when the referring
       object is deleted
     :param passthrough: When trying to access unknown fields, the
-      :class:`~mongoengine.base.datastructure.LazyReference` instance will
-      automatically call `fetch()` and try to retrive the field on the fetched
-      document. Note this only work getting field (not setting or deleting).
+        :class:`~mongoengine.base.datastructure.LazyReference` instance will
+        automatically call `fetch()` and try to retrieve the field on the fetched
+        document. Note this only works when getting a field (not setting or deleting).
     """
     # XXX ValidationError raised outside of the "validate" method.
     if (
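The `__bool__` / `__nonzero__` pairing used in the `GridFSProxy` change above (and again in the queryset below) is the standard cross-version truthiness idiom: Python 3 calls `__bool__`, Python 2 calls `__nonzero__`. A standalone sketch:

    class Container(object):
        def __init__(self, items):
            self.items = items

        def __bool__(self):          # Python 3 hook
            return bool(self.items)

        __nonzero__ = __bool__       # Python 2 hook, same implementation


    print(bool(Container([])))   # False on both Python 2 and 3
    print(bool(Container([1])))  # True on both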
mongoengine/mongodb_support.py  (new file, 21 additions)

@@ -0,0 +1,21 @@
+"""
+Helper functions, constants, and types to aid with MongoDB version support
+"""
+from mongoengine.connection import get_connection
+
+
+# Constants that can be used to compare the version retrieved with
+# get_mongodb_version()
+MONGODB_34 = (3, 4)
+MONGODB_32 = (3, 2)
+MONGODB_3 = (3, 0)
+MONGODB_26 = (2, 6)
+
+
+def get_mongodb_version():
+    """Return the version of the connected mongoDB (first 2 digits)
+
+    :return: tuple(int, int)
+    """
+    version_list = get_connection().server_info()['versionArray'][:2]  # e.g. (3, 2)
+    return tuple(version_list)
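Because both the constants and the return value are `(major, minor)` tuples, they compare element-wise, which makes version gating a one-liner. A usage sketch (database name is a placeholder):

    from mongoengine.connection import connect
    from mongoengine.mongodb_support import MONGODB_32, get_mongodb_version

    connect('testdb')  # placeholder database

    if get_mongodb_version() >= MONGODB_32:
        print('MongoDB >= 3.2 features available')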
mongoengine/pymongo_support.py  (new file, 33 additions)

@@ -0,0 +1,33 @@
+"""
+Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
+"""
+import pymongo
+
+_PYMONGO_37 = (3, 7)
+
+PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
+
+IS_PYMONGO_3 = PYMONGO_VERSION[0] >= 3
+IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37
+
+
+def count_documents(collection, filter):
+    """Pymongo>3.7 deprecates count in favour of count_documents"""
+    if IS_PYMONGO_GTE_37:
+        return collection.count_documents(filter)
+    else:
+        count = collection.find(filter).count()
+    return count
+
+
+def list_collection_names(db, include_system_collections=False):
+    """Pymongo>3.7 deprecates collection_names in favour of list_collection_names"""
+    if IS_PYMONGO_GTE_37:
+        collections = db.list_collection_names()
+    else:
+        collections = db.collection_names()
+
+    if not include_system_collections:
+        collections = [c for c in collections if not c.startswith('system.')]
+
+    return collections
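A usage sketch of the new shim (database and collection names are placeholders): the helper picks `collection.count_documents(filter)` on PyMongo >= 3.7 and falls back to the deprecated cursor `count()` on older drivers.

    from mongoengine.connection import connect, get_db
    from mongoengine.pymongo_support import count_documents

    connect('testdb')  # placeholder database
    db = get_db()

    # Same call works across PyMongo versions.
    n = count_documents(db.some_collection, {'status': 'active'})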
mongoengine/python_support.py

@@ -1,17 +1,8 @@
 """
-Helper functions, constants, and types to aid with Python v2.7 - v3.x and
-PyMongo v2.7 - v3.x support.
+Helper functions, constants, and types to aid with Python v2.7 - v3.x support
 """
-import pymongo
 import six

-
-if pymongo.version_tuple[0] < 3:
-    IS_PYMONGO_3 = False
-else:
-    IS_PYMONGO_3 = True
-

 # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
 StringIO = six.BytesIO

@@ -23,3 +14,10 @@
         pass
     else:
         StringIO = cStringIO.StringIO
+
+
+if six.PY3:
+    from collections.abc import Hashable
+else:
+    # raises DeprecationWarnings in Python >=3.7
+    from collections import Hashable
@@ -2,7 +2,6 @@ from __future__ import absolute_import
|
||||
|
||||
import copy
|
||||
import itertools
|
||||
import operator
|
||||
import pprint
|
||||
import re
|
||||
import warnings
|
||||
@@ -13,6 +12,7 @@ import pymongo
import pymongo.errors
from pymongo.common import validate_read_preference
import six
from six import iteritems

from mongoengine import signals
from mongoengine.base import get_document
@@ -21,7 +21,7 @@ from mongoengine.connection import get_db
from mongoengine.context_managers import set_write_concern, switch_db
from mongoengine.errors import (InvalidQueryError, LookUpError,
                                NotUniqueError, OperationError)
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.pymongo_support import IS_PYMONGO_3
from mongoengine.queryset import transform
from mongoengine.queryset.field_list import QueryFieldList
from mongoengine.queryset.visitor import Q, QNode
@@ -39,8 +39,6 @@ CASCADE = 2
DENY = 3
PULL = 4

RE_TYPE = type(re.compile(''))


class BaseQuerySet(object):
    """A set of results returned from a query. Wraps a MongoDB cursor,
@@ -191,7 +189,7 @@ class BaseQuerySet(object):
            )

        if queryset._as_pymongo:
            return queryset._get_as_pymongo(queryset._cursor[key])
            return queryset._cursor[key]

        return queryset._document._from_son(
            queryset._cursor[key],
@@ -209,18 +207,16 @@ class BaseQuerySet(object):
        queryset = self.order_by()
        return False if queryset.first() is None else True

    def __nonzero__(self):
        """Avoid opening all records in an if stmt in Py2."""
        return self._has_data()

    def __bool__(self):
        """Avoid opening all records in an if stmt in Py3."""
        return self._has_data()

    __nonzero__ = __bool__  # For Py2 support

    # Core functions

    def all(self):
        """Returns all documents."""
        """Returns a copy of the current QuerySet."""
        return self.__call__()

    def filter(self, *q_objs, **query):
@@ -269,13 +265,13 @@ class BaseQuerySet(object):
        queryset = queryset.filter(*q_objs, **query)

        try:
            result = queryset.next()
            result = six.next(queryset)
        except StopIteration:
            msg = ('%s matching query does not exist.'
                   % queryset._document._class_name)
            raise queryset._document.DoesNotExist(msg)
        try:
            queryset.next()
            six.next(queryset)
        except StopIteration:
            return result

@@ -359,7 +355,7 @@ class BaseQuerySet(object):

        try:
            inserted_result = insert_func(raw)
            ids = return_one and [inserted_result.inserted_id] or inserted_result.inserted_ids
            ids = [inserted_result.inserted_id] if return_one else inserted_result.inserted_ids
        except pymongo.errors.DuplicateKeyError as err:
            message = 'Could not save document (%s)'
            raise NotUniqueError(message % six.text_type(err))
@@ -377,17 +373,20 @@ class BaseQuerySet(object):
                raise NotUniqueError(message % six.text_type(err))
            raise OperationError(message % six.text_type(err))

        # Apply inserted_ids to documents
        for doc, doc_id in zip(docs, ids):
            doc.pk = doc_id

        if not load_bulk:
            signals.post_bulk_insert.send(
                self._document, documents=docs, loaded=False, **signal_kwargs)
            return return_one and ids[0] or ids
            return ids[0] if return_one else ids

        documents = self.in_bulk(ids)
        results = []
        for obj_id in ids:
            results.append(documents.get(obj_id))
        results = [documents.get(obj_id) for obj_id in ids]
        signals.post_bulk_insert.send(
            self._document, documents=results, loaded=True, **signal_kwargs)
        return return_one and results[0] or results
        return results[0] if return_one else results
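The two `return` rewrites above swap the legacy `cond and x or y` idiom for real conditional expressions. An illustrative sketch (not part of the changeset) of the pitfall being removed: `and/or` silently picks the wrong branch whenever the intended result is falsy.

```python
all_ids = [0, 1]          # suppose the first inserted _id happens to be 0
return_one = True

# Legacy idiom: `cond and x or y` yields y whenever x is falsy,
# so a falsy first id (0 here) falls through to the full list.
broken = return_one and all_ids[0] or all_ids   # -> [0, 1]

# A conditional expression always honours the condition.
fixed = all_ids[0] if return_one else all_ids   # -> 0

assert broken == [0, 1] and fixed == 0
```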
    def count(self, with_limit_and_skip=False):
        """Count the selected elements in the query.
@@ -396,9 +395,11 @@ class BaseQuerySet(object):
            :meth:`skip` that has been applied to this cursor into account when
            getting the count
        """
        if self._limit == 0 and with_limit_and_skip or self._none:
        if self._limit == 0 and with_limit_and_skip is False or self._none:
            return 0
        return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
        count = self._cursor.count(with_limit_and_skip=with_limit_and_skip)
        self._cursor_obj = None
        return count

    def delete(self, write_concern=None, _from_doc_delete=False,
               cascade_refs=None):
@@ -498,11 +499,12 @@ class BaseQuerySet(object):
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param full_result: Return the full result dictionary rather than just the number
            updated, e.g. return
            ``{'n': 2, 'nModified': 2, 'ok': 1.0, 'updatedExisting': True}``.
        :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number
            updated items
        :param update: Django-style update keyword arguments

        :returns the number of updated documents (unless ``full_result`` is True)

        .. versionadded:: 0.2
        """
        if not update and not upsert:
@@ -566,7 +568,7 @@ class BaseQuerySet(object):
            document = self._document.objects.with_id(atomic_update.upserted_id)
        return document

    def update_one(self, upsert=False, write_concern=None, **update):
    def update_one(self, upsert=False, write_concern=None, full_result=False, **update):
        """Perform an atomic update on the fields of the first document
        matched by the query.

@@ -577,12 +579,19 @@ class BaseQuerySet(object):
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number
            updated items
        :param update: Django-style update keyword arguments

        full_result
        :returns the number of updated documents (unless ``full_result`` is True)
        .. versionadded:: 0.2
        """
        return self.update(
            upsert=upsert, multi=False, write_concern=write_concern, **update)
            upsert=upsert,
            multi=False,
            write_concern=write_concern,
            full_result=full_result,
            **update)
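The hunk above threads a new `full_result` flag through `update_one()`. A hedged usage sketch, assuming PyMongo 3 and a hypothetical `Person` document with `name`/`age` fields:

```python
from mongoengine import Document, StringField, IntField, connect

class Person(Document):
    name = StringField()
    age = IntField()

connect('example_db')  # hypothetical database name

# Default behaviour: the number of updated documents comes back.
n_updated = Person.objects(name='Ross').update_one(set__age=30)

# With full_result=True a pymongo.results.UpdateResult is returned
# instead, exposing matched_count / modified_count / upserted_id.
result = Person.objects(name='Ross').update_one(set__age=31, full_result=True)
print(result.matched_count, result.modified_count)
```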
    def modify(self, upsert=False, full_response=False, remove=False, new=False, **update):
        """Update and return the updated document.
@@ -690,7 +699,7 @@ class BaseQuerySet(object):
                    self._document._from_son(doc, only_fields=self.only_fields))
        elif self._as_pymongo:
            for doc in docs:
                doc_map[doc['_id']] = self._get_as_pymongo(doc)
                doc_map[doc['_id']] = doc
        else:
            for doc in docs:
                doc_map[doc['_id']] = self._document._from_son(
@@ -748,7 +757,7 @@ class BaseQuerySet(object):
                      '_read_preference', '_iter', '_scalar', '_as_pymongo',
                      '_limit', '_skip', '_hint', '_auto_dereference',
                      '_search_text', 'only_fields', '_max_time_ms',
                      '_comment')
                      '_comment', '_batch_size')

        for prop in copy_props:
            val = getattr(self, prop)
@@ -775,10 +784,11 @@ class BaseQuerySet(object):
        """Limit the number of returned documents to `n`. This may also be
        achieved using array-slicing syntax (e.g. ``User.objects[:5]``).

        :param n: the maximum number of objects to return
        :param n: the maximum number of objects to return if n is greater than 0.
            When 0 is passed, returns all the documents in the cursor
        """
        queryset = self.clone()
        queryset._limit = n if n != 0 else 1
        queryset._limit = n

        # If a cursor object has already been created, apply the limit to it.
        if queryset._cursor_obj:
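The `limit()` hunk above stops coercing `limit(0)` to `limit(1)` and passes 0 straight through, matching MongoDB cursor semantics where a limit of 0 means "no limit". A small hedged sketch, reusing the hypothetical `Person` model from the earlier example:

```python
# limit(n > 0): cap the result set; limit(0): no cap at all.
first_five = Person.objects.limit(5)   # at most five documents
everything = Person.objects.limit(0)   # previously this behaved like limit(1)
```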
@@ -976,11 +986,10 @@ class BaseQuerySet(object):
        # explicitly included, and then more complicated operators such as
        # $slice.
        def _sort_key(field_tuple):
            key, value = field_tuple
            if isinstance(value, (int)):
            _, value = field_tuple
            if isinstance(value, int):
                return value  # 0 for exclusion, 1 for inclusion
            else:
                return 2  # so that complex values appear last
            return 2  # so that complex values appear last

        fields = sorted(cleaned_fields, key=_sort_key)

@@ -1477,16 +1486,16 @@ class BaseQuerySet(object):

    # Iterator helpers

    def next(self):
    def __next__(self):
        """Wrap the result in a :class:`~mongoengine.Document` object.
        """
        if self._limit == 0 or self._none:
            raise StopIteration

        raw_doc = self._cursor.next()
        raw_doc = six.next(self._cursor)

        if self._as_pymongo:
            return self._get_as_pymongo(raw_doc)
            return raw_doc

        doc = self._document._from_son(
            raw_doc, _auto_dereference=self._auto_dereference,
@@ -1497,6 +1506,8 @@ class BaseQuerySet(object):

        return doc

    next = __next__  # For Python2 support

    def rewind(self):
        """Rewind the cursor to its unevaluated state.

@@ -1729,13 +1740,13 @@ class BaseQuerySet(object):
        }
        """
        total, data, types = self.exec_js(freq_func, field)
        values = {types.get(k): int(v) for k, v in data.iteritems()}
        values = {types.get(k): int(v) for k, v in iteritems(data)}

        if normalize:
            values = {k: float(v) / total for k, v in values.items()}

        frequencies = {}
        for k, v in values.iteritems():
        for k, v in iteritems(values):
            if isinstance(k, float):
                if int(k) == k:
                    k = int(k)
@@ -1831,26 +1842,6 @@ class BaseQuerySet(object):

        return tuple(data)

    def _get_as_pymongo(self, doc):
        """Clean up a PyMongo doc, removing fields that were only fetched
        for the sake of MongoEngine's implementation, and return it.
        """
        # Always remove _cls as a MongoEngine's implementation detail.
        if '_cls' in doc:
            del doc['_cls']

        # If the _id was not included in a .only or was excluded in a .exclude,
        # remove it from the doc (we always fetch it so that we can properly
        # construct documents).
        fields = self._loaded_fields
        if fields and '_id' in doc and (
                (fields.value == QueryFieldList.ONLY and '_id' not in fields.fields) or
                (fields.value == QueryFieldList.EXCLUDE and '_id' in fields.fields)
        ):
            del doc['_id']

        return doc

    def _sub_js_fields(self, code):
        """When fields are specified with [~fieldname] syntax, where
        *fieldname* is the Python name of a field, *fieldname* will be
@@ -1872,8 +1863,8 @@ class BaseQuerySet(object):
            # Substitute the correct name for the field into the javascript
            return '.'.join([f.db_field for f in fields])

        code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code)
        code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub,
        code = re.sub(r'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code)
        code = re.sub(r'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub,
                      code)
        return code
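The final hunk above swaps `u'...'` regex literals for raw strings. A standalone sketch (mine, not from the diff) of why: in a normal string literal, `\[` is an invalid escape sequence (flake8's W605, and a DeprecationWarning on recent Pythons), while a raw string hands the backslash to the regex engine untouched.

```python
import re

# Raw string: the backslashes reach the regex engine intact,
# and no invalid-escape warning is emitted at compile time.
pattern = re.compile(r'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]')

assert pattern.search('[~first_name]').group(1) == 'first_name'
```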
@@ -63,9 +63,11 @@ class QueryFieldList(object):
        self._only_called = True
        return self

    def __nonzero__(self):
    def __bool__(self):
        return bool(self.fields)

    __nonzero__ = __bool__  # For Py2 support

    def as_dict(self):
        field_list = {field: self.value for field in self.fields}
        if self.slice:
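Both this hunk and the `BaseQuerySet` change earlier apply the same Python 2/3 truthiness shim: implement `__bool__` (the Python 3 hook) and alias it to `__nonzero__` (the Python 2 hook). A minimal self-contained sketch of the pattern:

```python
class Container(object):
    def __init__(self, items):
        self.items = items

    def __bool__(self):           # consulted by `if obj:` on Python 3
        return bool(self.items)

    __nonzero__ = __bool__        # Python 2 consults __nonzero__ instead

assert not Container([])
assert Container([1, 2])
```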
@@ -36,7 +36,7 @@ class QuerySetManager(object):
        queryset_class = owner._meta.get('queryset_class', self.default)
        queryset = queryset_class(owner, owner._get_collection())
        if self.get_queryset:
            arg_count = self.get_queryset.func_code.co_argcount
            arg_count = self.get_queryset.__code__.co_argcount
            if arg_count == 1:
                queryset = self.get_queryset(queryset)
            elif arg_count == 2:
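`func.func_code` exists only on Python 2; `func.__code__` is available on both Python 2.6+ and Python 3, so the arity introspection above keeps working everywhere. A quick standalone sketch:

```python
def custom_get_queryset(doc_cls, queryset):
    return queryset

# Portable replacement for the Py2-only `custom_get_queryset.func_code`.
assert custom_get_queryset.__code__.co_argcount == 2
```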
@@ -89,7 +89,7 @@ class QuerySet(BaseQuerySet):
                yield self._result_cache[pos]
                pos += 1

            # Raise StopIteration if we already established there were no more
            # return if we already established there were no more
            # docs in the db cursor.
            if not self._has_more:
                return
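The comment rewrite above matches how the generator actually stops: with a bare `return`. Raising `StopIteration` inside a generator body is outlawed by PEP 479 (it surfaces as a `RuntimeError` from Python 3.7 on). A minimal standalone sketch of the correct pattern:

```python
def take(iterator, n):
    """Yield at most n items from an iterator."""
    for _ in range(n):
        try:
            item = next(iterator)
        except StopIteration:
            # PEP 479: re-raising StopIteration here would bubble up
            # as a RuntimeError on Python 3.7+; `return` ends cleanly.
            return
        yield item

assert list(take(iter([1, 2]), 5)) == [1, 2]
```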
@@ -115,7 +115,7 @@ class QuerySet(BaseQuerySet):
        # the result cache.
        try:
            for _ in six.moves.range(ITER_CHUNK_SIZE):
                self._result_cache.append(self.next())
                self._result_cache.append(six.next(self))
        except StopIteration:
            # Getting this exception means there are no more docs in the
            # db cursor. Set _has_more to False so that we can use that
@@ -170,7 +170,7 @@ class QuerySetNoCache(BaseQuerySet):
        data = []
        for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
            try:
                data.append(self.next())
                data.append(six.next(self))
            except StopIteration:
                break

@@ -186,10 +186,3 @@ class QuerySetNoCache(BaseQuerySet):
        queryset = self.clone()
        queryset.rewind()
        return queryset


class QuerySetNoDeRef(QuerySet):
    """Special no_dereference QuerySet"""

    def __dereference(items, max_depth=1, instance=None, name=None):
        return items
@@ -4,12 +4,13 @@ from bson import ObjectId, SON
from bson.dbref import DBRef
import pymongo
import six
from six import iteritems

from mongoengine.base import UPDATE_OPERATORS
from mongoengine.common import _import_class
from mongoengine.connection import get_connection
from mongoengine.errors import InvalidQueryError
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.pymongo_support import IS_PYMONGO_3

__all__ = ('query', 'update')

@@ -147,14 +148,14 @@ def query(_doc_cls=None, **kwargs):
        if op is None or key not in mongo_query:
            mongo_query[key] = value
        elif key in mongo_query:
            if isinstance(mongo_query[key], dict):
            if isinstance(mongo_query[key], dict) and isinstance(value, dict):
                mongo_query[key].update(value)
                # $max/minDistance needs to come last - convert to SON
                value_dict = mongo_query[key]
                if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
                        ('$near' in value_dict or '$nearSphere' in value_dict):
                    value_son = SON()
                    for k, v in value_dict.iteritems():
                    for k, v in iteritems(value_dict):
                        if k == '$maxDistance' or k == '$minDistance':
                            continue
                        value_son[k] = v
@@ -201,30 +202,37 @@ def update(_doc_cls=None, **update):
    format.
    """
    mongo_update = {}

    for key, value in update.items():
        if key == '__raw__':
            mongo_update.update(value)
            continue

        parts = key.split('__')

        # if there is no operator, default to 'set'
        if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
            parts.insert(0, 'set')

        # Check for an operator and transform to mongo-style if there is
        op = None
        if parts[0] in UPDATE_OPERATORS:
            op = parts.pop(0)
            # Convert Pythonic names to Mongo equivalents
            if op in ('push_all', 'pull_all'):
                op = op.replace('_all', 'All')
            elif op == 'dec':
            operator_map = {
                'push_all': 'pushAll',
                'pull_all': 'pullAll',
                'dec': 'inc',
                'add_to_set': 'addToSet',
                'set_on_insert': 'setOnInsert'
            }
            if op == 'dec':
                # Support decrement by flipping a positive value's sign
                # and using 'inc'
                op = 'inc'
                value = -value
            elif op == 'add_to_set':
                op = 'addToSet'
            elif op == 'set_on_insert':
                op = 'setOnInsert'
            # If the operator isn't found in the operator map, the op value
            # will stay unchanged
            op = operator_map.get(op, op)
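The hunk above replaces an `if/elif` chain with a lookup table; only `dec` still needs special-casing, since decrement is expressed as a negated `$inc`. A standalone sketch (mine) of the resulting mapping logic:

```python
operator_map = {
    'push_all': 'pushAll',
    'pull_all': 'pullAll',
    'dec': 'inc',
    'add_to_set': 'addToSet',
    'set_on_insert': 'setOnInsert',
}

op, value = 'dec', 5
if op == 'dec':
    value = -value                  # decrement == increment by -value
op = operator_map.get(op, op)       # unknown names pass through unchanged

# dec__field=5 therefore transforms to {'$inc': {'field': -5}}
assert (op, value) == ('inc', -5)
```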
    match = None
    if parts[-1] in COMPARISON_OPERATORS:
@@ -291,6 +299,8 @@ def update(_doc_cls=None, **update):
                value = field.prepare_query_value(op, value)
            elif op == 'unset':
                value = 1
            elif op == 'inc':
                value = field.prepare_query_value(op, value)

    if match:
        match = '$' + match
@@ -336,7 +346,7 @@ def update(_doc_cls=None, **update):
                value = {key: {'$each': value}}
        elif op in ('push', 'pushAll'):
            if parts[-1].isdigit():
                key = parts[0]
                key = '.'.join(parts[0:-1])
                position = int(parts[-1])
                # $position expects an iterable. If pushing a single value,
                # wrap it in a list.
@@ -420,7 +430,6 @@ def _infer_geometry(value):
                                'type and coordinates keys')
    elif isinstance(value, (list, set)):
        # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
        # TODO: should both TypeError and IndexError be alike interpreted?

        try:
            value[0][0][0]
@@ -3,7 +3,7 @@ import copy
from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import transform

__all__ = ('Q',)
__all__ = ('Q', 'QNode')


class QNodeVisitor(object):
@@ -131,6 +131,10 @@ class QCombination(QNode):
            else:
                self.children.append(node)

    def __repr__(self):
        op = ' & ' if self.operation is self.AND else ' | '
        return '(%s)' % op.join([repr(node) for node in self.children])

    def accept(self, visitor):
        for i in range(len(self.children)):
            if isinstance(self.children[i], QNode):
@@ -151,6 +155,9 @@ class Q(QNode):
    def __init__(self, **query):
        self.query = query

    def __repr__(self):
        return 'Q(**%s)' % repr(self.query)

    def accept(self, visitor):
        return visitor.visit_query(self)
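With the `__repr__` methods added above, `Q` nodes and their combinations print something inspectable. A hedged sketch of the expected output, assuming a mongoengine build that includes this change:

```python
from mongoengine.queryset.visitor import Q

print(repr(Q(name='Ross')))
# Q(**{'name': 'Ross'})

print(repr(Q(name='Ross') | Q(age__gte=30)))
# (Q(**{'name': 'Ross'}) | Q(**{'age__gte': 30}))
```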
@@ -5,7 +5,7 @@ detailed-errors=1
cover-package=mongoengine

[flake8]
ignore=E501,F401,F403,F405,I201,I202
ignore=E501,F401,F403,F405,I201,I202,W504, W605
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
max-complexity=47
application-import-names=mongoengine,tests
setup.py
@@ -44,9 +44,8 @@ CLASSIFIERS = [
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    'Topic :: Database',
@@ -1,4 +1,4 @@
from all_warnings import AllWarnings
from document import *
from queryset import *
from fields import *
from .all_warnings import AllWarnings
from .document import *
from .queryset import *
from .fields import *
@@ -1,13 +1,13 @@
import unittest

from class_methods import *
from delta import *
from dynamic import *
from indexes import *
from inheritance import *
from instance import *
from json_serialisation import *
from validation import *
from .class_methods import *
from .delta import *
from .dynamic import *
from .indexes import *
from .inheritance import *
from .instance import *
from .json_serialisation import *
from .validation import *

if __name__ == '__main__':
    unittest.main()
@@ -2,10 +2,11 @@
import unittest

from mongoengine import *
from mongoengine.pymongo_support import list_collection_names

from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db
from tests.utils import needs_mongodb_v26
from tests.utils import requires_mongodb_gte_26

__all__ = ("ClassMethodsTest", )

@@ -27,9 +28,7 @@ class ClassMethodsTest(unittest.TestCase):
        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_definition(self):
@@ -66,10 +65,10 @@ class ClassMethodsTest(unittest.TestCase):
        """
        collection_name = 'person'
        self.Person(name='Test').save()
        self.assertTrue(collection_name in self.db.collection_names())
        self.assertIn(collection_name, list_collection_names(self.db))

        self.Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())
        self.assertNotIn(collection_name, list_collection_names(self.db))

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
@@ -102,16 +101,16 @@ class ClassMethodsTest(unittest.TestCase):
        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})

        BlogPost.ensure_index(['author', 'description'])
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]})

        BlogPost._get_collection().drop_index('author_1_description_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})

        BlogPost._get_collection().drop_index('author_1_title_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []})

    def test_compare_indexes_inheritance(self):
        """ Ensure that the indexes are properly created and that
@@ -140,16 +139,16 @@ class ClassMethodsTest(unittest.TestCase):

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})

        BlogPostWithTags.ensure_index(['author', 'tag_list'])
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]})

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})

        BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []})

    def test_compare_indexes_multiple_subclasses(self):
        """ Ensure that compare_indexes behaves correctly if called from a
@@ -184,11 +183,11 @@ class ClassMethodsTest(unittest.TestCase):
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })
        self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
        self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []})
        self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []})

    @needs_mongodb_v26
    @requires_mongodb_gte_26
    def test_compare_indexes_for_text_indexes(self):
        """ Ensure that compare_indexes behaves correctly for text indexes """

@@ -340,7 +339,7 @@ class ClassMethodsTest(unittest.TestCase):
            meta = {'collection': collection_name}

        Person(name="Test User").save()
        self.assertTrue(collection_name in self.db.collection_names())
        self.assertIn(collection_name, list_collection_names(self.db))

        user_obj = self.db[collection_name].find_one()
        self.assertEqual(user_obj['name'], "Test User")
@@ -349,7 +348,7 @@ class ClassMethodsTest(unittest.TestCase):
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())
        self.assertNotIn(collection_name, list_collection_names(self.db))

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
@@ -3,16 +3,14 @@ import unittest

from bson import SON
from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("DeltaTest",)
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase


class DeltaTest(unittest.TestCase):
class DeltaTest(MongoDBTestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()
        super(DeltaTest, self).setUp()

        class Person(Document):
            name = StringField()
@@ -25,9 +23,7 @@ class DeltaTest(unittest.TestCase):
        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_delta(self):
@@ -694,7 +690,7 @@ class DeltaTest(unittest.TestCase):
        organization.employees.append(person)
        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertTrue('employees' in updates)
        self.assertIn('employees', updates)

    def test_delta_with_dbref_false(self):
        person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)
@@ -709,7 +705,7 @@ class DeltaTest(unittest.TestCase):
        organization.employees.append(person)
        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertTrue('employees' in updates)
        self.assertIn('employees', updates)

    def test_nested_nested_fields_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
@@ -863,5 +859,6 @@ class DeltaTest(unittest.TestCase):
        self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
        self.assertEqual(uinfo.id, delta[0]["users.007.info"])


if __name__ == '__main__':
    unittest.main()
@@ -1,16 +1,15 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db
from tests.utils import MongoDBTestCase

__all__ = ("DynamicTest", )
__all__ = ("TestDynamicDocument", )


class DynamicTest(unittest.TestCase):
class TestDynamicDocument(MongoDBTestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()
        super(TestDynamicDocument, self).setUp()

        class Person(DynamicDocument):
            name = StringField()
@@ -98,6 +97,72 @@ class DynamicTest(unittest.TestCase):
        self.assertEqual(len(p._data), 4)
        self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])

    def test_fields_without_underscore(self):
        """Ensure we can query dynamic fields"""
        Person = self.Person

        p = self.Person(name='Dean')
        p.save()

        raw_p = Person.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_cls': u'Person',
                '_id': p.id,
                'name': u'Dean'
            }
        )

        p.name = 'OldDean'
        p.newattr = 'garbage'
        p.save()
        raw_p = Person.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_cls': u'Person',
                '_id': p.id,
                'name': 'OldDean',
                'newattr': u'garbage'
            }
        )

    def test_fields_containing_underscore(self):
        """Ensure we can query dynamic fields"""
        class WeirdPerson(DynamicDocument):
            name = StringField()
            _name = StringField()

        WeirdPerson.drop_collection()

        p = WeirdPerson(name='Dean', _name='Dean')
        p.save()

        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_id': p.id,
                '_name': u'Dean',
                'name': u'Dean'
            }
        )

        p.name = 'OldDean'
        p._name = 'NewDean'
        p._newattr1 = 'garbage'  # Unknown fields won't be added
        p.save()
        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
        self.assertEqual(
            raw_p,
            {
                '_id': p.id,
                '_name': u'NewDean',
                'name': u'OldDean',
            }
        )

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
@@ -174,8 +239,8 @@ class DynamicTest(unittest.TestCase):

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertIn('name', Employee._fields)
        self.assertIn('salary', Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

@@ -189,7 +254,7 @@ class DynamicTest(unittest.TestCase):
        self.assertEqual(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))
        self.assertIsInstance(joe_bloggs, Employee)

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
@@ -369,5 +434,6 @@ class DynamicTest(unittest.TestCase):
        person.save()
        self.assertEqual(Person.objects.first().age, 35)


if __name__ == '__main__':
    unittest.main()
@@ -1,15 +1,16 @@
# -*- coding: utf-8 -*-
import unittest
import sys
from datetime import datetime

from nose.plugins.skip import SkipTest
from datetime import datetime
from pymongo.errors import OperationFailure
import pymongo
from six import iteritems

from mongoengine import *
from mongoengine.connection import get_db

from tests.utils import get_mongodb_version, needs_mongodb_v26
from mongoengine.mongodb_support import get_mongodb_version, MONGODB_32, MONGODB_3
from tests.utils import requires_mongodb_gte_26, requires_mongodb_lte_32, requires_mongodb_gte_34

__all__ = ("IndexesTest", )

@@ -19,6 +20,7 @@ class IndexesTest(unittest.TestCase):
    def setUp(self):
        self.connection = connect(db='mongoenginetest')
        self.db = get_db()
        self.mongodb_version = get_mongodb_version()

        class Person(Document):
            name = StringField()
@@ -68,9 +70,9 @@ class IndexesTest(unittest.TestCase):
        info = BlogPost.objects._collection.index_information()
        # _id, '-date', 'tags', ('cat', 'date')
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        info = [value['key'] for key, value in iteritems(info)]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)
            self.assertIn(expected['fields'], info)

    def _index_test_inheritance(self, InheritFrom):

@@ -100,9 +102,9 @@ class IndexesTest(unittest.TestCase):
        # the indices on -date and tags will both contain
        # _cls as first element in the key
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        info = [value['key'] for key, value in iteritems(info)]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)
            self.assertIn(expected['fields'], info)

        class ExtendedBlogPost(BlogPost):
            title = StringField()
@@ -115,9 +117,9 @@ class IndexesTest(unittest.TestCase):

        ExtendedBlogPost.ensure_indexes()
        info = ExtendedBlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        info = [value['key'] for key, value in iteritems(info)]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)
            self.assertIn(expected['fields'], info)

    def test_indexes_document_inheritance(self):
        """Ensure that indexes are used when meta[indexes] is specified for
@@ -225,8 +227,8 @@ class IndexesTest(unittest.TestCase):
        # Indexes are lazy so use list() to perform query
        list(Person.objects)
        info = Person.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('rank.title', 1)] in info)
        info = [value['key'] for key, value in iteritems(info)]
        self.assertIn([('rank.title', 1)], info)

    def test_explicit_geo2d_index(self):
        """Ensure that geo2d indexes work when created via meta[indexes]
@@ -245,8 +247,8 @@ class IndexesTest(unittest.TestCase):

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('location.point', '2d')] in info)
        info = [value['key'] for key, value in iteritems(info)]
        self.assertIn([('location.point', '2d')], info)

    def test_explicit_geo2d_index_embedded(self):
        """Ensure that geo2d indexes work when created via meta[indexes]
@@ -268,8 +270,8 @@ class IndexesTest(unittest.TestCase):

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('current.location.point', '2d')] in info)
        info = [value['key'] for key, value in iteritems(info)]
        self.assertIn([('current.location.point', '2d')], info)

    def test_explicit_geosphere_index(self):
        """Ensure that geosphere indexes work when created via meta[indexes]
@@ -288,8 +290,8 @@ class IndexesTest(unittest.TestCase):

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('location.point', '2dsphere')] in info)
        info = [value['key'] for key, value in iteritems(info)]
        self.assertIn([('location.point', '2dsphere')], info)

    def test_explicit_geohaystack_index(self):
        """Ensure that geohaystack indexes work when created via meta[indexes]
@@ -310,8 +312,8 @@ class IndexesTest(unittest.TestCase):

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('location.point', 'geoHaystack')] in info)
        info = [value['key'] for key, value in iteritems(info)]
        self.assertIn([('location.point', 'geoHaystack')], info)

    def test_create_geohaystack_index(self):
        """Ensure that geohaystack indexes can be created
@@ -322,8 +324,8 @@ class IndexesTest(unittest.TestCase):

        Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10)
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info)
        info = [value['key'] for key, value in iteritems(info)]
        self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info)

    def test_dictionary_indexes(self):
        """Ensure that indexes are used when meta[indexes] contains
@@ -355,8 +357,8 @@ class IndexesTest(unittest.TestCase):
        info = [(value['key'],
                 value.get('unique', False),
                 value.get('sparse', False))
                for key, value in info.iteritems()]
        self.assertTrue(([('addDate', -1)], True, True) in info)
                for key, value in iteritems(info)]
        self.assertIn(([('addDate', -1)], True, True), info)

        BlogPost.drop_collection()

@@ -476,6 +478,7 @@ class IndexesTest(unittest.TestCase):
    def test_covered_index(self):
        """Ensure that covered indexes can be used
        """
        IS_MONGODB_3 = get_mongodb_version() >= MONGODB_3

        class Test(Document):
            a = IntField()
@@ -491,8 +494,6 @@ class IndexesTest(unittest.TestCase):
        obj = Test(a=1)
        obj.save()

        IS_MONGODB_3 = get_mongodb_version()[0] >= 3

        # Need to be explicit about covered indexes as mongoDB doesn't know if
        # the documents returned might have more keys in that here.
        query_plan = Test.objects(id=obj.id).exclude('a').explain()
@@ -541,19 +542,24 @@ class IndexesTest(unittest.TestCase):
                         [('categories', 1), ('_id', 1)])

    def test_hint(self):
        MONGO_VER = self.mongodb_version

        TAGS_INDEX_NAME = 'tags_1'
        class BlogPost(Document):
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    'tags',
                    {
                        'fields': ['tags'],
                        'name': TAGS_INDEX_NAME
                    }
                ],
            }

        BlogPost.drop_collection()

        for i in range(0, 10):
            tags = [("tag %i" % n) for n in range(0, i % 2)]
        for i in range(10):
            tags = [("tag %i" % n) for n in range(i % 2)]
            BlogPost(tags=tags).save()

        self.assertEqual(BlogPost.objects.count(), 10)
@@ -563,18 +569,18 @@ class IndexesTest(unittest.TestCase):
        if pymongo.version != '3.0':
            self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)

            if MONGO_VER >= MONGODB_32:
                # Mongo32 throws an error if an index exists (i.e `tags` in our case)
                # and you use hint on an index name that does not exist
                with self.assertRaises(OperationFailure):
                    BlogPost.objects.hint([('ZZ', 1)]).count()
            else:
                self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)

        if pymongo.version >= '2.8':
            self.assertEqual(BlogPost.objects.hint('tags').count(), 10)
        else:
            def invalid_index():
                BlogPost.objects.hint('tags').next()
            self.assertRaises(TypeError, invalid_index)
        self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10)

        def invalid_index_2():
            return BlogPost.objects.hint(('tags', 1)).next()
        self.assertRaises(Exception, invalid_index_2)
        with self.assertRaises(Exception):
            BlogPost.objects.hint(('tags', 1)).next()

    def test_unique(self):
        """Ensure that uniqueness constraints are applied to fields.
@@ -595,6 +601,32 @@ class IndexesTest(unittest.TestCase):
        # Ensure backwards compatibility for errors
        self.assertRaises(OperationError, post2.save)

    @requires_mongodb_gte_34
    def test_primary_key_unique_not_working_under_mongo_34(self):
        """Relates to #1445"""
        class Blog(Document):
            id = StringField(primary_key=True, unique=True)

        Blog.drop_collection()

        with self.assertRaises(OperationFailure) as ctx_err:
            Blog(id='garbage').save()
        try:
            self.assertIn("The field 'unique' is not valid for an _id index specification", str(ctx_err.exception))
        except AssertionError:
            # error is slightly different on python 3.6
            self.assertIn("The field 'background' is not valid for an _id index specification", str(ctx_err.exception))

    @requires_mongodb_lte_32
    def test_primary_key_unique_working_under_mongo_32(self):
        """Relates to #1445"""
        class Blog(Document):
            id = StringField(primary_key=True, unique=True)

        Blog.drop_collection()

        Blog(id='garbage').save()

    def test_unique_with(self):
        """Ensure that unique_with constraints are applied to fields.
        """
@@ -749,12 +781,12 @@ class IndexesTest(unittest.TestCase):
        except NotUniqueError:
            pass

    def test_unique_and_primary(self):
    def test_primary_save_duplicate_update_existing_object(self):
        """If you set a field as primary, then unexpected behaviour can occur.
        You won't create a duplicate but you will update an existing document.
        """
        class User(Document):
            name = StringField(primary_key=True, unique=True)
            name = StringField(primary_key=True)
            password = StringField()

        User.drop_collection()
@@ -801,9 +833,9 @@ class IndexesTest(unittest.TestCase):
            self.fail('Unbound local error at index + pk definition')

        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        info = [value['key'] for key, value in iteritems(info)]
        index_item = [('_id', 1), ('comments.comment_id', 1)]
        self.assertTrue(index_item in info)
        self.assertIn(index_item, info)

    def test_compound_key_embedded(self):

@@ -849,9 +881,9 @@ class IndexesTest(unittest.TestCase):
        }

        info = MyDoc.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('provider_ids.foo', 1)] in info)
        self.assertTrue([('provider_ids.bar', 1)] in info)
        info = [value['key'] for key, value in iteritems(info)]
        self.assertIn([('provider_ids.foo', 1)], info)
        self.assertIn([('provider_ids.bar', 1)], info)

    def test_sparse_compound_indexes(self):

@@ -867,7 +899,7 @@ class IndexesTest(unittest.TestCase):
                         info['provider_ids.foo_1_provider_ids.bar_1']['key'])
        self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse'])

    @needs_mongodb_v26
    @requires_mongodb_gte_26
    def test_text_indexes(self):
        class Book(Document):
            title = DictField()
@@ -876,9 +908,9 @@ class IndexesTest(unittest.TestCase):
            }

        indexes = Book.objects._collection.index_information()
        self.assertTrue("title_text" in indexes)
        self.assertIn("title_text", indexes)
        key = indexes["title_text"]["key"]
        self.assertTrue(('_fts', 'text') in key)
        self.assertIn(('_fts', 'text'), key)

    def test_hashed_indexes(self):

@@ -889,8 +921,8 @@ class IndexesTest(unittest.TestCase):
        }

        indexes = Book.objects._collection.index_information()
        self.assertTrue("ref_id_hashed" in indexes)
        self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"])
        self.assertIn("ref_id_hashed", indexes)
        self.assertIn(('ref_id', 'hashed'), indexes["ref_id_hashed"]["key"])

    def test_indexes_after_database_drop(self):
        """
@@ -931,7 +963,6 @@ class IndexesTest(unittest.TestCase):
        # Drop the temporary database at the end
        connection.drop_database('tempdatabase')


    def test_index_dont_send_cls_option(self):
        """
        Ensure that 'cls' option is not sent through ensureIndex. We shouldn't
@@ -1013,7 +1044,7 @@ class IndexesTest(unittest.TestCase):
        TestDoc.ensure_indexes()

        index_info = TestDoc._get_collection().index_information()
        self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info)
        self.assertIn('shard_1_1__cls_1_txt_1_1', index_info)


if __name__ == '__main__':
@@ -2,30 +2,45 @@
import unittest
import warnings

from datetime import datetime
from six import iteritems

from mongoengine import (BooleanField, Document, EmbeddedDocument,
                         EmbeddedDocumentField, GenericReferenceField,
                         IntField, ReferenceField, StringField)
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase
from tests.fixtures import Base

from mongoengine import Document, EmbeddedDocument, connect
from mongoengine.connection import get_db
from mongoengine.fields import (BooleanField, GenericReferenceField,
                                IntField, StringField)

__all__ = ('InheritanceTest', )


class InheritanceTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()
class InheritanceTest(MongoDBTestCase):

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_constructor_cls(self):
        # Ensures _cls is properly set during construction
        # and when object gets reloaded (prevent regression of #1950)
        class EmbedData(EmbeddedDocument):
            data = StringField()
            meta = {'allow_inheritance': True}

        class DataDoc(Document):
            name = StringField()
            embed = EmbeddedDocumentField(EmbedData)
            meta = {'allow_inheritance': True}

        test_doc = DataDoc(name='test', embed=EmbedData(data='data'))
        self.assertEqual(test_doc._cls, 'DataDoc')
        self.assertEqual(test_doc.embed._cls, 'EmbedData')
        test_doc.save()
        saved_doc = DataDoc.objects.with_id(test_doc.id)
        self.assertEqual(test_doc._cls, saved_doc._cls)
        self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls)
        test_doc.delete()

    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
@@ -258,9 +273,10 @@ class InheritanceTest(unittest.TestCase):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with self.assertRaises(ValueError):
        with self.assertRaises(ValueError) as cm:
            class Dog(Animal):
                pass
        self.assertIn("Document Animal may not be subclassed", str(cm.exception))

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog').save()
@@ -268,7 +284,7 @@ class InheritanceTest(unittest.TestCase):

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)
        self.assertNotIn('_cls', obj)

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure if inheritance is on in a subclass you can't turn it off.
@@ -277,9 +293,10 @@ class InheritanceTest(unittest.TestCase):
            name = StringField()
            meta = {'allow_inheritance': True}

        with self.assertRaises(ValueError):
        with self.assertRaises(ValueError) as cm:
            class Mammal(Animal):
                meta = {'allow_inheritance': False}
        self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False')

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
@@ -292,13 +309,48 @@ class InheritanceTest(unittest.TestCase):
        class Animal(FinalDocument):
            name = StringField()

        with self.assertRaises(ValueError):
        with self.assertRaises(ValueError) as cm:
            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertFalse('_cls' in doc.to_mongo())
        self.assertNotIn('_cls', doc.to_mongo())

    def test_using_abstract_class_in_reference_field(self):
        # Ensures no regression of #1920
        class AbstractHuman(Document):
            meta = {'abstract': True}

        class Dad(AbstractHuman):
            name = StringField()

        class Home(Document):
            dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
            address = StringField()

        dad = Dad(name='5').save()
        Home(dad=dad, address='street').save()

        home = Home.objects.first()
        home.address = 'garbage'
        home.save()  # Was failing with ValidationError

    def test_abstract_class_referencing_self(self):
        # Ensures no regression of #1920
        class Human(Document):
            meta = {'abstract': True}
            creator = ReferenceField('self', dbref=True)

        class User(Human):
            name = StringField()

        user = User(name='John').save()
        user2 = User(name='Foo', creator=user).save()

        user2 = User.objects.with_id(user2.id)
        user2.name = 'Bar'
        user2.save()  # Was failing with ValidationError

    def test_abstract_handle_ids_in_metaclass_properly(self):

@@ -358,11 +410,11 @@ class InheritanceTest(unittest.TestCase):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        bkk = City(continent='asia')
        self.assertEqual(None, bkk.pk)
        city = City(continent='asia')
        self.assertEqual(None, city.pk)
        # TODO: expected error? Shouldn't we create a new error type?
        with self.assertRaises(KeyError):
            setattr(bkk, 'pk', 1)
            setattr(city, 'pk', 1)

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""
@@ -374,14 +426,14 @@ class InheritanceTest(unittest.TestCase):
            pass

        doc = Comment(content='test')
        self.assertFalse('_cls' in doc.to_mongo())
        self.assertNotIn('_cls', doc.to_mongo())

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertTrue('_cls' in doc.to_mongo())
        self.assertIn('_cls', doc.to_mongo())

    def test_document_inheritance(self):
        """Ensure multiple inheritance of abstract documents
@@ -430,12 +482,12 @@ class InheritanceTest(unittest.TestCase):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in defaults.iteritems():
        for k, v in iteritems(defaults):
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        self.assertFalse('collection' in Animal._meta)
        self.assertFalse('collection' in Mammal._meta)
        self.assertNotIn('collection', Animal._meta)
        self.assertNotIn('collection', Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)
@@ -4,13 +4,19 @@ import os
import pickle
import unittest
import uuid
import warnings
import weakref

from datetime import datetime

from bson import DBRef, ObjectId
from pymongo.errors import DuplicateKeyError
from six import iteritems

from mongoengine.pymongo_support import list_collection_names
from tests import fixtures
from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
                            PickleDynamicEmbedded, PickleDynamicTest)
from tests.utils import MongoDBTestCase

from mongoengine import *
from mongoengine.base import get_document, _document_registry
@@ -22,7 +28,7 @@ from mongoengine.queryset import NULLIFY, Q
from mongoengine.context_managers import switch_db, query_counter
from mongoengine import signals

from tests.utils import needs_mongodb_v26
from tests.utils import requires_mongodb_gte_26

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
                               '../fields/mongoengine.png')
@@ -30,12 +36,9 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
__all__ = ("InstanceTest",)


class InstanceTest(unittest.TestCase):
class InstanceTest(MongoDBTestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Job(EmbeddedDocument):
            name = StringField()
            years = IntField()
@@ -53,9 +56,7 @@ class InstanceTest(unittest.TestCase):
        self.Job = Job

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def assertDbEqual(self, docs):
@@ -357,7 +358,7 @@ class InstanceTest(unittest.TestCase):

        user_son = User.objects._collection.find_one()
        self.assertEqual(user_son['_id'], 'test')
        self.assertTrue('username' not in user_son['_id'])
        self.assertNotIn('username', user_son['_id'])

        User.drop_collection()

@@ -370,7 +371,7 @@ class InstanceTest(unittest.TestCase):

        user_son = User.objects._collection.find_one()
        self.assertEqual(user_son['_id'], 'mongo')
        self.assertTrue('username' not in user_son['_id'])
        self.assertNotIn('username', user_son['_id'])

    def test_document_not_registered(self):
        class Place(Document):
@@ -550,21 +551,14 @@ class InstanceTest(unittest.TestCase):
            pass

        f = Foo()
        try:
        with self.assertRaises(Foo.DoesNotExist):
            f.reload()
        except Foo.DoesNotExist:
            pass
        except Exception:
            self.assertFalse("Threw wrong exception")

        f.save()
        f.delete()
        try:

        with self.assertRaises(Foo.DoesNotExist):
            f.reload()
        except Foo.DoesNotExist:
            pass
        except Exception:
            self.assertFalse("Threw wrong exception")

    def test_reload_of_non_strict_with_special_field_name(self):
        """Ensures reloading works for documents with meta strict == False."""
@@ -577,7 +571,7 @@ class InstanceTest(unittest.TestCase):

        Post.drop_collection()

        Post._get_collection().insert({
        Post._get_collection().insert_one({
            "title": "Items eclipse",
            "items": ["more lorem", "even more ipsum"]
        })
@@ -601,10 +595,10 @@ class InstanceTest(unittest.TestCase):
        # Length = length(assigned fields + id)
        self.assertEqual(len(person), 5)

        self.assertTrue('age' in person)
        self.assertIn('age', person)
        person.age = None
        self.assertFalse('age' in person)
        self.assertFalse('nationality' in person)
        self.assertNotIn('age', person)
        self.assertNotIn('nationality', person)

    def test_embedded_document_to_mongo(self):
        class Person(EmbeddedDocument):
@@ -634,8 +628,8 @@ class InstanceTest(unittest.TestCase):
        class Comment(EmbeddedDocument):
            content = StringField()

        self.assertTrue('content' in Comment._fields)
        self.assertFalse('id' in Comment._fields)
        self.assertIn('content', Comment._fields)
        self.assertNotIn('id', Comment._fields)

    def test_embedded_document_instance(self):
        """Ensure that embedded documents can reference parent instance."""
@@ -734,12 +728,12 @@ class InstanceTest(unittest.TestCase):

        t = TestDocument(status="draft", pub_date=datetime.now())

        try:
        with self.assertRaises(ValidationError) as cm:
            t.save()
        except ValidationError as e:
            expect_msg = "Draft entries may not have a publication date."
            self.assertTrue(expect_msg in e.message)
            self.assertEqual(e.to_dict(), {'__all__': expect_msg})

        expected_msg = "Draft entries may not have a publication date."
        self.assertIn(expected_msg, cm.exception.message)
        self.assertEqual(cm.exception.to_dict(), {'__all__': expected_msg})

        t = TestDocument(status="published")
        t.save(clean=False)
@@ -773,12 +767,13 @@ class InstanceTest(unittest.TestCase):
        TestDocument.drop_collection()

        t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15))
        try:

        with self.assertRaises(ValidationError) as cm:
            t.save()
        except ValidationError as e:
            expect_msg = "Value of z != x + y"
            self.assertTrue(expect_msg in e.message)
            self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}})

        expected_msg = "Value of z != x + y"
        self.assertIn(expected_msg, cm.exception.message)
        self.assertEqual(cm.exception.to_dict(), {'doc': {'__all__': expected_msg}})

        t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save()
        self.assertEqual(t.doc.z, 35)
@@ -810,7 +805,8 @@ class InstanceTest(unittest.TestCase):
        doc2 = self.Person(name="jim", age=20).save()
        docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())]

        assert not doc1.modify({'name': doc2.name}, set__age=100)
        n_modified = doc1.modify({'name': doc2.name}, set__age=100)
        self.assertEqual(n_modified, 0)

        self.assertDbEqual(docs)

@@ -819,7 +815,8 @@ class InstanceTest(unittest.TestCase):
        doc2 = self.Person(id=ObjectId(), name="jim", age=20)
        docs = [dict(doc1.to_mongo())]

        assert not doc2.modify({'name': doc2.name}, set__age=100)
        n_modified = doc2.modify({'name': doc2.name}, set__age=100)
        self.assertEqual(n_modified, 0)

        self.assertDbEqual(docs)

@@ -835,23 +832,30 @@ class InstanceTest(unittest.TestCase):
        doc.job.name = "Google"
        doc.job.years = 3

        assert doc.modify(
        n_modified = doc.modify(
            set__age=21, set__job__name="MongoDB", unset__job__years=True)
        self.assertEqual(n_modified, 1)
        doc_copy.age = 21
        doc_copy.job.name = "MongoDB"
        del doc_copy.job.years

        assert doc.to_json() == doc_copy.to_json()
        assert doc._get_changed_fields() == []
        self.assertEqual(doc.to_json(), doc_copy.to_json())
        self.assertEqual(doc._get_changed_fields(), [])

        self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())])

    @needs_mongodb_v26
    @requires_mongodb_gte_26
    def test_modify_with_positional_push(self):
        class Content(EmbeddedDocument):
            keywords = ListField(StringField())

        class BlogPost(Document):
            tags = ListField(StringField())
            content = EmbeddedDocumentField(Content)

        post = BlogPost.objects.create(
            tags=['python'], content=Content(keywords=['ipsum']))

        post = BlogPost.objects.create(tags=['python'])
        self.assertEqual(post.tags, ['python'])
        post.modify(push__tags__0=['code', 'mongo'])
        self.assertEqual(post.tags, ['code', 'mongo', 'python'])
@@ -862,6 +866,16 @@ class InstanceTest(unittest.TestCase):
            ['code', 'mongo', 'python']
        )

        self.assertEqual(post.content.keywords, ['ipsum'])
        post.modify(push__content__keywords__0=['lorem'])
        self.assertEqual(post.content.keywords, ['lorem', 'ipsum'])

        # Assert same order of the list items is maintained in the db
        self.assertEqual(
            BlogPost._get_collection().find_one({'_id': post.pk})['content']['keywords'],
            ['lorem', 'ipsum']
        )

    def test_save(self):
        """Ensure that a document may be saved in the database."""

@@ -1428,6 +1442,60 @@ class InstanceTest(unittest.TestCase):
        self.assertEqual(person.age, 21)
        self.assertEqual(person.active, False)

    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop(self):
        # Refers to Issue #1685
        class EmbeddedChildModel(EmbeddedDocument):
            id = DictField(primary_key=True)

        class ParentModel(Document):
            child = EmbeddedDocumentField(
                EmbeddedChildModel)

        emb = EmbeddedChildModel(id={'1': [1]})
        ParentModel(children=emb)._get_changed_fields()

    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop(self):
        class User(Document):
            id = IntField(primary_key=True)
            name = StringField()

        class Message(Document):
            id = IntField(primary_key=True)
            author = ReferenceField(User)

        Message.drop_collection()

        # All objects share the same id, but each in a different collection
        user = User(id=1, name='user-name').save()
        message = Message(id=1, author=user).save()

        message.author.name = 'tutu'
        self.assertEqual(message._get_changed_fields(), [])
        self.assertEqual(user._get_changed_fields(), ['name'])

    def test__get_changed_fields_same_ids_embedded(self):
        # Refers to Issue #1768
        class User(EmbeddedDocument):
            id = IntField()
            name = StringField()

        class Message(Document):
            id = IntField(primary_key=True)
            author = EmbeddedDocumentField(User)

        Message.drop_collection()

        # All objects share the same id, but each in a different collection
        user = User(id=1, name='user-name')  # .save()
        message = Message(id=1, author=user).save()

        message.author.name = 'tutu'
        self.assertEqual(message._get_changed_fields(), ['author.name'])
        message.save()

        message_fetched = Message.objects.with_id(message.id)
        self.assertEqual(message_fetched.author.name, 'tutu')

    def test_query_count_when_saving(self):
        """Ensure references don't cause extra fetches when saving"""
        class Organization(Document):
@@ -1461,9 +1529,9 @@ class InstanceTest(unittest.TestCase):
        user = User.objects.first()
        # Even if stored as ObjectId's internally mongoengine uses DBRefs
|
||||
# As ObjectId's aren't automatically derefenced
|
||||
self.assertTrue(isinstance(user._data['orgs'][0], DBRef))
|
||||
self.assertTrue(isinstance(user.orgs[0], Organization))
|
||||
self.assertTrue(isinstance(user._data['orgs'][0], Organization))
|
||||
self.assertIsInstance(user._data['orgs'][0], DBRef)
|
||||
self.assertIsInstance(user.orgs[0], Organization)
|
||||
self.assertIsInstance(user._data['orgs'][0], Organization)
|
||||
|
||||
# Changing a value
|
||||
with query_counter() as q:
|
||||
@@ -1843,9 +1911,8 @@ class InstanceTest(unittest.TestCase):
|
||||
post_obj = BlogPost.objects.first()
|
||||
|
||||
# Test laziness
|
||||
self.assertTrue(isinstance(post_obj._data['author'],
|
||||
bson.DBRef))
|
||||
self.assertTrue(isinstance(post_obj.author, self.Person))
|
||||
self.assertIsInstance(post_obj._data['author'], bson.DBRef)
|
||||
self.assertIsInstance(post_obj.author, self.Person)
|
||||
self.assertEqual(post_obj.author.name, 'Test User')
|
||||
|
||||
# Ensure that the dereferenced object may be changed and saved
|
||||
@@ -1937,7 +2004,6 @@ class InstanceTest(unittest.TestCase):
|
||||
child_record.delete()
|
||||
self.assertEqual(Record.objects(name='parent').get().children, [])
|
||||
|
||||
|
||||
def test_reverse_delete_rule_with_custom_id_field(self):
|
||||
"""Ensure that a referenced document with custom primary key
|
||||
is also deleted upon deletion.
|
||||
@@ -2251,12 +2317,12 @@ class InstanceTest(unittest.TestCase):
|
||||
# Make sure docs are properly identified in a list (__eq__ is used
|
||||
# for the comparison).
|
||||
all_user_list = list(User.objects.all())
|
||||
self.assertTrue(u1 in all_user_list)
|
||||
self.assertTrue(u2 in all_user_list)
|
||||
self.assertTrue(u3 in all_user_list)
|
||||
self.assertTrue(u4 not in all_user_list) # New object
|
||||
self.assertTrue(b1 not in all_user_list) # Other object
|
||||
self.assertTrue(b2 not in all_user_list) # Other object
|
||||
self.assertIn(u1, all_user_list)
|
||||
self.assertIn(u2, all_user_list)
|
||||
self.assertIn(u3, all_user_list)
|
||||
self.assertNotIn(u4, all_user_list) # New object
|
||||
self.assertNotIn(b1, all_user_list) # Other object
|
||||
self.assertNotIn(b2, all_user_list) # Other object
|
||||
|
||||
# Make sure docs can be used as keys in a dict (__hash__ is used
|
||||
# for hashing the docs).
|
||||
@@ -2274,10 +2340,10 @@ class InstanceTest(unittest.TestCase):
|
||||
# Make sure docs are properly identified in a set (__hash__ is used
|
||||
# for hashing the docs).
|
||||
all_user_set = set(User.objects.all())
|
||||
self.assertTrue(u1 in all_user_set)
|
||||
self.assertTrue(u4 not in all_user_set)
|
||||
self.assertTrue(b1 not in all_user_list)
|
||||
self.assertTrue(b2 not in all_user_list)
|
||||
self.assertIn(u1, all_user_set)
|
||||
self.assertNotIn(u4, all_user_set)
|
||||
self.assertNotIn(b1, all_user_list)
|
||||
self.assertNotIn(b2, all_user_list)
|
||||
|
||||
# Make sure duplicate docs aren't accepted in the set
|
||||
self.assertEqual(len(all_user_set), 3)
|
||||
@@ -2694,7 +2760,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
User._get_collection().insert_one({
|
||||
'name': 'John',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
@@ -2710,7 +2776,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
User._get_collection().insert_one({
|
||||
'name': 'John',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
@@ -2733,7 +2799,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
User._get_collection().insert_one({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
@@ -2756,7 +2822,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
User._get_collection().insert_one({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
@@ -2779,7 +2845,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
User._get_collection().insert_one({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
@@ -2978,7 +3044,7 @@ class InstanceTest(unittest.TestCase):
|
||||
Person(name="Harry Potter").save()
|
||||
|
||||
person = Person.objects.first()
|
||||
self.assertTrue('id' in person._data.keys())
|
||||
self.assertIn('id', person._data.keys())
|
||||
self.assertEqual(person._data.get('id'), person.id)
|
||||
|
||||
def test_complex_nesting_document_and_embedded_document(self):
|
||||
@@ -2996,7 +3062,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
def expand(self):
|
||||
self.flattened_parameter = {}
|
||||
for parameter_name, parameter in self.parameters.iteritems():
|
||||
for parameter_name, parameter in iteritems(self.parameters):
|
||||
parameter.expand()
|
||||
|
||||
class NodesSystem(Document):
|
||||
@@ -3004,7 +3070,7 @@ class InstanceTest(unittest.TestCase):
|
||||
nodes = MapField(ReferenceField(Node, dbref=False))
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
for node_name, node in self.nodes.iteritems():
|
||||
for node_name, node in iteritems(self.nodes):
|
||||
node.expand()
|
||||
node.save(*args, **kwargs)
|
||||
super(NodesSystem, self).save(*args, **kwargs)
|
||||
@@ -3023,6 +3089,24 @@ class InstanceTest(unittest.TestCase):
|
||||
"UNDEFINED",
|
||||
system.nodes["node"].parameters["param"].macros["test"].value)
|
||||
|
||||
def test_embedded_document_save_reload_warning(self):
|
||||
"""Relates to #1570"""
|
||||
class Embedded(EmbeddedDocument):
|
||||
pass
|
||||
|
||||
class Doc(Document):
|
||||
emb = EmbeddedDocumentField(Embedded)
|
||||
|
||||
doc = Doc(emb=Embedded()).save()
|
||||
doc.emb.save() # Make sure its still working
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error", DeprecationWarning)
|
||||
with self.assertRaises(DeprecationWarning):
|
||||
doc.emb.save()
|
||||
|
||||
with self.assertRaises(DeprecationWarning):
|
||||
doc.emb.reload()
|
||||
|
||||
def test_embedded_document_equality(self):
|
||||
class Test(Document):
|
||||
field = StringField(required=True)
|
||||
@@ -3070,36 +3154,36 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
dbref2 = f._data['test2']
|
||||
obj2 = f.test2
|
||||
self.assertTrue(isinstance(dbref2, DBRef))
|
||||
self.assertTrue(isinstance(obj2, Test2))
|
||||
self.assertTrue(obj2.id == dbref2.id)
|
||||
self.assertTrue(obj2 == dbref2)
|
||||
self.assertTrue(dbref2 == obj2)
|
||||
self.assertIsInstance(dbref2, DBRef)
|
||||
self.assertIsInstance(obj2, Test2)
|
||||
self.assertEqual(obj2.id, dbref2.id)
|
||||
self.assertEqual(obj2, dbref2)
|
||||
self.assertEqual(dbref2, obj2)
|
||||
|
||||
dbref3 = f._data['test3']
|
||||
obj3 = f.test3
|
||||
self.assertTrue(isinstance(dbref3, DBRef))
|
||||
self.assertTrue(isinstance(obj3, Test3))
|
||||
self.assertTrue(obj3.id == dbref3.id)
|
||||
self.assertTrue(obj3 == dbref3)
|
||||
self.assertTrue(dbref3 == obj3)
|
||||
self.assertIsInstance(dbref3, DBRef)
|
||||
self.assertIsInstance(obj3, Test3)
|
||||
self.assertEqual(obj3.id, dbref3.id)
|
||||
self.assertEqual(obj3, dbref3)
|
||||
self.assertEqual(dbref3, obj3)
|
||||
|
||||
self.assertTrue(obj2.id == obj3.id)
|
||||
self.assertTrue(dbref2.id == dbref3.id)
|
||||
self.assertFalse(dbref2 == dbref3)
|
||||
self.assertFalse(dbref3 == dbref2)
|
||||
self.assertTrue(dbref2 != dbref3)
|
||||
self.assertTrue(dbref3 != dbref2)
|
||||
self.assertEqual(obj2.id, obj3.id)
|
||||
self.assertEqual(dbref2.id, dbref3.id)
|
||||
self.assertNotEqual(dbref2, dbref3)
|
||||
self.assertNotEqual(dbref3, dbref2)
|
||||
self.assertNotEqual(dbref2, dbref3)
|
||||
self.assertNotEqual(dbref3, dbref2)
|
||||
|
||||
self.assertFalse(obj2 == dbref3)
|
||||
self.assertFalse(dbref3 == obj2)
|
||||
self.assertTrue(obj2 != dbref3)
|
||||
self.assertTrue(dbref3 != obj2)
|
||||
self.assertNotEqual(obj2, dbref3)
|
||||
self.assertNotEqual(dbref3, obj2)
|
||||
self.assertNotEqual(obj2, dbref3)
|
||||
self.assertNotEqual(dbref3, obj2)
|
||||
|
||||
self.assertFalse(obj3 == dbref2)
|
||||
self.assertFalse(dbref2 == obj3)
|
||||
self.assertTrue(obj3 != dbref2)
|
||||
self.assertTrue(dbref2 != obj3)
|
||||
self.assertNotEqual(obj3, dbref2)
|
||||
self.assertNotEqual(dbref2, obj3)
|
||||
self.assertNotEqual(obj3, dbref2)
|
||||
self.assertNotEqual(dbref2, obj3)
|
||||
|
||||
def test_default_values(self):
|
||||
class Person(Document):
|
||||
@@ -3132,8 +3216,7 @@ class InstanceTest(unittest.TestCase):
|
||||
coll = Person._get_collection()
|
||||
for person in Person.objects.as_pymongo():
|
||||
if 'height' not in person:
|
||||
person['height'] = 189
|
||||
coll.save(person)
|
||||
coll.update_one({'_id': person['_id']}, {'$set': {'height': 189}})
|
||||
|
||||
self.assertEquals(Person.objects(height=189).count(), 1)
|
||||
|
||||
@@ -3148,6 +3231,64 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEquals(p.id, None)
|
||||
p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here
|
||||
|
||||
def test_from_son_created_False_without_id(self):
|
||||
class MyPerson(Document):
|
||||
name = StringField()
|
||||
|
||||
MyPerson.objects.delete()
|
||||
|
||||
p = MyPerson.from_json('{"name": "a_fancy_name"}', created=False)
|
||||
self.assertFalse(p._created)
|
||||
self.assertIsNone(p.id)
|
||||
p.save()
|
||||
self.assertIsNotNone(p.id)
|
||||
saved_p = MyPerson.objects.get(id=p.id)
|
||||
self.assertEqual(saved_p.name, 'a_fancy_name')
|
||||
|
||||
def test_from_son_created_False_with_id(self):
|
||||
# 1854
|
||||
class MyPerson(Document):
|
||||
name = StringField()
|
||||
|
||||
MyPerson.objects.delete()
|
||||
|
||||
p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False)
|
||||
self.assertFalse(p._created)
|
||||
self.assertEqual(p._changed_fields, [])
|
||||
self.assertEqual(p.name, 'a_fancy_name')
|
||||
self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e'))
|
||||
p.save()
|
||||
|
||||
with self.assertRaises(DoesNotExist):
|
||||
# Since created=False and we gave an id in the json and _changed_fields is empty
|
||||
# mongoengine assumes that the document exits with that structure already
|
||||
# and calling .save() didn't save anything
|
||||
MyPerson.objects.get(id=p.id)
|
||||
|
||||
self.assertFalse(p._created)
|
||||
p.name = 'a new fancy name'
|
||||
self.assertEqual(p._changed_fields, ['name'])
|
||||
p.save()
|
||||
saved_p = MyPerson.objects.get(id=p.id)
|
||||
self.assertEqual(saved_p.name, p.name)
|
||||
|
||||
def test_from_son_created_True_with_an_id(self):
|
||||
class MyPerson(Document):
|
||||
name = StringField()
|
||||
|
||||
MyPerson.objects.delete()
|
||||
|
||||
p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True)
|
||||
self.assertTrue(p._created)
|
||||
self.assertEqual(p._changed_fields, [])
|
||||
self.assertEqual(p.name, 'a_fancy_name')
|
||||
self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e'))
|
||||
p.save()
|
||||
|
||||
saved_p = MyPerson.objects.get(id=p.id)
|
||||
self.assertEqual(saved_p, p)
|
||||
self.assertEqual(p.name, 'a_fancy_name')
|
||||
|
||||
def test_null_field(self):
|
||||
# 734
|
||||
class User(Document):
|
||||
@@ -3221,7 +3362,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
person.update(set__height=2.0)
|
||||
|
||||
@needs_mongodb_v26
|
||||
@requires_mongodb_gte_26
|
||||
def test_push_with_position(self):
|
||||
"""Ensure that push with position works properly for an instance."""
|
||||
class BlogPost(Document):
|
||||
@@ -3248,6 +3389,22 @@ class InstanceTest(unittest.TestCase):
|
||||
blog.reload()
|
||||
self.assertEqual(blog.tags, [["value1", 123]])
|
||||
|
||||
def test_accessing_objects_with_indexes_error(self):
|
||||
insert_result = self.db.company.insert_many([{'name': 'Foo'},
|
||||
{'name': 'Foo'}]) # Force 2 doc with same name
|
||||
REF_OID = insert_result.inserted_ids[0]
|
||||
self.db.user.insert_one({'company': REF_OID}) # Force 2 doc with same name
|
||||
|
||||
class Company(Document):
|
||||
name = StringField(unique=True)
|
||||
|
||||
class User(Document):
|
||||
company = ReferenceField(Company)
|
||||
|
||||
# Ensure index creation exception aren't swallowed (#1688)
|
||||
with self.assertRaises(DuplicateKeyError):
|
||||
User.objects().select_related()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
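For reference, the try/except-to-context-manager migration in the hunks above relies on standard unittest behaviour: assertRaises used as a context manager both asserts that the block raises and exposes the raised exception on the result object. A minimal standalone sketch (the names are illustrative, not taken from the patch):

    import unittest

    class Example(unittest.TestCase):
        def test_exception_details(self):
            # The context manager fails the test if nothing is raised, and
            # captures the exception for further inspection afterwards.
            with self.assertRaises(ValueError) as cm:
                int('not a number')
            self.assertIn('not a number', str(cm.exception))

    if __name__ == '__main__':
        unittest.main()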
@@ -32,12 +32,12 @@ class TestJson(unittest.TestCase):
             string = StringField(db_field='s')
             embedded = EmbeddedDocumentField(Embedded, db_field='e')

-        doc = Doc( string="Hello", embedded=Embedded(string="Inner Hello"))
-        doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':'))
+        doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
+        doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':'))

         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

-        self.assertEqual( doc_json, expected_json)
+        self.assertEqual(doc_json, expected_json)

     def test_json_simple(self):
@@ -20,16 +20,16 @@ class ValidatorErrorTest(unittest.TestCase):
         # 1st level error schema
         error.errors = {'1st': ValidationError('bad 1st'), }
-        self.assertTrue('1st' in error.to_dict())
+        self.assertIn('1st', error.to_dict())
         self.assertEqual(error.to_dict()['1st'], 'bad 1st')

         # 2nd level error schema
         error.errors = {'1st': ValidationError('bad 1st', errors={
             '2nd': ValidationError('bad 2nd'),
         })}
-        self.assertTrue('1st' in error.to_dict())
-        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
-        self.assertTrue('2nd' in error.to_dict()['1st'])
+        self.assertIn('1st', error.to_dict())
+        self.assertIsInstance(error.to_dict()['1st'], dict)
+        self.assertIn('2nd', error.to_dict()['1st'])
         self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')

         # moar levels

@@ -40,10 +40,10 @@ class ValidatorErrorTest(unittest.TestCase):
             }),
         }),
         })}
-        self.assertTrue('1st' in error.to_dict())
-        self.assertTrue('2nd' in error.to_dict()['1st'])
-        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
-        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
+        self.assertIn('1st', error.to_dict())
+        self.assertIn('2nd', error.to_dict()['1st'])
+        self.assertIn('3rd', error.to_dict()['1st']['2nd'])
+        self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd'])
         self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
                          'Inception')

@@ -58,7 +58,7 @@ class ValidatorErrorTest(unittest.TestCase):
         try:
             User().validate()
         except ValidationError as e:
-            self.assertTrue("User:None" in e.message)
+            self.assertIn("User:None", e.message)
             self.assertEqual(e.to_dict(), {
                 'username': 'Field is required',
                 'name': 'Field is required'})

@@ -68,7 +68,7 @@ class ValidatorErrorTest(unittest.TestCase):
         try:
             user.save()
         except ValidationError as e:
-            self.assertTrue("User:RossC0" in e.message)
+            self.assertIn("User:RossC0", e.message)
             self.assertEqual(e.to_dict(), {
                 'name': 'Field is required'})

@@ -116,7 +116,7 @@ class ValidatorErrorTest(unittest.TestCase):
         try:
             Doc(id="bad").validate()
         except ValidationError as e:
-            self.assertTrue("SubDoc:None" in e.message)
+            self.assertIn("SubDoc:None", e.message)
             self.assertEqual(e.to_dict(), {
                 "e": {'val': 'OK could not be converted to int'}})

@@ -127,14 +127,14 @@ class ValidatorErrorTest(unittest.TestCase):
         doc = Doc.objects.first()
         keys = doc._data.keys()
         self.assertEqual(2, len(keys))
-        self.assertTrue('e' in keys)
-        self.assertTrue('id' in keys)
+        self.assertIn('e', keys)
+        self.assertIn('id', keys)

         doc.e.val = "OK"
         try:
             doc.save()
         except ValidationError as e:
-            self.assertTrue("Doc:test" in e.message)
+            self.assertIn("Doc:test", e.message)
             self.assertEqual(e.to_dict(), {
                 "e": {'val': 'OK could not be converted to int'}})
@@ -1,3 +1,3 @@
-from fields import *
-from file_tests import *
-from geo import *
+from .fields import *
+from .file_tests import *
+from .geo import *
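The dot-prefixed imports above matter because Python 3 removed implicit relative imports: inside a package, "from fields import *" resolves only on Python 2, while the explicit "from .fields import *" works on both. A minimal sketch of the same idea (hypothetical package layout, not from the patch):

    # pkg/__init__.py
    from .helpers import greet   # explicit relative import, valid on Py2 and Py3

    # pkg/helpers.py
    def greet():
        return "hello"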
File diff suppressed because it is too large
@@ -24,6 +24,16 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
 TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')


+def get_file(path):
+    """Use a BytesIO instead of a file to allow
+    to have a one-liner and avoid that the file remains opened"""
+    bytes_io = StringIO()
+    with open(path, 'rb') as f:
+        bytes_io.write(f.read())
+    bytes_io.seek(0)
+    return bytes_io
+
+
 class FileTest(MongoDBTestCase):

     def tearDown(self):

@@ -53,8 +63,8 @@ class FileTest(MongoDBTestCase):
         putfile.save()

         result = PutFile.objects.first()
-        self.assertTrue(putfile == result)
-        self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>")
+        self.assertEqual(putfile, result)
+        self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello (%s)>" % result.the_file.grid_id)
         self.assertEqual(result.the_file.read(), text)
         self.assertEqual(result.the_file.content_type, content_type)
         result.the_file.delete()  # Remove file from GridFS

@@ -71,7 +81,7 @@ class FileTest(MongoDBTestCase):
         putfile.save()

         result = PutFile.objects.first()
-        self.assertTrue(putfile == result)
+        self.assertEqual(putfile, result)
         self.assertEqual(result.the_file.read(), text)
         self.assertEqual(result.the_file.content_type, content_type)
         result.the_file.delete()

@@ -96,7 +106,7 @@ class FileTest(MongoDBTestCase):
         streamfile.save()

         result = StreamFile.objects.first()
-        self.assertTrue(streamfile == result)
+        self.assertEqual(streamfile, result)
         self.assertEqual(result.the_file.read(), text + more_text)
         self.assertEqual(result.the_file.content_type, content_type)
         result.the_file.seek(0)

@@ -132,7 +142,7 @@ class FileTest(MongoDBTestCase):
         streamfile.save()

         result = StreamFile.objects.first()
-        self.assertTrue(streamfile == result)
+        self.assertEqual(streamfile, result)
         self.assertEqual(result.the_file.read(), text + more_text)
         # self.assertEqual(result.the_file.content_type, content_type)
         result.the_file.seek(0)

@@ -161,7 +171,7 @@ class FileTest(MongoDBTestCase):
         setfile.save()

         result = SetFile.objects.first()
-        self.assertTrue(setfile == result)
+        self.assertEqual(setfile, result)
         self.assertEqual(result.the_file.read(), text)

         # Try replacing file with new one

@@ -169,7 +179,7 @@ class FileTest(MongoDBTestCase):
         result.save()

         result = SetFile.objects.first()
-        self.assertTrue(setfile == result)
+        self.assertEqual(setfile, result)
         self.assertEqual(result.the_file.read(), more_text)
         result.the_file.delete()

@@ -231,8 +241,8 @@ class FileTest(MongoDBTestCase):
         test_file_dupe = TestFile()
         data = test_file_dupe.the_file.read()  # Should be None

-        self.assertTrue(test_file.name != test_file_dupe.name)
-        self.assertTrue(test_file.the_file.read() != data)
+        self.assertNotEqual(test_file.name, test_file_dupe.name)
+        self.assertNotEqual(test_file.the_file.read(), data)

         TestFile.drop_collection()

@@ -247,8 +257,8 @@ class FileTest(MongoDBTestCase):
         Animal.drop_collection()
         marmot = Animal(genus='Marmota', family='Sciuridae')

-        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk
-        marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar')
+        marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk
+        marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar')
         marmot.photo.close()
         marmot.save()

@@ -261,11 +271,11 @@ class FileTest(MongoDBTestCase):
             the_file = FileField()
         TestFile.drop_collection()

-        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
+        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
         self.assertEqual(test_file.the_file.get().length, 8313)

         test_file = TestFile.objects.first()
-        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
+        test_file.the_file = get_file(TEST_IMAGE2_PATH)
         test_file.save()
         self.assertEqual(test_file.the_file.get().length, 4971)

@@ -291,7 +301,7 @@ class FileTest(MongoDBTestCase):
             the_file = FileField()

         test_file = TestFile()
-        self.assertFalse(test_file.the_file in [{"test": 1}])
+        self.assertNotIn(test_file.the_file, [{"test": 1}])

     def test_file_disk_space(self):
         """ Test disk space usage when we delete/replace a file """

@@ -379,7 +389,7 @@ class FileTest(MongoDBTestCase):
             self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)

         t = TestImage()
-        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
+        t.image.put(get_file(TEST_IMAGE_PATH))
         t.save()

         t = TestImage.objects.first()

@@ -400,11 +410,11 @@ class FileTest(MongoDBTestCase):
             the_file = ImageField()
         TestFile.drop_collection()

-        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
+        test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
         self.assertEqual(test_file.the_file.size, (371, 76))

         test_file = TestFile.objects.first()
-        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
+        test_file.the_file = get_file(TEST_IMAGE2_PATH)
         test_file.save()
         self.assertEqual(test_file.the_file.size, (45, 101))

@@ -418,7 +428,7 @@ class FileTest(MongoDBTestCase):
         TestImage.drop_collection()

         t = TestImage()
-        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
+        t.image.put(get_file(TEST_IMAGE_PATH))
         t.save()

         t = TestImage.objects.first()

@@ -441,7 +451,7 @@ class FileTest(MongoDBTestCase):
         TestImage.drop_collection()

         t = TestImage()
-        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
+        t.image.put(get_file(TEST_IMAGE_PATH))
         t.save()

         t = TestImage.objects.first()

@@ -464,7 +474,7 @@ class FileTest(MongoDBTestCase):
         TestImage.drop_collection()

         t = TestImage()
-        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
+        t.image.put(get_file(TEST_IMAGE_PATH))
         t.save()

         t = TestImage.objects.first()

@@ -542,8 +552,8 @@ class FileTest(MongoDBTestCase):
         TestImage.drop_collection()

         t = TestImage()
-        t.image1.put(open(TEST_IMAGE_PATH, 'rb'))
-        t.image2.put(open(TEST_IMAGE2_PATH, 'rb'))
+        t.image1.put(get_file(TEST_IMAGE_PATH))
+        t.image2.put(get_file(TEST_IMAGE2_PATH))
         t.save()

         test = TestImage.objects.first()

@@ -563,12 +573,10 @@ class FileTest(MongoDBTestCase):
         Animal.drop_collection()
         marmot = Animal(genus='Marmota', family='Sciuridae')

-        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk
-
-        photos_field = marmot._fields['photos'].field
-        new_proxy = photos_field.get_proxy_obj('photos', marmot)
-        new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar')
-        marmot_photo.close()
+        with open(TEST_IMAGE_PATH, 'rb') as marmot_photo:  # Retrieve a photo from disk
+            photos_field = marmot._fields['photos'].field
+            new_proxy = photos_field.get_proxy_obj('photos', marmot)
+            new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar')

         marmot.photos.append(new_proxy)
         marmot.save()

@@ -578,5 +586,6 @@ class FileTest(MongoDBTestCase):
         self.assertEqual(marmot.photos[0].foo, 'bar')
         self.assertEqual(marmot.photos[0].get().length, 8313)

+
 if __name__ == '__main__':
     unittest.main()
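The get_file() helper introduced at the top of this diff keeps OS file handles from leaking by copying the file into an in-memory, seekable buffer. A rough standalone equivalent on Python 3 would use io.BytesIO (an assumption on my part; the patch itself writes into the StringIO name the module already imports):

    import io

    def read_into_buffer(path):
        # Copy the file's bytes into an in-memory buffer so the OS-level
        # handle is closed as soon as the with-block exits, while callers
        # still get a seekable file-like object.
        with open(path, 'rb') as f:
            buf = io.BytesIO(f.read())
        buf.seek(0)
        return buf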
@@ -298,9 +298,9 @@ class GeoFieldTest(unittest.TestCase):
             polygon = PolygonField()

         geo_indicies = Event._geo_indices()
-        self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies)
-        self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies)
-        self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies)
+        self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies)
+        self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies)
+        self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies)

     def test_indexes_2dsphere_embedded(self):
         """Ensure that indexes are created automatically for GeoPointFields.

@@ -316,9 +316,9 @@ class GeoFieldTest(unittest.TestCase):
             venue = EmbeddedDocumentField(Venue)

         geo_indicies = Event._geo_indices()
-        self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies)
-        self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies)
-        self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies)
+        self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies)
+        self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies)
+        self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies)

     def test_geo_indexes_recursion(self):

@@ -335,9 +335,9 @@ class GeoFieldTest(unittest.TestCase):
         Parent(name='Berlin').save()
         info = Parent._get_collection().index_information()
-        self.assertFalse('location_2d' in info)
+        self.assertNotIn('location_2d', info)
         info = Location._get_collection().index_information()
-        self.assertTrue('location_2d' in info)
+        self.assertIn('location_2d', info)

         self.assertEqual(len(Parent._geo_indices()), 0)
         self.assertEqual(len(Location._geo_indices()), 1)
tests/fields/test_binary_field.py (new file, 143 lines)
@@ -0,0 +1,143 @@
# -*- coding: utf-8 -*-
import uuid

from nose.plugins.skip import SkipTest
import six

from bson import Binary

from mongoengine import *
from tests.utils import MongoDBTestCase

BIN_VALUE = six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5')


class TestBinaryField(MongoDBTestCase):
    def test_binary_fields(self):
        """Ensure that binary fields can be stored and retrieved.
        """
        class Attachment(Document):
            content_type = StringField()
            blob = BinaryField()

        BLOB = six.b('\xe6\x00\xc4\xff\x07')
        MIME_TYPE = 'application/octet-stream'

        Attachment.drop_collection()

        attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
        attachment.save()

        attachment_1 = Attachment.objects().first()
        self.assertEqual(MIME_TYPE, attachment_1.content_type)
        self.assertEqual(BLOB, six.binary_type(attachment_1.blob))

    def test_validation_succeeds(self):
        """Ensure that valid values can be assigned to binary fields.
        """
        class AttachmentRequired(Document):
            blob = BinaryField(required=True)

        class AttachmentSizeLimit(Document):
            blob = BinaryField(max_bytes=4)

        attachment_required = AttachmentRequired()
        self.assertRaises(ValidationError, attachment_required.validate)
        attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07'))
        attachment_required.validate()

        _5_BYTES = six.b('\xe6\x00\xc4\xff\x07')
        _4_BYTES = six.b('\xe6\x00\xc4\xff')
        self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate)
        AttachmentSizeLimit(blob=_4_BYTES).validate()

    def test_validation_fails(self):
        """Ensure that invalid values cannot be assigned to binary fields."""

        class Attachment(Document):
            blob = BinaryField()

        for invalid_data in (2, u'Im_a_unicode', ['some_str']):
            self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate)

    def test__primary(self):
        class Attachment(Document):
            id = BinaryField(primary_key=True)

        Attachment.drop_collection()
        binary_id = uuid.uuid4().bytes
        att = Attachment(id=binary_id).save()
        self.assertEqual(1, Attachment.objects.count())
        self.assertEqual(1, Attachment.objects.filter(id=att.id).count())
        att.delete()
        self.assertEqual(0, Attachment.objects.count())

    def test_primary_filter_by_binary_pk_as_str(self):
        raise SkipTest("Querying by id as string is not currently supported")

        class Attachment(Document):
            id = BinaryField(primary_key=True)

        Attachment.drop_collection()
        binary_id = uuid.uuid4().bytes
        att = Attachment(id=binary_id).save()
        self.assertEqual(1, Attachment.objects.filter(id=binary_id).count())
        att.delete()
        self.assertEqual(0, Attachment.objects.count())

    def test_match_querying_with_bytes(self):
        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument(bin_field=BIN_VALUE).save()
        matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
        self.assertEqual(matched_doc.id, doc.id)

    def test_match_querying_with_binary(self):
        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument(bin_field=BIN_VALUE).save()

        matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
        self.assertEqual(matched_doc.id, doc.id)

    def test_modify_operation__set(self):
        """Ensures no regression of bug #1127"""
        class MyDocument(Document):
            some_field = StringField()
            bin_field = BinaryField()

        MyDocument.drop_collection()

        doc = MyDocument.objects(some_field='test').modify(
            upsert=True, new=True,
            set__bin_field=BIN_VALUE
        )
        self.assertEqual(doc.some_field, 'test')
        if six.PY3:
            self.assertEqual(doc.bin_field, BIN_VALUE)
        else:
            self.assertEqual(doc.bin_field, Binary(BIN_VALUE))

    def test_update_one(self):
        """Ensures no regression of bug #1127"""
        class MyDocument(Document):
            bin_field = BinaryField()

        MyDocument.drop_collection()

        bin_data = six.b('\xe6\x00\xc4\xff\x07')
        doc = MyDocument(bin_field=bin_data).save()

        n_updated = MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE)
        self.assertEqual(n_updated, 1)
        fetched = MyDocument.objects.with_id(doc.id)
        if six.PY3:
            self.assertEqual(fetched.bin_field, BIN_VALUE)
        else:
            self.assertEqual(fetched.bin_field, Binary(BIN_VALUE))
tests/fields/test_boolean_field.py (new file, 49 lines)
@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
from mongoengine import *

from tests.utils import MongoDBTestCase, get_as_pymongo


class TestBooleanField(MongoDBTestCase):
    def test_storage(self):
        class Person(Document):
            admin = BooleanField()

        person = Person(admin=True)
        person.save()
        self.assertEqual(
            get_as_pymongo(person),
            {'_id': person.id,
             'admin': True})

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to boolean
        fields.
        """
        class Person(Document):
            admin = BooleanField()

        person = Person()
        person.admin = True
        person.validate()

        person.admin = 2
        self.assertRaises(ValidationError, person.validate)
        person.admin = 'Yes'
        self.assertRaises(ValidationError, person.validate)
        person.admin = 'False'
        self.assertRaises(ValidationError, person.validate)

    def test_weirdness_constructor(self):
        """When attribute is set in contructor, it gets cast into a bool
        which causes some weird behavior. We dont necessarily want to maintain this behavior
        but its a known issue
        """
        class Person(Document):
            admin = BooleanField()

        new_person = Person(admin='False')
        self.assertTrue(new_person.admin)

        new_person = Person(admin='0')
        self.assertTrue(new_person.admin)
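The "weirdness" the last test documents comes down to plain Python truthiness: bool() of any non-empty string is True, so both 'False' and '0' become True when cast in the constructor. A one-liner illustration (plain Python, no mongoengine required):

    # Non-empty strings are truthy regardless of their content.
    assert bool('False') is True
    assert bool('0') is True
    assert bool('') is False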
tests/fields/test_cached_reference_field.py (new file, 443 lines)
@@ -0,0 +1,443 @@
# -*- coding: utf-8 -*-
from decimal import Decimal

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestCachedReferenceField(MongoDBTestCase):

    def test_get_and_save(self):
        """
        Tests #1047: CachedReferenceField creates DBRefs on to_python,
        but can't save them on to_mongo.
        """
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal)

        Animal.drop_collection()
        Ocorrence.drop_collection()

        Ocorrence(person="testte",
                  animal=Animal(name="Leopard", tag="heavy").save()).save()
        p = Ocorrence.objects.get()
        p.person = 'new_testte'
        p.save()

    def test_general_things(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(
                Animal, fields=['tag'])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy")
        a.save()

        self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal])
        o = Ocorrence(person="teste", animal=a)
        o.save()

        p = Ocorrence(person="Wilson")
        p.save()

        self.assertEqual(Ocorrence.objects(animal=None).count(), 1)

        self.assertEqual(
            a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk})

        self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag='heavy').count()
        self.assertEqual(count, 1)

        ocorrence = Ocorrence.objects(animal__tag='heavy').first()
        self.assertEqual(ocorrence.person, "teste")
        self.assertIsInstance(ocorrence.animal, Animal)

    def test_with_decimal(self):
        class PersonAuto(Document):
            name = StringField()
            salary = DecimalField()

        class SocialTest(Document):
            group = StringField()
            person = CachedReferenceField(
                PersonAuto,
                fields=('salary',))

        PersonAuto.drop_collection()
        SocialTest.drop_collection()

        p = PersonAuto(name="Alberto", salary=Decimal('7000.00'))
        p.save()

        s = SocialTest(group="dev", person=p)
        s.save()

        self.assertEqual(
            SocialTest.objects._collection.find_one({'person.salary': 7000.00}), {
                '_id': s.pk,
                'group': s.group,
                'person': {
                    '_id': p.pk,
                    'salary': 7000.00
                }
            })

    def test_cached_reference_field_reference(self):
        class Group(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            group = ReferenceField(Group)

        class SocialData(Document):
            obs = StringField()
            tags = ListField(
                StringField())
            person = CachedReferenceField(
                Person,
                fields=('group',))

        Group.drop_collection()
        Person.drop_collection()
        SocialData.drop_collection()

        g1 = Group(name='dev')
        g1.save()

        g2 = Group(name="designers")
        g2.save()

        p1 = Person(name="Alberto", group=g1)
        p1.save()

        p2 = Person(name="Andre", group=g1)
        p2.save()

        p3 = Person(name="Afro design", group=g2)
        p3.save()

        s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2'])
        s1.save()

        s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4'])
        s2.save()

        self.assertEqual(SocialData.objects._collection.find_one(
            {'tags': 'tag2'}), {
                '_id': s1.pk,
                'obs': 'testing 123',
                'tags': ['tag1', 'tag2'],
                'person': {
                    '_id': p1.pk,
                    'group': g1.pk
                }
            })

        self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
        self.assertEqual(SocialData.objects(person__group=g2).first(), s2)

    def test_cached_reference_field_push_with_fields(self):
        class Product(Document):
            name = StringField()

        Product.drop_collection()

        class Basket(Document):
            products = ListField(CachedReferenceField(Product, fields=['name']))

        Basket.drop_collection()
        product1 = Product(name='abc').save()
        product2 = Product(name='def').save()
        basket = Basket(products=[product1]).save()
        self.assertEqual(
            Basket.objects._collection.find_one(),
            {
                '_id': basket.pk,
                'products': [
                    {
                        '_id': product1.pk,
                        'name': product1.name
                    }
                ]
            }
        )
        # push to list
        basket.update(push__products=product2)
        basket.reload()
        self.assertEqual(
            Basket.objects._collection.find_one(),
            {
                '_id': basket.pk,
                'products': [
                    {
                        '_id': product1.pk,
                        'name': product1.name
                    },
                    {
                        '_id': product2.pk,
                        'name': product2.name
                    }
                ]
            }
        )

    def test_cached_reference_field_update_all(self):
        class Person(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            name = StringField()
            tp = StringField(
                choices=TYPES
            )

            father = CachedReferenceField('self', fields=('tp',))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name='Wilson Junior', tp='pf', father=a1)
        a2.save()

        self.assertEqual(dict(a2.to_mongo()), {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {
                "_id": a1.pk,
                "tp": u"pj"
            }
        })

        self.assertEqual(Person.objects(father=a1)._query, {
            'father._id': a1.pk
        })
        self.assertEqual(Person.objects(father=a1).count(), 1)

        Person.objects.update(set__tp="pf")
        Person.father.sync_all()

        a2.reload()
        self.assertEqual(dict(a2.to_mongo()), {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {
                "_id": a1.pk,
                "tp": u"pf"
            }
        })

    def test_cached_reference_fields_on_embedded_documents(self):
        with self.assertRaises(InvalidDocumentError):
            class Test(Document):
                name = StringField()

            type('WrongEmbeddedDocument', (
                EmbeddedDocument,), {
                    'test': CachedReferenceField(Test)
                })

    def test_cached_reference_auto_sync(self):
        class Person(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            name = StringField()
            tp = StringField(
                choices=TYPES
            )

            father = CachedReferenceField('self', fields=('tp',))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name='Wilson Junior', tp='pf', father=a1)
        a2.save()

        a1.tp = 'pf'
        a1.save()

        a2.reload()
        self.assertEqual(dict(a2.to_mongo()), {
            '_id': a2.pk,
            'name': 'Wilson Junior',
            'tp': 'pf',
            'father': {
                '_id': a1.pk,
                'tp': 'pf'
            }
        })

    def test_cached_reference_auto_sync_disabled(self):
        class Persone(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            name = StringField()
            tp = StringField(
                choices=TYPES
            )

            father = CachedReferenceField(
                'self', fields=('tp',), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name='Wilson Junior', tp='pf', father=a1)
        a2.save()

        a1.tp = 'pf'
        a1.save()

        self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), {
            '_id': a2.pk,
            'name': 'Wilson Junior',
            'tp': 'pf',
            'father': {
                '_id': a1.pk,
                'tp': 'pj'
            }
        })

    def test_cached_reference_embedded_fields(self):
        class Owner(EmbeddedDocument):
            TPS = (
                ('n', "Normal"),
                ('u', "Urgent")
            )
            name = StringField()
            tp = StringField(
                verbose_name="Type",
                db_field="t",
                choices=TPS)

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(
                Animal, fields=['tag', 'owner.tp'])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy",
                   owner=Owner(tp='u', name="Wilson Júnior")
                   )
        a.save()

        o = Ocorrence(person="teste", animal=a)
        o.save()
        self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), {
            '_id': a.pk,
            'tag': 'heavy',
            'owner': {
                't': 'u'
            }
        })
        self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
        self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u')

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(
            animal__tag='heavy', animal__owner__tp='u').count()
        self.assertEqual(count, 1)

        ocorrence = Ocorrence.objects(
            animal__tag='heavy',
            animal__owner__tp='u').first()
        self.assertEqual(ocorrence.person, "teste")
        self.assertIsInstance(ocorrence.animal, Animal)

    def test_cached_reference_embedded_list_fields(self):
        class Owner(EmbeddedDocument):
            name = StringField()
            tags = ListField(StringField())

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(
                Animal, fields=['tag', 'owner.tags'])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy",
                   owner=Owner(tags=['cool', 'funny'],
                               name="Wilson Júnior")
                   )
        a.save()

        o = Ocorrence(person="teste 2", animal=a)
        o.save()
        self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), {
            '_id': a.pk,
            'tag': 'heavy',
            'owner': {
                'tags': ['cool', 'funny']
            }
        })

        self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
        self.assertEqual(o.to_mongo()['animal']['owner']['tags'],
                         ['cool', 'funny'])

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        query = Ocorrence.objects(
            animal__tag='heavy', animal__owner__tags='cool')._query
        self.assertEqual(
            query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'})

        ocorrence = Ocorrence.objects(
            animal__tag='heavy',
            animal__owner__tags='cool').first()
        self.assertEqual(ocorrence.person, "teste 2")
        self.assertIsInstance(ocorrence.animal, Animal)
tests/fields/test_complex_datetime_field.py (new file, 184 lines)
@@ -0,0 +1,184 @@
# -*- coding: utf-8 -*-
import datetime
import math
import itertools
import re

from mongoengine import *

from tests.utils import MongoDBTestCase


class ComplexDateTimeFieldTest(MongoDBTestCase):
    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """
        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator='.')

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped - with default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Post UTC - microseconds are rounded (down) nearest millisecond - with
        # default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC dates microseconds below 1000 are dropped - with default
        # datetimefields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC microseconds above 1000 is wonky - with default datetimefields
        # log.date has an invalid microsecond value so I can't construct
        # a date to compare.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1)
            log1 = LogEntry.objects.get(date=d1)
            self.assertEqual(log, log1)

        # Test string padding
        microsecond = map(int, [math.pow(10, x) for x in range(6)])
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date']
            self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None)

        # Test separator
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots']
        self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None)

    def test_complexdatetime_usage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """
        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        log1 = LogEntry.objects.get(date=d1)
        self.assertEqual(log, log1)

        # create extra 59 log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 60)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 59:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 59:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        self.assertEqual(logs.count(), 10)

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(
                date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)
            ).save()

        logs = list(LogEntry.objects.order_by('date'))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date < next_log.date)

        logs = list(LogEntry.objects.order_by('-date'))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date > next_log.date)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000))
        self.assertEqual(logs.count(), 4)

    def test_no_default_value(self):
        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        self.assertIsNone(log.timestamp)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertIsNone(fetched_log.timestamp)

    def test_default_static_value(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        self.assertEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertEqual(fetched_log.timestamp, NOW)

    def test_default_callable(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        self.assertGreaterEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertGreaterEqual(fetched_log.timestamp, NOW)
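The regexes in test_complexdatetime_storage show what the field actually persists: a zero-padded string such as '2014,01,01,00,00,00,000000', one group per datetime component, joined by the configured separator. A short sketch of that serialisation idea (illustrative only, not mongoengine's internal code):

    import datetime

    def to_complex_string(dt, separator=','):
        # Zero-padded, fixed-width components keep lexicographic order
        # identical to chronological order, which is why string storage
        # still sorts and range-queries correctly.
        return separator.join([
            '%04d' % dt.year, '%02d' % dt.month, '%02d' % dt.day,
            '%02d' % dt.hour, '%02d' % dt.minute, '%02d' % dt.second,
            '%06d' % dt.microsecond,
        ])

    print(to_complex_string(datetime.datetime(2014, 1, 1)))
    # -> 2014,01,01,00,00,00,000000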
tests/fields/test_date_field.py (new file, 165 lines)
@@ -0,0 +1,165 @@
# -*- coding: utf-8 -*-
import datetime
import six

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestDateField(MongoDBTestCase):
    def test_date_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """
        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt='')
        self.assertRaises(ValidationError, md.save)

    def test_date_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """
        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt=' ')
        self.assertRaises(ValidationError, md.save)

    def test_default_values_today(self):
        """Ensure that default field values are used when creating
        a document.
        """
        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        self.assertEqual(person.day, person.day)
        self.assertEqual(person.day, datetime.date.today())
        self.assertEqual(person._data['day'], person.day)

    def test_date(self):
        """Tests showing pymongo date fields

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """
        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = datetime.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date, datetime.date.today())

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1.date())
            self.assertEqual(log.date, d2.date())

    def test_regular_usage(self):
        """Tests for regular datetime fields"""
        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(' ')):
            log1 = LogEntry.objects.get(date=query)
            self.assertEqual(log, log1)

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat('T'))
            self.assertEqual(log, log1)

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = datetime.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 20)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """
        class LogEntry(Document):
            time = DateField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = datetime.date.today()
        log.validate()

        log.time = datetime.datetime.now().isoformat(' ')
        log.validate()

        if dateutil:
            log.time = datetime.datetime.now().isoformat('T')
            log.validate()

        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = 'ABC'
        self.assertRaises(ValidationError, log.validate)
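For contrast with the DateTimeField file that follows, a sketch of the truncation the DateField tests above establish: any time-of-day component is discarded on save. The connection and model names are illustrative.

import datetime

from mongoengine import DateField, Document, connect

connect('example-db')  # hypothetical database name

class Entry(Document):
    day = DateField()

Entry.drop_collection()
entry = Entry(day=datetime.datetime(1970, 1, 1, 23, 59, 59)).save()
entry.reload()
assert entry.day == datetime.date(1970, 1, 1)  # the time portion is dropped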
tests/fields/test_datetime_field.py (new file, 203 lines)
@@ -0,0 +1,203 @@
# -*- coding: utf-8 -*-
import datetime
import six

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *
from mongoengine import connection

from tests.utils import MongoDBTestCase


class TestDateTimeField(MongoDBTestCase):
    def test_datetime_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """
        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt='')
        self.assertRaises(ValidationError, md.save)

    def test_datetime_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """
        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt=' ')
        self.assertRaises(ValidationError, md.save)

    def test_default_value_utcnow(self):
        """Ensure that default field values are used when creating
        a document.
        """
        class Person(Document):
            created = DateTimeField(default=datetime.datetime.utcnow)

        utcnow = datetime.datetime.utcnow()
        person = Person()
        person.validate()
        person_created_t0 = person.created
        self.assertLess(person.created - utcnow, datetime.timedelta(seconds=1))
        self.assertEqual(person_created_t0, person.created)  # make sure it does not change
        self.assertEqual(person._data['created'], person.created)

    def test_handling_microseconds(self):
        """Tests showing pymongo datetime fields handling of microseconds.
        Microseconds are rounded to the nearest millisecond and pre UTC
        handling is wonky.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """
        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = datetime.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date.date(), datetime.date.today())

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        # Post UTC - microseconds are rounded (down) nearest millisecond
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertNotEqual(log.date, d1)
            self.assertEqual(log.date, d2)

    def test_regular_usage(self):
        """Tests for regular datetime fields"""
        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(' ')):
            log1 = LogEntry.objects.get(date=query)
            self.assertEqual(log, log1)

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat('T'))
            self.assertEqual(log, log1)

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = datetime.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 20)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(1980, 1, 1),
            date__gte=datetime.datetime(1975, 1, 1),
        )
        self.assertEqual(logs.count(), 5)

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """
        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = datetime.date.today()
        log.validate()

        log.time = datetime.datetime.now().isoformat(' ')
        log.validate()

        if dateutil:
            log.time = datetime.datetime.now().isoformat('T')
            log.validate()

        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = 'ABC'
        self.assertRaises(ValidationError, log.validate)


class TestDateTimeTzAware(MongoDBTestCase):
    def test_datetime_tz_aware_mark_as_changed(self):
        # Reset the connections
        connection._connection_settings = {}
        connection._connections = {}
        connection._dbs = {}

        connect(db='mongoenginetest', tz_aware=True)

        class LogEntry(Document):
            time = DateTimeField()

        LogEntry.drop_collection()

        LogEntry(time=datetime.datetime(2013, 1, 1, 0, 0, 0)).save()

        log = LogEntry.objects.first()
        log.time = datetime.datetime(2013, 1, 1, 0, 0, 0)
        self.assertEqual(['time'], log._changed_fields)
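A condensed sketch of the millisecond truncation that test_handling_microseconds exercises: BSON dates have millisecond resolution, so pymongo drops sub-millisecond digits. The connection and model names are illustrative.

import datetime

from mongoengine import DateTimeField, Document, connect

connect('example-db')  # hypothetical database name

class Tick(Document):
    at = DateTimeField()

Tick.drop_collection()
tick = Tick(at=datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)).save()
tick.reload()
# 9999 microseconds come back as 9000: only whole milliseconds are stored
assert tick.at == datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)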
tests/fields/test_decimal_field.py (new file, 91 lines)
@@ -0,0 +1,91 @@
# -*- coding: utf-8 -*-
from decimal import Decimal

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestDecimalField(MongoDBTestCase):

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to decimal fields.
        """
        class Person(Document):
            height = DecimalField(min_value=Decimal('0.1'),
                                  max_value=Decimal('3.5'))

        Person.drop_collection()

        Person(height=Decimal('1.89')).save()
        person = Person.objects.first()
        self.assertEqual(person.height, Decimal('1.89'))

        person.height = '2.0'
        person.save()
        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)
        person.height = Decimal('0.01')
        self.assertRaises(ValidationError, person.validate)
        person.height = Decimal('4.0')
        self.assertRaises(ValidationError, person.validate)
        person.height = 'something invalid'
        self.assertRaises(ValidationError, person.validate)

        person_2 = Person(height='something invalid')
        self.assertRaises(ValidationError, person_2.validate)

    def test_comparison(self):
        class Person(Document):
            money = DecimalField()

        Person.drop_collection()

        Person(money=6).save()
        Person(money=7).save()
        Person(money=8).save()
        Person(money=10).save()

        self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count())
        self.assertEqual(2, Person.objects(money__gt=7).count())
        self.assertEqual(2, Person.objects(money__gt="7").count())

        self.assertEqual(3, Person.objects(money__gte="7").count())

    def test_storage(self):
        class Person(Document):
            float_value = DecimalField(precision=4)
            string_value = DecimalField(precision=4, force_string=True)

        Person.drop_collection()
        values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")]
        for store_at_creation in [True, False]:
            for value in values_to_store:
                # to_python is called explicitly if values were sent in the kwargs of __init__
                if store_at_creation:
                    Person(float_value=value, string_value=value).save()
                else:
                    person = Person.objects.create()
                    person.float_value = value
                    person.string_value = value
                    person.save()

        # How it's stored
        expected = [
            {'float_value': 10.0, 'string_value': '10.0000'},
            {'float_value': 10.1, 'string_value': '10.1000'},
            {'float_value': 10.11, 'string_value': '10.1100'},
            {'float_value': 10.111, 'string_value': '10.1110'},
            {'float_value': 10.1111, 'string_value': '10.1111'},
            {'float_value': 10.1111, 'string_value': '10.1111'}]
        expected.extend(expected)
        actual = list(Person.objects.exclude('id').as_pymongo())
        self.assertEqual(expected, actual)

        # How it comes out locally
        expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'),
                    Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')]
        expected.extend(expected)
        for field_name in ['float_value', 'string_value']:
            actual = list(Person.objects().scalar(field_name))
            self.assertEqual(expected, actual)
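The two storage modes compared in test_storage, reduced to a sketch: by default DecimalField persists a lossy float, while force_string=True keeps an exact quantized string. The connection and model names are illustrative.

from decimal import Decimal

from mongoengine import DecimalField, Document, connect

connect('example-db')  # hypothetical database name

class Price(Document):
    as_float = DecimalField(precision=2)
    as_string = DecimalField(precision=2, force_string=True)

Price.drop_collection()
Price(as_float=Decimal('9.99'), as_string=Decimal('9.99')).save()
raw = Price.objects.as_pymongo().first()
assert isinstance(raw['as_float'], float)  # stored as a float
assert raw['as_string'] == '9.99'          # stored as an exact string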
tests/fields/test_dict_field.py (new file, 324 lines)
@@ -0,0 +1,324 @@
# -*- coding: utf-8 -*-
from mongoengine import *
from mongoengine.base import BaseDict

from tests.utils import MongoDBTestCase, get_as_pymongo


class TestDictField(MongoDBTestCase):

    def test_storage(self):
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        info = {'testkey': 'testvalue'}
        post = BlogPost(info=info).save()
        self.assertEqual(
            get_as_pymongo(post),
            {
                '_id': post.id,
                'info': info
            }
        )

    def test_general_things(self):
        """Ensure that dict types work as expected."""
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost()
        post.info = 'my post'
        self.assertRaises(ValidationError, post.validate)

        post.info = ['test', 'test']
        self.assertRaises(ValidationError, post.validate)

        post.info = {'$title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'nested': {'$title': 'test'}}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'the.title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'nested': {'the.title': 'test'}}
        self.assertRaises(ValidationError, post.validate)

        post.info = {1: 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'title': 'test'}
        post.save()

        post = BlogPost()
        post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}}
        post.save()

        post = BlogPost()
        post.info = {'details': {'test': 'test'}}
        post.save()

        post = BlogPost()
        post.info = {'details': {'test': 3}}
        post.save()

        self.assertEqual(BlogPost.objects.count(), 4)
        self.assertEqual(
            BlogPost.objects.filter(info__title__exact='test').count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact='test').count(), 1)

        post = BlogPost.objects.filter(info__title__exact='dollar_sign').first()
        self.assertIn('te$t', post['info']['details'])

        # Confirm handles non strings or non existing keys
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
        self.assertEqual(
            BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)

        post = BlogPost.objects.create(info={'title': 'original'})
        post.info.update({'title': 'updated'})
        post.save()
        post.reload()
        self.assertEqual('updated', post.info['title'])

        post.info.setdefault('authors', [])
        post.save()
        post.reload()
        self.assertEqual([], post.info['authors'])

    def test_dictfield_dump_document(self):
        """Ensure a DictField can handle another document's dump."""
        class Doc(Document):
            field = DictField()

        class ToEmbed(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

        class ToEmbedParent(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

            meta = {'allow_inheritance': True}

        class ToEmbedChild(ToEmbedParent):
            pass

        to_embed_recursive = ToEmbed(id=1).save()
        to_embed = ToEmbed(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
        doc = Doc(field=to_embed.to_mongo().to_dict())
        doc.save()
        self.assertIsInstance(doc.field, dict)
        self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}})
        # Same thing with a Document with a _cls field
        to_embed_recursive = ToEmbedChild(id=1).save()
        to_embed_child = ToEmbedChild(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
        doc = Doc(field=to_embed_child.to_mongo().to_dict())
        doc.save()
        self.assertIsInstance(doc.field, dict)
        expected = {
            '_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild',
            'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}}
        }
        self.assertEqual(doc.field, expected)

    def test_dictfield_strict(self):
        """Ensure that dict field handles validation if provided a strict field type."""
        class Simple(Document):
            mapping = DictField(field=IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping['someint'] = 1
        e.save()

        # try creating an invalid mapping
        with self.assertRaises(ValidationError):
            e.mapping['somestring'] = "abc"
            e.save()

    def test_dictfield_complex(self):
        """Ensure that the dict field can handle the complex types."""
        class SettingBase(EmbeddedDocument):
            meta = {'allow_inheritance': True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Simple(Document):
            mapping = DictField()

        Simple.drop_collection()

        e = Simple()
        e.mapping['somestring'] = StringSetting(value='foo')
        e.mapping['someint'] = IntegerSetting(value=42)
        e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!',
                                    'float': 1.001,
                                    'complex': IntegerSetting(value=42),
                                    'list': [IntegerSetting(value=42),
                                             StringSetting(value='foo')]}
        e.save()

        e2 = Simple.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping['somestring'], StringSetting)
        self.assertIsInstance(e2.mapping['someint'], IntegerSetting)

        # Test querying
        self.assertEqual(
            Simple.objects.filter(mapping__someint__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)

        # Confirm can update
        Simple.objects().update(
            set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)

    def test_push_dict(self):
        class MyModel(Document):
            events = ListField(DictField())

        doc = MyModel(events=[{'a': 1}]).save()
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)

        MyModel.objects(id=doc.id).update(push__events={})
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}, {}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)

    def test_ensure_unique_default_instances(self):
        """Ensure that every field has its own unique default instance."""
        class D(Document):
            data = DictField()
            data2 = DictField(default=lambda: {})

        d1 = D()
        d1.data['foo'] = 'bar'
        d1.data2['foo'] = 'bar'
        d2 = D()
        self.assertEqual(d2.data, {})
        self.assertEqual(d2.data2, {})

    def test_dict_field_invalid_dict_value(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        test = DictFieldTest(dictionary=None)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

        test = DictFieldTest(dictionary=False)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

    def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        class Embedded(EmbeddedDocument):
            name = StringField()

        embed = Embedded(name='garbage')
        doc = DictFieldTest(dictionary=embed)
        with self.assertRaises(ValidationError) as ctx_err:
            doc.validate()
        self.assertIn("'dictionary'", str(ctx_err.exception))
        self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception))

    def test_atomic_update_dict_field(self):
        """Ensure that the entire DictField can be atomically updated."""
        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        e = Simple()
        e.mapping['someints'] = [1, 2]
        e.save()
        e.update(set__mapping={"ints": [3, 4]})
        e.reload()
        self.assertEqual(BaseDict, type(e.mapping))
        self.assertEqual({"ints": [3, 4]}, e.mapping)

        # try creating an invalid mapping
        with self.assertRaises(ValueError):
            e.update(set__mapping={"somestrings": ["foo", "bar", ]})

    def test_dictfield_with_referencefield_complex_nesting_cases(self):
        """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
        # Relates to Issue #1453
        class Doc(Document):
            s = StringField()

        class Simple(Document):
            mapping0 = DictField(ReferenceField(Doc, dbref=True))
            mapping1 = DictField(ReferenceField(Doc, dbref=False))
            mapping2 = DictField(ListField(ReferenceField(Doc, dbref=True)))
            mapping3 = DictField(ListField(ReferenceField(Doc, dbref=False)))
            mapping4 = DictField(DictField(field=ReferenceField(Doc, dbref=True)))
            mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
            mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
            mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
            mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))))
            mapping9 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))))

        Doc.drop_collection()
        Simple.drop_collection()

        d = Doc(s='aa').save()
        e = Simple()
        e.mapping0['someint'] = e.mapping1['someint'] = d
        e.mapping2['someint'] = e.mapping3['someint'] = [d]
        e.mapping4['someint'] = e.mapping5['someint'] = {'d': d}
        e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}]
        e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}]
        e.save()

        s = Simple.objects.first()
        self.assertIsInstance(s.mapping0['someint'], Doc)
        self.assertIsInstance(s.mapping1['someint'], Doc)
        self.assertIsInstance(s.mapping2['someint'][0], Doc)
        self.assertIsInstance(s.mapping3['someint'][0], Doc)
        self.assertIsInstance(s.mapping4['someint']['d'], Doc)
        self.assertIsInstance(s.mapping5['someint']['d'], Doc)
        self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc)
        self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc)
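A compact sketch of the key rules that test_general_things walks through: dict keys must be strings, must not contain '.', and must not start with '$' (a '$' later in the key is accepted). The model name is illustrative.

from mongoengine import DictField, Document, ValidationError

class Post(Document):
    info = DictField()

for bad_info in ({'the.title': 'x'}, {'$title': 'x'}, {1: 'x'}):
    post = Post(info=bad_info)
    try:
        post.validate()
    except ValidationError:
        pass  # each of these key shapes is rejected
    else:
        raise AssertionError('expected a ValidationError')

Post(info={'te$t': 'x'}).validate()  # '$' not in first position is allowed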
tests/fields/test_email_field.py (new file, 120 lines)
@@ -0,0 +1,120 @@
# -*- coding: utf-8 -*-
import sys
from unittest import SkipTest

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestEmailField(MongoDBTestCase):
    def test_generic_behavior(self):
        class User(Document):
            email = EmailField()

        user = User(email='ross@example.com')
        user.validate()

        user = User(email='ross@example.co.uk')
        user.validate()

        user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S'
                           'aJIazqqWkm7.net'))
        user.validate()

        user = User(email='new-tld@example.technology')
        user.validate()

        user = User(email='ross@example.com.')
        self.assertRaises(ValidationError, user.validate)

        # unicode domain
        user = User(email=u'user@пример.рф')
        user.validate()

        # invalid unicode domain
        user = User(email=u'user@пример')
        self.assertRaises(ValidationError, user.validate)

        # invalid data type
        user = User(email=123)
        self.assertRaises(ValidationError, user.validate)

    def test_email_field_unicode_user(self):
        # Don't run this test on pypy3, which doesn't support unicode regex:
        # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode
        if sys.version_info[:2] == (3, 2):
            raise SkipTest('unicode email addresses are not supported on PyPy 3')

        class User(Document):
            email = EmailField()

        # unicode user shouldn't validate by default...
        user = User(email=u'Dörte@Sörensen.example.com')
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine with allow_utf8_user set to True
        class User(Document):
            email = EmailField(allow_utf8_user=True)

        user = User(email=u'Dörte@Sörensen.example.com')
        user.validate()

    def test_email_field_domain_whitelist(self):
        class User(Document):
            email = EmailField()

        # localhost domain shouldn't validate by default...
        user = User(email='me@localhost')
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine if it's whitelisted
        class User(Document):
            email = EmailField(domain_whitelist=['localhost'])

        user = User(email='me@localhost')
        user.validate()

    def test_email_field_ip_domain(self):
        class User(Document):
            email = EmailField()

        valid_ipv4 = 'email@[127.0.0.1]'
        valid_ipv6 = 'email@[2001:dB8::1]'
        invalid_ip = 'email@[324.0.0.1]'

        # IP address as a domain shouldn't validate by default...
        user = User(email=valid_ipv4)
        self.assertRaises(ValidationError, user.validate)

        user = User(email=valid_ipv6)
        self.assertRaises(ValidationError, user.validate)

        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine with allow_ip_domain set to True
        class User(Document):
            email = EmailField(allow_ip_domain=True)

        user = User(email=valid_ipv4)
        user.validate()

        user = User(email=valid_ipv6)
        user.validate()

        # invalid IP should still fail validation
        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)

    def test_email_field_honors_regex(self):
        class User(Document):
            email = EmailField(regex=r'\w+@example.com')

        # Fails regex validation
        user = User(email='me@foo.com')
        self.assertRaises(ValidationError, user.validate)

        # Passes regex validation
        user = User(email='me@example.com')
        self.assertIsNone(user.validate())
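The switches tested above can be combined; a sketch of a field relaxing two defaults at once, with an illustrative model name:

from mongoengine import Document, EmailField

class Account(Document):
    email = EmailField(
        allow_ip_domain=True,            # accept e.g. user@[127.0.0.1]
        domain_whitelist=['localhost'],  # accept this bare domain too
    )

Account(email='me@localhost').validate()
Account(email='me@[127.0.0.1]').validate()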
tests/fields/test_float_field.py (new file, 58 lines)
@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
import six

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestFloatField(MongoDBTestCase):

    def test_float_ne_operator(self):
        class TestDocument(Document):
            float_fld = FloatField()

        TestDocument.drop_collection()

        TestDocument(float_fld=None).save()
        TestDocument(float_fld=1).save()

        self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count())

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to float fields.
        """
        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)

        class BigPerson(Document):
            height = FloatField()

        person = Person()
        person.height = 1.89
        person.validate()

        person.height = '2.0'
        self.assertRaises(ValidationError, person.validate)

        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)

        person.height = 4.0
        self.assertRaises(ValidationError, person.validate)

        person_2 = Person(height='something invalid')
        self.assertRaises(ValidationError, person_2.validate)

        big_person = BigPerson()

        for value, value_type in enumerate(six.integer_types):
            big_person.height = value_type(value)
            big_person.validate()

        big_person.height = 2 ** 500
        big_person.validate()

        big_person.height = 2 ** 100000  # Too big for a float value
        self.assertRaises(ValidationError, big_person.validate)
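One asymmetry worth noting against the DecimalField tests earlier: DecimalField accepts the string '2.0', but FloatField rejects it, while plain ints coerce fine. A sketch with illustrative names:

from mongoengine import Document, FloatField, ValidationError

class Box(Document):
    weight = FloatField()

box = Box(weight=2)  # ints are accepted
box.validate()

box.weight = '2.0'   # numeric strings are not
try:
    box.validate()
except ValidationError:
    pass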
tests/fields/test_int_field.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
from mongoengine import *

from tests.utils import MongoDBTestCase


class TestIntField(MongoDBTestCase):

    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields.
        """
        class Person(Document):
            age = IntField(min_value=0, max_value=110)

        person = Person()
        person.age = 0
        person.validate()

        person.age = 50
        person.validate()

        person.age = 110
        person.validate()

        person.age = -1
        self.assertRaises(ValidationError, person.validate)
        person.age = 120
        self.assertRaises(ValidationError, person.validate)
        person.age = 'ten'
        self.assertRaises(ValidationError, person.validate)

    def test_ne_operator(self):
        class TestDocument(Document):
            int_fld = IntField()

        TestDocument.drop_collection()

        TestDocument(int_fld=None).save()
        TestDocument(int_fld=1).save()

        self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count())
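The __ne=None pattern shared by the int, float and long test files, in isolation: the filter excludes documents whose field is null or missing. The connection and model names are illustrative.

from mongoengine import Document, IntField, connect

connect('example-db')  # hypothetical database name

class Counter(Document):
    n = IntField()

Counter.drop_collection()
Counter(n=None).save()
Counter(n=1).save()
assert Counter.objects(n__ne=None).count() == 1  # only the non-null doc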
tests/fields/test_lazy_reference_field.py (new file, 524 lines)
@@ -0,0 +1,524 @@
# -*- coding: utf-8 -*-
from bson import DBRef, ObjectId

from mongoengine import *
from mongoengine.base import LazyReference

from tests.utils import MongoDBTestCase


class TestLazyReferenceField(MongoDBTestCase):
    def test_lazy_reference_config(self):
        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)

    def test_lazy_reference_simple(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keeps a cache on the referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")

    def test_lazy_reference_fetch_invalid_ref(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        animal.delete()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(DoesNotExist):
            p.animal.fetch()

    def test_lazy_reference_set(self):
        class Animal(Document):
            meta = {'allow_inheritance': True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick='doggo', name='dog').save()
        for ref in (
                animal,
                animal.pk,
                DBRef(animal._get_collection_name(), animal.pk),
                LazyReference(Animal, animal.pk),

                sub_animal,
                sub_animal.pk,
                DBRef(sub_animal._get_collection_name(), sub_animal.pk),
                LazyReference(SubAnimal, sub_animal.pk),
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, LazyReference)
            p.animal.fetch()

    def test_lazy_reference_bad_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
                42,
                'foo',
                baddoc,
                DBRef(baddoc._get_collection_name(), animal.pk),
                LazyReference(BadDoc, animal.pk)
        ):
            with self.assertRaises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()

    def test_lazy_reference_query_conversion(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_lazy_reference_query_conversion_dbref(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_lazy_reference_passthrough(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal, passthrough=False)
            animal_passthrough = LazyReferenceField(Animal, passthrough=True)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(animal=animal, animal_passthrough=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(KeyError):
            p.animal['name']
        with self.assertRaises(AttributeError):
            p.animal.name
        self.assertEqual(p.animal.pk, animal.pk)

        self.assertEqual(p.animal_passthrough.name, "Leopard")
        self.assertEqual(p.animal_passthrough['name'], "Leopard")

        # Should not be able to access referenced document's methods
        with self.assertRaises(AttributeError):
            p.animal.save
        with self.assertRaises(KeyError):
            p.animal['save']

    def test_lazy_reference_not_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person='foo').save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)

    def test_lazy_reference_equality(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        Animal.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        animalref = LazyReference(Animal, animal.pk)
        self.assertEqual(animal, animalref)
        self.assertEqual(animalref, animal)

        other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
        self.assertNotEqual(animal, other_animalref)
        self.assertNotEqual(other_animalref, animal)

    def test_lazy_reference_embedded(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(LazyReferenceField(Animal))
            direct = LazyReferenceField(Animal)

        class Ocurrence(Document):
            in_list = ListField(LazyReferenceField(Animal))
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal1 = Animal('doggo').save()
        animal2 = Animal('cheeta').save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
            direct=animal1
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        occ.direct = animal1.id
        occ.in_list = [animal1.id, animal2.id]
        occ.in_embedded.direct = animal1.id
        occ.in_embedded.in_list = [animal1.id, animal2.id]
        check_fields_type(occ)


class TestGenericLazyReferenceField(MongoDBTestCase):
    def test_generic_lazy_reference_simple(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keeps a cache on the referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")

    def test_generic_lazy_reference_choices(self):
        class Animal(Document):
            name = StringField()

        class Vegetal(Document):
            name = StringField()

        class Mineral(Document):
            name = StringField()

        class Ocurrence(Document):
            living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
            thing = GenericLazyReferenceField()

        Animal.drop_collection()
        Vegetal.drop_collection()
        Mineral.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard").save()
        vegetal = Vegetal(name="Oak").save()
        mineral = Mineral(name="Granite").save()

        occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
        occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
        with self.assertRaises(ValidationError):
            Ocurrence(living_thing=mineral).save()

        occ = Ocurrence.objects.get(living_thing=animal)
        self.assertEqual(occ, occ_animal)
        self.assertIsInstance(occ.thing, LazyReference)
        self.assertIsInstance(occ.living_thing, LazyReference)

        occ.thing = vegetal
        occ.living_thing = vegetal
        occ.save()

        occ.thing = mineral
        occ.living_thing = mineral
        with self.assertRaises(ValidationError):
            occ.save()

    def test_generic_lazy_reference_set(self):
        class Animal(Document):
            meta = {'allow_inheritance': True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick='doggo', name='dog').save()
        for ref in (
                animal,
                LazyReference(Animal, animal.pk),
                {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)},

                sub_animal,
                LazyReference(SubAnimal, sub_animal.pk),
                {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)},
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, (LazyReference, Document))
            p.animal.fetch()

    def test_generic_lazy_reference_bad_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField(choices=['Animal'])

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
                42,
                'foo',
                baddoc,
                LazyReference(BadDoc, animal.pk)
        ):
            with self.assertRaises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()

    def test_generic_lazy_reference_query_conversion(self):
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = GenericLazyReferenceField()

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_generic_lazy_reference_not_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person='foo').save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)

    def test_generic_lazy_reference_embedded(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(GenericLazyReferenceField())
            direct = GenericLazyReferenceField()

        class Ocurrence(Document):
            in_list = ListField(GenericLazyReferenceField())
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal1 = Animal('doggo').save()
        animal2 = Animal('cheeta').save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
            direct=animal1
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)}
        animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)}
        occ.direct = animal1_ref
        occ.in_list = [animal1_ref, animal2_ref]
        occ.in_embedded.direct = animal1_ref
        occ.in_embedded.in_list = [animal1_ref, animal2_ref]
        check_fields_type(occ)
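The fetch()/cache contract both lazy-reference test classes rely on, shown on its own: the first fetch() result is cached on the LazyReference, and force=True bypasses the cache. The connection and model names are illustrative.

from mongoengine import Document, LazyReferenceField, StringField, connect

connect('example-db')  # hypothetical database name

class Author(Document):
    name = StringField()

class Book(Document):
    author = LazyReferenceField(Author)

Author.drop_collection()
Book.drop_collection()
author = Author(name='A').save()
book = Book(author=author).save()
book.reload()

cached = book.author.fetch()          # one query, result is cached
assert book.author.fetch() is cached  # same object, no new query
assert book.author.fetch(force=True) is not cached  # cache bypassed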
tests/fields/test_long_field.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
import six

try:
    from bson.int64 import Int64
except ImportError:
    Int64 = long

from mongoengine import *
from mongoengine.connection import get_db

from tests.utils import MongoDBTestCase


class TestLongField(MongoDBTestCase):

    def test_long_field_is_considered_as_int64(self):
        """
        Tests that long fields are stored as long in mongo, even if long
        value is small enough to be an int.
        """
        class TestLongFieldConsideredAsInt64(Document):
            some_long = LongField()

        doc = TestLongFieldConsideredAsInt64(some_long=42).save()
        db = get_db()
        self.assertIsInstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64)
        self.assertIsInstance(doc.some_long, six.integer_types)

    def test_long_validation(self):
        """Ensure that invalid values cannot be assigned to long fields.
        """
        class TestDocument(Document):
            value = LongField(min_value=0, max_value=110)

        doc = TestDocument()
        doc.value = 50
        doc.validate()

        doc.value = -1
        self.assertRaises(ValidationError, doc.validate)
        doc.value = 120
        self.assertRaises(ValidationError, doc.validate)
        doc.value = 'ten'
        self.assertRaises(ValidationError, doc.validate)

    def test_long_ne_operator(self):
        class TestDocument(Document):
            long_fld = LongField()

        TestDocument.drop_collection()

        TestDocument(long_fld=None).save()
        TestDocument(long_fld=1).save()

        self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count())
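A sketch of the int64 guarantee in the first test above: LongField values are stored as BSON int64 even when they would fit in 32 bits, so pymongo hands back bson.int64.Int64. The connection and model names are illustrative.

from bson.int64 import Int64

from mongoengine import Document, LongField, connect

connect('example-db')  # hypothetical database name

class Stat(Document):
    hits = LongField()

Stat.drop_collection()
Stat(hits=42).save()
raw = Stat.objects.as_pymongo().first()
assert isinstance(raw['hits'], Int64)  # int64 on the wire, not int32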
tests/fields/test_map_field.py (new file, 144 lines)
@@ -0,0 +1,144 @@
# -*- coding: utf-8 -*-
import datetime

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestMapField(MongoDBTestCase):

    def test_mapfield(self):
        """Ensure that the MapField handles the declared type."""
        class Simple(Document):
            mapping = MapField(IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping['someint'] = 1
        e.save()

        with self.assertRaises(ValidationError):
            e.mapping['somestring'] = "abc"
            e.save()

        with self.assertRaises(ValidationError):
            class NoDeclaredType(Document):
                mapping = MapField()

    def test_complex_mapfield(self):
        """Ensure that the MapField can handle complex declared types."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Extensible(Document):
            mapping = MapField(EmbeddedDocumentField(SettingBase))

        Extensible.drop_collection()

        e = Extensible()
        e.mapping['somestring'] = StringSetting(value='foo')
        e.mapping['someint'] = IntegerSetting(value=42)
        e.save()

        e2 = Extensible.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping['somestring'], StringSetting)
        self.assertIsInstance(e2.mapping['someint'], IntegerSetting)

        with self.assertRaises(ValidationError):
            e.mapping['someint'] = 123
            e.save()

    def test_embedded_mapfield_db_field(self):
        class Embedded(EmbeddedDocument):
            number = IntField(default=0, db_field='i')

        class Test(Document):
            my_map = MapField(field=EmbeddedDocumentField(Embedded),
                              db_field='x')

        Test.drop_collection()

        test = Test()
        test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
        test.save()

        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

        test = Test.objects.get()
        self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
        doc = self.db.test.find_one()
        self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)

    def test_mapfield_numerical_index(self):
        """Ensure that MapField accepts numeric strings as indexes."""

        class Embedded(EmbeddedDocument):
            name = StringField()

        class Test(Document):
            my_map = MapField(EmbeddedDocumentField(Embedded))

        Test.drop_collection()

        test = Test()
        test.my_map['1'] = Embedded(name='test')
        test.save()
        test.my_map['1'].name = 'test updated'
        test.save()

    def test_map_field_lookup(self):
        """Ensure MapField lookups succeed on Fields without a lookup
        method.
        """

        class Action(EmbeddedDocument):
            operation = StringField()
            object = StringField()

        class Log(Document):
            name = StringField()
            visited = MapField(DateTimeField())
            actions = MapField(EmbeddedDocumentField(Action))

        Log.drop_collection()
        Log(name="wilson", visited={'friends': datetime.datetime.now()},
            actions={'friends': Action(operation='drink', object='beer')}).save()

        self.assertEqual(1, Log.objects(
            visited__friends__exists=True).count())

        self.assertEqual(1, Log.objects(
            actions__friends__operation='drink',
            actions__friends__object='beer').count())

    def test_map_field_unicode(self):
        class Info(EmbeddedDocument):
            description = StringField()
            value_list = ListField(field=StringField())

        class BlogPost(Document):
            info_dict = MapField(field=EmbeddedDocumentField(Info))

        BlogPost.drop_collection()

        tree = BlogPost(info_dict={
            u"éééé": {
                'description': u"VALUE: éééé"
            }
        })

        tree.save()

        self.assertEqual(
            BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description,
            u"VALUE: éééé"
        )
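What test_embedded_mapfield_db_field checks, distilled: db_field renames apply both to the MapField itself and to fields inside its embedded values, so the raw document only contains the short names. The connection and model names are illustrative.

from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         IntField, MapField, connect)

connect('example-db')  # hypothetical database name

class Cell(EmbeddedDocument):
    number = IntField(default=0, db_field='i')

class Grid(Document):
    cells = MapField(EmbeddedDocumentField(Cell), db_field='x')

Grid.drop_collection()
Grid(cells={'KEY': Cell(number=1)}).save()
raw = Grid.objects.as_pymongo().first()
assert raw['x']['KEY']['i'] == 1  # both renames are visible in raw storage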
tests/fields/test_reference_field.py (new file, 219 lines)
@@ -0,0 +1,219 @@
# -*- coding: utf-8 -*-
from bson import SON, DBRef

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestReferenceField(MongoDBTestCase):
    def test_reference_validation(self):
        """Ensure that invalid document objects cannot be assigned to
        reference fields.
        """

        class User(Document):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(User)

        User.drop_collection()
        BlogPost.drop_collection()

        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)

        user = User(name='Test User')

        # Ensure that the referenced object must have been saved
        post1 = BlogPost(content='Chips and gravy taste good.')
        post1.author = user
        self.assertRaises(ValidationError, post1.save)

        # Check that an invalid object type cannot be used
        post2 = BlogPost(content='Chips and chilli taste good.')
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        # Ensure ObjectID's are accepted as references
        user_object_id = user.pk
        post3 = BlogPost(content="Chips and curry sauce taste good.")
        post3.author = user_object_id
        post3.save()

        # Make sure referencing a saved document of the right type works
        user.save()
        post1.author = user
        post1.save()

        # Make sure referencing a saved document of the *wrong* type fails
        post2.save()
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

    def test_objectid_reference_fields(self):
        """Make sure storing Object ID references works."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1.pk).save()

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_dbref_reference_fields(self):
        """Make sure storing references as bson.dbref.DBRef works."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=True)

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1).save()

        self.assertEqual(
            Person._get_collection().find_one({'name': 'Ross'})['parent'],
            DBRef('person', p1.pk)
        )

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_dbref_to_mongo(self):
        """Make sure that calling to_mongo on a ReferenceField which
        has dbref=False, but actually contains a DBRef, returns
        the ID of that DBRef.
        """

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=False)

        p = Person(
            name='Steve',
            parent=DBRef('person', 'abcdefghijklmnop')
        )
        self.assertEqual(p.to_mongo(), SON([
            ('name', u'Steve'),
            ('parent', 'abcdefghijklmnop')
        ]))

    def test_objectid_reference_fields_dbref_false(self):
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=False)

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1).save()

        col = Person._get_collection()
        data = col.find_one({'name': 'Ross'})
        self.assertEqual(data['parent'], p1.pk)

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_undefined_reference(self):
        """Ensure that ReferenceFields may reference undefined Documents.
        """
        class Product(Document):
            name = StringField()
            company = ReferenceField('Company')

        class Company(Document):
            name = StringField()

        Product.drop_collection()
        Company.drop_collection()

        ten_gen = Company(name='10gen')
        ten_gen.save()
        mongodb = Product(name='MongoDB', company=ten_gen)
        mongodb.save()

        me = Product(name='MongoEngine')
        me.save()

        obj = Product.objects(company=ten_gen).first()
        self.assertEqual(obj, mongodb)
        self.assertEqual(obj.company, ten_gen)

        obj = Product.objects(company=None).first()
        self.assertEqual(obj, me)

        obj = Product.objects.get(company=None)
        self.assertEqual(obj, me)

    def test_reference_query_conversion(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

    def test_reference_query_conversion_dbref(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)
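For orientation, a sketch of the storage contract these tests pin down: with dbref=True the reference is persisted as a bson DBRef, with dbref=False as a bare ObjectId (all names here are illustrative):

from mongoengine import connect, Document, ReferenceField, StringField

connect('reference_demo')  # illustrative

class Person(Document):
    name = StringField()
    boss = ReferenceField('self', dbref=True)     # stored as DBRef('person', <ObjectId>)
    mentor = ReferenceField('self', dbref=False)  # stored as a bare ObjectId

boss = Person(name='B').save()
Person(name='A', boss=boss, mentor=boss).save()
raw = Person._get_collection().find_one({'name': 'A'})
# raw['boss'] is a DBRef; raw['mentor'] is an ObjectId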
tests/fields/test_sequence_field.py (new file, 271 lines)
@@ -0,0 +1,271 @@
# -*- coding: utf-8 -*-

from mongoengine import *

from tests.utils import MongoDBTestCase


class TestSequenceField(MongoDBTestCase):
    def test_sequence_field(self):
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 1000)

    def test_sequence_field_get_next_value(self):
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        self.assertEqual(Person.id.get_next_value(), 11)
        self.db['mongoengine.counters'].drop()

        self.assertEqual(Person.id.get_next_value(), 1)

        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        self.assertEqual(Person.id.get_next_value(), '11')
        self.db['mongoengine.counters'].drop()

        self.assertEqual(Person.id.get_next_value(), '1')

    def test_sequence_field_sequence_name(self):
        class Person(Document):
            id = SequenceField(primary_key=True, sequence_name='jelly')
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 10)

        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 1000)

    def test_multiple_sequence_fields(self):
        class Person(Document):
            id = SequenceField(primary_key=True)
            counter = SequenceField()
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        counters = [i.counter for i in Person.objects]
        self.assertEqual(counters, range(1, 11))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 1000)

        Person.counter.set_next_value(999)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'})
        self.assertEqual(c['next'], 999)

    def test_sequence_fields_reload(self):
        class Animal(Document):
            counter = SequenceField()
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Animal.drop_collection()

        a = Animal(name="Boi").save()

        self.assertEqual(a.counter, 1)
        a.reload()
        self.assertEqual(a.counter, 1)

        a.counter = None
        self.assertEqual(a.counter, 2)
        a.save()

        self.assertEqual(a.counter, 2)

        a = Animal.objects.first()
        self.assertEqual(a.counter, 2)
        a.reload()
        self.assertEqual(a.counter, 2)

    def test_multiple_sequence_fields_on_docs(self):
        class Animal(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Animal.drop_collection()
        Person.drop_collection()

        for x in range(10):
            Animal(name="Animal %s" % x).save()
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, range(1, 11))

        ids = [i.id for i in Animal.objects]
        self.assertEqual(ids, range(1, 11))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

    def test_sequence_field_value_decorator(self):
        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            p = Person(name="Person %s" % x)
            p.save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, map(str, range(1, 11)))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

    def test_embedded_sequence_field(self):
        class Comment(EmbeddedDocument):
            id = SequenceField()
            content = StringField(required=True)

        class Post(Document):
            title = StringField(required=True)
            comments = ListField(EmbeddedDocumentField(Comment))

        self.db['mongoengine.counters'].drop()
        Post.drop_collection()

        Post(title="MongoEngine",
             comments=[Comment(content="NoSQL Rocks"),
                       Comment(content="MongoEngine Rocks")]).save()
        c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'})
        self.assertEqual(c['next'], 2)
        post = Post.objects.first()
        self.assertEqual(1, post.comments[0].id)
        self.assertEqual(2, post.comments[1].id)

    def test_inherited_sequencefield(self):
        class Base(Document):
            name = StringField()
            counter = SequenceField()
            meta = {'abstract': True}

        class Foo(Base):
            pass

        class Bar(Base):
            pass

        bar = Bar(name='Bar')
        bar.save()

        foo = Foo(name='Foo')
        foo.save()

        self.assertIn('base.counter',
                      self.db['mongoengine.counters'].find().distinct('_id'))
        self.assertNotIn('foo.counter',
                         self.db['mongoengine.counters'].find().distinct('_id'))
        self.assertNotIn('bar.counter',
                         self.db['mongoengine.counters'].find().distinct('_id'))
        self.assertNotEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields['counter'].owner_document, Base)
        self.assertEqual(bar._fields['counter'].owner_document, Base)

    def test_no_inherited_sequencefield(self):
        class Base(Document):
            name = StringField()
            meta = {'abstract': True}

        class Foo(Base):
            counter = SequenceField()

        class Bar(Base):
            counter = SequenceField()

        bar = Bar(name='Bar')
        bar.save()

        foo = Foo(name='Foo')
        foo.save()

        self.assertNotIn('base.counter',
                         self.db['mongoengine.counters'].find().distinct('_id'))
        self.assertIn('foo.counter',
                      self.db['mongoengine.counters'].find().distinct('_id'))
        self.assertIn('bar.counter',
                      self.db['mongoengine.counters'].find().distinct('_id'))
        self.assertEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields['counter'].owner_document, Foo)
        self.assertEqual(bar._fields['counter'].owner_document, Bar)
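The counter documents these tests inspect live in the mongoengine.counters collection, keyed '<sequence name>.<field name>'; a minimal sketch (database name illustrative):

from mongoengine import connect, Document, SequenceField
from mongoengine.connection import get_db

connect('sequence_demo')  # illustrative

class Ticket(Document):
    number = SequenceField()  # auto-increments atomically per save

Ticket().save()
Ticket().save()
counter = get_db()['mongoengine.counters'].find_one({'_id': 'ticket.number'})
assert counter['next'] == 2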
tests/fields/test_url_field.py (new file, 59 lines)
@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
from mongoengine import *

from tests.utils import MongoDBTestCase


class TestURLField(MongoDBTestCase):

    def test_validation(self):
        """Ensure that URLFields validate URLs properly."""
        class Link(Document):
            url = URLField()

        link = Link()
        link.url = 'google'
        self.assertRaises(ValidationError, link.validate)

        link.url = 'http://www.google.com:8080'
        link.validate()

    def test_unicode_url_validation(self):
        """Ensure unicode URLs are validated properly."""
        class Link(Document):
            url = URLField()

        link = Link()
        link.url = u'http://привет.com'

        # TODO fix URL validation - this *IS* a valid URL
        # For now we just want to make sure that the error message is correct
        with self.assertRaises(ValidationError) as ctx_err:
            link.validate()
        self.assertEqual(unicode(ctx_err.exception),
                         u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])")

    def test_url_scheme_validation(self):
        """Ensure that URLFields validate URLs with specific schemes properly.
        """
        class Link(Document):
            url = URLField()

        class SchemeLink(Document):
            url = URLField(schemes=['ws', 'irc'])

        link = Link()
        link.url = 'ws://google.com'
        self.assertRaises(ValidationError, link.validate)

        scheme_link = SchemeLink()
        scheme_link.url = 'ws://google.com'
        scheme_link.validate()

    def test_underscore_allowed_in_domain_names(self):
        class Link(Document):
            url = URLField()

        link = Link()
        link.url = 'https://san_leandro-ca.geebo.com'
        link.validate()
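As the scheme test shows, URLField accepts only the usual web schemes unless schemes= widens the whitelist; a short sketch (class name illustrative):

from mongoengine import Document, URLField, ValidationError

class Bookmark(Document):
    url = URLField(schemes=['ws', 'wss'])  # accept websocket URLs only

b = Bookmark(url='ws://example.com')
b.validate()  # passes

b.url = 'http://example.com'
try:
    b.validate()
except ValidationError:
    pass  # http is not among the allowed schemes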
tests/fields/test_uuid_field.py (new file, 65 lines)
@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
import uuid

from mongoengine import *

from tests.utils import MongoDBTestCase, get_as_pymongo


class Person(Document):
    api_key = UUIDField(binary=False)


class TestUUIDField(MongoDBTestCase):
    def test_storage(self):
        uid = uuid.uuid4()
        person = Person(api_key=uid).save()
        self.assertEqual(
            get_as_pymongo(person),
            {'_id': person.id,
             'api_key': str(uid)}
        )

    def test_field_string(self):
        """Test UUID fields storing as String."""
        Person.drop_collection()

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)

    def test_field_binary(self):
        """Test UUID fields storing as Binary object."""
        Person.drop_collection()

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)
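A sketch of the binary= switch under test: binary=True (the default) stores a BSON Binary, while binary=False stores the string form (names illustrative):

import uuid
from mongoengine import connect, Document, UUIDField

connect('uuid_demo')  # illustrative

class ApiClient(Document):
    key_str = UUIDField(binary=False)  # stored as '9d159858-...'
    key_bin = UUIDField()              # binary=True is the default

uid = uuid.uuid4()
ApiClient(key_str=uid, key_bin=uid).save()
raw = ApiClient._get_collection().find_one()
assert raw['key_str'] == str(uid)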
@@ -48,6 +48,7 @@ class PickleSignalsTest(Document):
    def post_delete(self, sender, document, **kwargs):
        pickled = pickle.dumps(document)


signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)
@@ -1,6 +1,6 @@
-from transform import *
-from field_list import *
-from queryset import *
-from visitor import *
-from geo import *
-from modify import *
+from .transform import *
+from .field_list import *
+from .queryset import *
+from .visitor import *
+from .geo import *
+from .modify import *
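The added dots are the Python 3 compatibility point here: implicit relative imports were removed in Python 3, so package-internal modules must be named explicitly.

# Python 2 only -- implicit relative import, removed in Python 3:
#     from transform import *
# Works on Python 2 and Python 3 alike:
from .transform import *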
@@ -181,7 +181,7 @@ class OnlyExcludeAllTest(unittest.TestCase):
        employee.save()

        obj = self.Person.objects(id=employee.id).only('age').get()
-        self.assertTrue(isinstance(obj, Employee))
+        self.assertIsInstance(obj, Employee)

        # Check field names are looked up properly
        obj = Employee.objects(id=employee.id).only('salary').get()
@@ -208,7 +208,7 @@ class OnlyExcludeAllTest(unittest.TestCase):

        BlogPost.drop_collection()

-        post = BlogPost(content='Had a good coffee today...', various={'test_dynamic':{'some': True}})
+        post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}})
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
        post.save()
@@ -413,7 +413,6 @@ class OnlyExcludeAllTest(unittest.TestCase):
        numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

    def test_exclude_from_subclasses_docs(self):

        class Base(Document):
@@ -436,5 +435,6 @@ class OnlyExcludeAllTest(unittest.TestCase):

        self.assertRaises(LookUpError, Base.objects.exclude, "made_up")


if __name__ == '__main__':
    unittest.main()
@@ -3,7 +3,7 @@ import unittest

from mongoengine import *

-from tests.utils import MongoDBTestCase, needs_mongodb_v3
+from tests.utils import MongoDBTestCase, requires_mongodb_gte_3


__all__ = ("GeoQueriesTest",)
@@ -72,7 +72,7 @@ class GeoQueriesTest(MongoDBTestCase):

    # $minDistance was added in MongoDB v2.6, but continued being buggy
    # until v3.0; skip for older versions
-    @needs_mongodb_v3
+    @requires_mongodb_gte_3
    def test_near_and_min_distance(self):
        """Ensure the "min_distance" operator works alongside the "near"
        operator.
@@ -95,9 +95,9 @@ class GeoQueriesTest(MongoDBTestCase):
            location__within_distance=point_and_distance)
        self.assertEqual(events.count(), 2)
        events = list(events)
-        self.assertTrue(event2 not in events)
-        self.assertTrue(event1 in events)
-        self.assertTrue(event3 in events)
+        self.assertNotIn(event2, events)
+        self.assertIn(event1, events)
+        self.assertIn(event3, events)

        # find events within 10 degrees of san francisco
        point_and_distance = [[-122.415579, 37.7566023], 10]
@@ -245,7 +245,7 @@ class GeoQueriesTest(MongoDBTestCase):

    # $minDistance was added in MongoDB v2.6, but continued being buggy
    # until v3.0; skip for older versions
-    @needs_mongodb_v3
+    @requires_mongodb_gte_3
    def test_2dsphere_near_and_min_max_distance(self):
        """Ensure "min_distance" and "max_distance" operators work well
        together with the "near" operator in a 2dsphere index.
@@ -285,9 +285,9 @@ class GeoQueriesTest(MongoDBTestCase):
            location__geo_within_center=point_and_distance)
        self.assertEqual(events.count(), 2)
        events = list(events)
-        self.assertTrue(event2 not in events)
-        self.assertTrue(event1 in events)
-        self.assertTrue(event3 in events)
+        self.assertNotIn(event2, events)
+        self.assertIn(event1, events)
+        self.assertIn(event3, events)

    def _test_embedded(self, point_field_class):
        """Helper test method ensuring given point field class works
@@ -329,7 +329,7 @@ class GeoQueriesTest(MongoDBTestCase):
        self._test_embedded(point_field_class=PointField)

    # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039
-    @needs_mongodb_v3
+    @requires_mongodb_gte_3
    def test_spherical_geospatial_operators(self):
        """Ensure that spherical geospatial queries are working."""
        class Point(Document):
@@ -534,11 +534,11 @@ class GeoQueriesTest(MongoDBTestCase):

        Location.drop_collection()

-        Location(loc=[1,2]).save()
+        Location(loc=[1, 2]).save()
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]})

-        Location.objects.update(set__loc=[2,1])
+        Location.objects.update(set__loc=[2, 1])
        loc = Location.objects.as_pymongo()[0]
        self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]})
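A sketch of the near/min_distance query shape these tests exercise (document, coordinates, and distances illustrative; distances are in meters for a 2dsphere PointField):

from mongoengine import connect, Document, PointField

connect('geo_demo')  # illustrative

class Event(Document):
    location = PointField()

Event(location=[-87.677137, 41.909889]).save()
# Events farther than 1 km but within 10 km of the given point:
nearby = Event.objects(location__near=[-87.67, 41.91],
                       location__min_distance=1000,
                       location__max_distance=10000)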
@@ -2,7 +2,7 @@ import unittest

from mongoengine import connect, Document, IntField, StringField, ListField

-from tests.utils import needs_mongodb_v26
+from tests.utils import requires_mongodb_gte_26

__all__ = ("FindAndModifyTest",)

@@ -96,7 +96,7 @@ class FindAndModifyTest(unittest.TestCase):
        self.assertEqual(old_doc.to_mongo(), {"_id": 1})
        self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])

-    @needs_mongodb_v26
+    @requires_mongodb_gte_26
    def test_modify_with_push(self):
        class BlogPost(Document):
            tags = ListField(StringField())
@@ -6,10 +6,12 @@ from mongoengine.connection import connect

__author__ = 'stas'


class Person(Document):
    name = StringField()
    age = IntField()


class TestQuerysetPickable(unittest.TestCase):
    """
    Test for adding pickling support for QuerySet instances
@@ -18,7 +20,7 @@ class TestQuerysetPickable(unittest.TestCase):
    def setUp(self):
        super(TestQuerysetPickable, self).setUp()

-        connection = connect(db="test") #type: pymongo.mongo_client.MongoClient
+        connection = connect(db="test")  # type: pymongo.mongo_client.MongoClient

        connection.drop_database("test")

@@ -27,7 +29,6 @@ class TestQuerysetPickable(unittest.TestCase):
            age=21
        )

    def test_pickle_simple_qs(self):

        qs = Person.objects.all()
@@ -46,10 +47,10 @@ class TestQuerysetPickable(unittest.TestCase):

        self.assertEqual(qs.count(), loadedQs.count())

-        #can update loadedQs
+        # can update loadedQs
        loadedQs.update(age=23)

-        #check
+        # check
        self.assertEqual(Person.objects.first().age, 23)

    def test_pickle_support_filtration(self):
@@ -70,7 +71,7 @@ class TestQuerysetPickable(unittest.TestCase):

        self.assertEqual(loaded.count(), 2)
        self.assertEqual(loaded.filter(name="Bob").first().age, 23)
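The pickling support exercised above round-trips a whole QuerySet; minimally (names illustrative):

import pickle
from mongoengine import connect, Document, StringField

connect('pickle_demo')  # illustrative

class Person(Document):
    name = StringField()

Person(name='Bob').save()
qs = Person.objects.all()
loaded = pickle.loads(pickle.dumps(qs))
assert qs.count() == loaded.count()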
File diff suppressed because it is too large
@@ -48,15 +48,15 @@ class TransformTest(unittest.TestCase):

        for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
            update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
-            self.assertTrue(isinstance(update[v]["dictField.test"], dict))
+            self.assertIsInstance(update[v]["dictField.test"], dict)

        # Update special cases
        update = transform.update(DicDoc, unset__dictField__test=doc)
        self.assertEqual(update["$unset"]["dictField.test"], 1)

        update = transform.update(DicDoc, pull__dictField__test=doc)
-        self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))
+        self.assertIsInstance(update["$pull"]["dictField"]["test"], dict)

        update = transform.update(LisDoc, pull__foo__in=['a'])
        self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}})

@@ -88,17 +88,15 @@ class TransformTest(unittest.TestCase):
        post = BlogPost(**data)
        post.save()

-        self.assertTrue('postTitle' in
-                        BlogPost.objects(title=data['title'])._query)
+        self.assertIn('postTitle', BlogPost.objects(title=data['title'])._query)
        self.assertFalse('title' in
                        BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(title=data['title']).count(), 1)

-        self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query)
+        self.assertIn('_id', BlogPost.objects(pk=post.id)._query)
        self.assertEqual(BlogPost.objects(pk=post.id).count(), 1)

-        self.assertTrue('postComments.commentContent' in
-                        BlogPost.objects(comments__content='test')._query)
+        self.assertIn('postComments.commentContent', BlogPost.objects(comments__content='test')._query)
        self.assertEqual(BlogPost.objects(comments__content='test').count(), 1)

        BlogPost.drop_collection()
@@ -116,8 +114,8 @@ class TransformTest(unittest.TestCase):
        post = BlogPost(**data)
        post.save()

-        self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query)
-        self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query)
+        self.assertIn('_id', BlogPost.objects(pk=data['title'])._query)
+        self.assertIn('_id', BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1)

        BlogPost.drop_collection()
@@ -260,31 +258,31 @@ class TransformTest(unittest.TestCase):
        events = Event.objects(location__within=box)
        with self.assertRaises(InvalidQueryError):
            events.count()

    def test_update_pull_for_list_fields(self):
        """
        Test added to check pull operation in update for
+        EmbeddedDocumentListField which is inside an EmbeddedDocumentField
        """
        class Word(EmbeddedDocument):
            word = StringField()
            index = IntField()

        class SubDoc(EmbeddedDocument):
            heading = ListField(StringField())
            text = EmbeddedDocumentListField(Word)

        class MainDoc(Document):
            title = StringField()
            content = EmbeddedDocumentField(SubDoc)

        word = Word(word='abc', index=1)
        update = transform.update(MainDoc, pull__content__text=word)
        self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}})

        update = transform.update(MainDoc, pull__content__heading='xyz')
        self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}})


if __name__ == '__main__':
    unittest.main()
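transform.update is the internal helper that expands keyword syntax into a raw MongoDB update document; it can be called without touching the database, as in this sketch mirroring the tests:

from mongoengine import Document, ListField, StringField
from mongoengine.queryset import transform

class LisDoc(Document):
    foo = ListField(StringField())

# Expands the pull__...__in keyword into a raw $pull/$in update document
update = transform.update(LisDoc, pull__foo__in=['a'])
assert update == {'$pull': {'foo': {'$in': ['a']}}}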
@@ -196,7 +196,7 @@ class QTest(unittest.TestCase):

        test2 = test.clone()
        self.assertEqual(test2.count(), 3)
-        self.assertFalse(test2 == test)
+        self.assertNotEqual(test2, test)

        test3 = test2.filter(x=6)
        self.assertEqual(test3.count(), 1)
@@ -275,7 +275,6 @@ class QTest(unittest.TestCase):
        with self.assertRaises(InvalidQueryError):
            self.Person.objects.filter('user1')

    def test_q_regex(self):
        """Ensure that Q objects can be queried using regexes.
        """
@@ -296,6 +295,18 @@ class QTest(unittest.TestCase):
        obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first()
        self.assertEqual(obj, None)

+    def test_q_repr(self):
+        self.assertEqual(repr(Q()), 'Q(**{})')
+        self.assertEqual(repr(Q(name='test')), "Q(**{'name': 'test'})")
+
+        self.assertEqual(
+            repr(Q(name='test') & Q(age__gte=18)),
+            "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))")
+
+        self.assertEqual(
+            repr(Q(name='test') | Q(age__gte=18)),
+            "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))")
+
    def test_q_lists(self):
        """Ensure that Q objects query ListFields correctly.
        """
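The new test_q_repr pins down how composed Q nodes print; for reference, Q objects combine with & and | into query trees without hitting the database:

from mongoengine import Q

q = Q(name='test') & Q(age__gte=18)
repr(q)  # "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))"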
@@ -14,7 +14,7 @@ from mongoengine import (
    connect, register_connection,
    Document, DateTimeField
)
-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine.pymongo_support import IS_PYMONGO_3
import mongoengine.connection
from mongoengine.connection import (MongoEngineConnectionError, get_db,
                                    get_connection)
@@ -39,15 +39,15 @@ class ConnectionTest(unittest.TestCase):
        connect('mongoenginetest')

        conn = get_connection()
-        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
+        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
-        self.assertTrue(isinstance(db, pymongo.database.Database))
+        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, 'mongoenginetest')

        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
-        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
+        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

    def test_connect_in_mocking(self):
        """Ensure that the connect() method works properly in mocking.
@@ -59,31 +59,31 @@ class ConnectionTest(unittest.TestCase):

        connect('mongoenginetest', host='mongomock://localhost')
        conn = get_connection()
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
        conn = get_connection('testdb2')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
        conn = get_connection('testdb3')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect('mongoenginetest4', is_mock=True, alias='testdb4')
        conn = get_connection('testdb4')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
        conn = get_connection('testdb5')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
        conn = get_connection('testdb6')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
        conn = get_connection('testdb7')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

    def test_connect_with_host_list(self):
        """Ensure that the connect() method works when host is a list
@@ -97,27 +97,27 @@ class ConnectionTest(unittest.TestCase):

        connect(host=['mongomock://localhost'])
        conn = get_connection()
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2')
        conn = get_connection('testdb2')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['localhost'], is_mock=True, alias='testdb3')
        conn = get_connection('testdb3')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4')
        conn = get_connection('testdb4')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5')
        conn = get_connection('testdb5')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

        connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6')
        conn = get_connection('testdb6')
-        self.assertTrue(isinstance(conn, mongomock.MongoClient))
+        self.assertIsInstance(conn, mongomock.MongoClient)

    def test_disconnect(self):
        """Ensure that the disconnect() method works properly
@@ -147,12 +147,12 @@ class ConnectionTest(unittest.TestCase):
    def test_connect_uri(self):
        """Ensure that the connect() method works properly with URIs."""
        c = connect(db='mongoenginetest', alias='admin')
-        c.admin.system.users.remove({})
-        c.mongoenginetest.system.users.remove({})
+        c.admin.system.users.delete_many({})
+        c.mongoenginetest.system.users.delete_many({})

-        c.admin.add_user("admin", "password")
+        c.admin.command("createUser", "admin", pwd="password", roles=["root"])
        c.admin.authenticate("admin", "password")
-        c.mongoenginetest.add_user("username", "password")
+        c.admin.command("createUser", "username", pwd="password", roles=["dbOwner"])

        if not IS_PYMONGO_3:
            self.assertRaises(
@@ -163,14 +163,14 @@ class ConnectionTest(unittest.TestCase):
        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

        conn = get_connection()
-        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
+        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
-        self.assertTrue(isinstance(db, pymongo.database.Database))
+        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, 'mongoenginetest')

-        c.admin.system.users.remove({})
-        c.mongoenginetest.system.users.remove({})
+        c.admin.system.users.delete_many({})
+        c.mongoenginetest.system.users.delete_many({})

    def test_connect_uri_without_db(self):
        """Ensure connect() method works properly if the URI doesn't
@@ -179,10 +179,10 @@ class ConnectionTest(unittest.TestCase):
        connect("mongoenginetest", host='mongodb://localhost/')

        conn = get_connection()
-        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
+        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
-        self.assertTrue(isinstance(db, pymongo.database.Database))
+        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, 'mongoenginetest')

    def test_connect_uri_default_db(self):
@@ -192,10 +192,10 @@ class ConnectionTest(unittest.TestCase):
        connect(host='mongodb://localhost/')

        conn = get_connection()
-        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
+        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db()
-        self.assertTrue(isinstance(db, pymongo.database.Database))
+        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, 'test')

    def test_uri_without_credentials_doesnt_override_conn_settings(self):
@@ -217,8 +217,9 @@ class ConnectionTest(unittest.TestCase):
        """
        # Create users
        c = connect('mongoenginetest')
-        c.admin.system.users.remove({})
-        c.admin.add_user('username2', 'password')
+
+        c.admin.system.users.delete_many({})
+        c.admin.command("createUser", "username2", pwd="password", roles=["dbOwner"])

        # Authentication fails without "authSource"
        if IS_PYMONGO_3:
@@ -242,11 +243,11 @@ class ConnectionTest(unittest.TestCase):
                'mongoenginetest?authSource=admin')
        )
        db = get_db('test2')
-        self.assertTrue(isinstance(db, pymongo.database.Database))
+        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, 'mongoenginetest')

        # Clear all users
-        authd_conn.admin.system.users.remove({})
+        authd_conn.admin.system.users.delete_many({})

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
@@ -255,10 +256,10 @@ class ConnectionTest(unittest.TestCase):

        self.assertRaises(MongoEngineConnectionError, get_connection)
        conn = get_connection('testdb')
-        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
+        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

        db = get_db('testdb')
-        self.assertTrue(isinstance(db, pymongo.database.Database))
+        self.assertIsInstance(db, pymongo.database.Database)
        self.assertEqual(db.name, 'mongoenginetest2')

    def test_register_connection_defaults(self):
@@ -267,7 +268,7 @@ class ConnectionTest(unittest.TestCase):
        register_connection('testdb', 'mongoenginetest', host=None, port=None)

        conn = get_connection('testdb')
-        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
+        self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

    def test_connection_kwargs(self):
        """Ensure that connection kwargs get passed to pymongo."""
@@ -326,7 +327,7 @@ class ConnectionTest(unittest.TestCase):
        if IS_PYMONGO_3:
            c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
            db = get_db()
-            self.assertTrue(isinstance(db, pymongo.database.Database))
+            self.assertIsInstance(db, pymongo.database.Database)
            self.assertEqual(db.name, 'test')
        else:
            # PyMongo < v3.x raises an exception:
@@ -343,7 +344,7 @@ class ConnectionTest(unittest.TestCase):
            self.assertEqual(c._MongoClient__options.replica_set_name,
                             'local-rs')
            db = get_db()
-            self.assertTrue(isinstance(db, pymongo.database.Database))
+            self.assertIsInstance(db, pymongo.database.Database)
            self.assertEqual(db.name, 'test')
        else:
            # PyMongo < v3.x raises an exception:
@@ -364,6 +365,12 @@ class ConnectionTest(unittest.TestCase):
        date_doc = DateDoc.objects.first()
        self.assertEqual(d, date_doc.the_date)

+    def test_read_preference_from_parse(self):
+        if IS_PYMONGO_3:
+            from pymongo import ReadPreference
+            conn = connect(host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred")
+            self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED)
+
    def test_multiple_connection_settings(self):
        connect('mongoenginetest', alias='t1', host="localhost")

@@ -371,8 +378,8 @@ class ConnectionTest(unittest.TestCase):

        mongo_connections = mongoengine.connection._connections
        self.assertEqual(len(mongo_connections.items()), 2)
-        self.assertTrue('t1' in mongo_connections.keys())
-        self.assertTrue('t2' in mongo_connections.keys())
+        self.assertIn('t1', mongo_connections.keys())
+        self.assertIn('t2', mongo_connections.keys())
        if not IS_PYMONGO_3:
            self.assertEqual(mongo_connections['t1'].host, 'localhost')
            self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
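The mocking tests above rely on two equivalent spellings for an in-memory connection, both of which need the mongomock package installed:

from mongoengine import connect

# Either a mongomock:// host...
connect('testdb', host='mongomock://localhost')
# ...or a real-looking host plus is_mock=True:
connect('testdb2', host='mongodb://localhost', is_mock=True, alias='mock2')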
|
@@ -5,6 +5,7 @@ from mongoengine.connection import get_db
|
||||
from mongoengine.context_managers import (switch_db, switch_collection,
|
||||
no_sub_classes, no_dereference,
|
||||
query_counter)
|
||||
from mongoengine.pymongo_support import count_documents
|
||||
|
||||
|
||||
class ContextManagersTest(unittest.TestCase):
|
||||
@@ -89,15 +90,15 @@ class ContextManagersTest(unittest.TestCase):
|
||||
|
||||
with no_dereference(Group) as Group:
|
||||
group = Group.objects.first()
|
||||
self.assertTrue(all([not isinstance(m, User)
|
||||
for m in group.members]))
|
||||
self.assertFalse(isinstance(group.ref, User))
|
||||
self.assertFalse(isinstance(group.generic, User))
|
||||
for m in group.members:
|
||||
self.assertNotIsInstance(m, User)
|
||||
self.assertNotIsInstance(group.ref, User)
|
||||
self.assertNotIsInstance(group.generic, User)
|
||||
|
||||
self.assertTrue(all([isinstance(m, User)
|
||||
for m in group.members]))
|
||||
self.assertTrue(isinstance(group.ref, User))
|
||||
self.assertTrue(isinstance(group.generic, User))
|
||||
for m in group.members:
|
||||
self.assertIsInstance(m, User)
|
||||
self.assertIsInstance(group.ref, User)
|
||||
self.assertIsInstance(group.generic, User)
|
||||
|
||||
def test_no_dereference_context_manager_dbref(self):
|
||||
"""Ensure that DBRef items in ListFields aren't dereferenced.
|
||||
@@ -129,19 +130,17 @@ class ContextManagersTest(unittest.TestCase):
|
||||
group = Group.objects.first()
|
||||
self.assertTrue(all([not isinstance(m, User)
|
||||
for m in group.members]))
|
||||
self.assertFalse(isinstance(group.ref, User))
|
||||
self.assertFalse(isinstance(group.generic, User))
|
||||
self.assertNotIsInstance(group.ref, User)
|
||||
self.assertNotIsInstance(group.generic, User)
|
||||
|
||||
self.assertTrue(all([isinstance(m, User)
|
||||
for m in group.members]))
|
||||
self.assertTrue(isinstance(group.ref, User))
|
||||
self.assertTrue(isinstance(group.generic, User))
|
||||
self.assertIsInstance(group.ref, User)
|
||||
self.assertIsInstance(group.generic, User)
|
||||
|
||||
def test_no_sub_classes(self):
|
||||
class A(Document):
|
||||
x = IntField()
|
||||
y = IntField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class B(A):
|
||||
@@ -152,29 +151,29 @@ class ContextManagersTest(unittest.TestCase):
|
||||
|
||||
A.drop_collection()
|
||||
|
||||
A(x=10, y=20).save()
|
||||
A(x=15, y=30).save()
|
||||
B(x=20, y=40).save()
|
||||
B(x=30, y=50).save()
|
||||
C(x=40, y=60).save()
|
||||
A(x=10).save()
|
||||
A(x=15).save()
|
||||
B(x=20).save()
|
||||
B(x=30).save()
|
||||
C(x=40).save()
|
||||
|
||||
self.assertEqual(A.objects.count(), 5)
|
||||
self.assertEqual(B.objects.count(), 3)
|
||||
self.assertEqual(C.objects.count(), 1)
|
||||
|
||||
with no_sub_classes(A) as A:
|
||||
with no_sub_classes(A):
|
||||
self.assertEqual(A.objects.count(), 2)
|
||||
|
||||
for obj in A.objects:
|
||||
self.assertEqual(obj.__class__, A)
|
||||
|
||||
with no_sub_classes(B) as B:
|
||||
with no_sub_classes(B):
|
||||
self.assertEqual(B.objects.count(), 2)
|
||||
|
||||
for obj in B.objects:
|
||||
self.assertEqual(obj.__class__, B)
|
||||
|
||||
with no_sub_classes(C) as C:
|
||||
with no_sub_classes(C):
|
||||
self.assertEqual(C.objects.count(), 1)
|
||||
|
||||
for obj in C.objects:
|
||||
@@ -185,18 +184,125 @@ class ContextManagersTest(unittest.TestCase):
|
||||
self.assertEqual(B.objects.count(), 3)
|
||||
self.assertEqual(C.objects.count(), 1)
|
||||
|
||||
def test_no_sub_classes_modification_to_document_class_are_temporary(self):
|
||||
class A(Document):
|
||||
x = IntField()
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class B(A):
|
||||
z = IntField()
|
||||
|
||||
self.assertEqual(A._subclasses, ('A', 'A.B'))
|
||||
with no_sub_classes(A):
|
||||
self.assertEqual(A._subclasses, ('A',))
|
||||
self.assertEqual(A._subclasses, ('A', 'A.B'))
|
||||
|
||||
self.assertEqual(B._subclasses, ('A.B',))
|
||||
with no_sub_classes(B):
|
||||
self.assertEqual(B._subclasses, ('A.B',))
|
||||
self.assertEqual(B._subclasses, ('A.B',))
|
||||
|
||||
def test_no_subclass_context_manager_does_not_swallow_exception(self):
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
with self.assertRaises(TypeError):
|
||||
with no_sub_classes(User):
|
||||
raise TypeError()
|
||||
|
||||
def test_query_counter_does_not_swallow_exception(self):
|
||||
|
||||
with self.assertRaises(TypeError):
|
||||
with query_counter() as q:
|
||||
raise TypeError()
|
||||
|
||||
def test_query_counter_temporarily_modifies_profiling_level(self):
|
||||
connect('mongoenginetest')
|
||||
db = get_db()
|
||||
|
||||
initial_profiling_level = db.profiling_level()
|
||||
|
||||
try:
|
||||
NEW_LEVEL = 1
|
||||
db.set_profiling_level(NEW_LEVEL)
|
||||
self.assertEqual(db.profiling_level(), NEW_LEVEL)
|
||||
with query_counter() as q:
|
||||
self.assertEqual(db.profiling_level(), 2)
|
||||
self.assertEqual(db.profiling_level(), NEW_LEVEL)
|
||||
except Exception:
|
||||
db.set_profiling_level(initial_profiling_level) # Ensures it gets reseted no matter the outcome of the test
|
||||
raise
|
||||
|
||||
def test_query_counter(self):
|
||||
connect('mongoenginetest')
|
||||
db = get_db()
|
||||
db.test.find({})
|
||||
|
||||
collection = db.query_counter
|
||||
collection.drop()
|
||||
|
||||
def issue_1_count_query():
|
||||
count_documents(collection, {})
|
||||
|
||||
def issue_1_insert_query():
|
||||
collection.insert_one({'test': 'garbage'})
|
||||
|
||||
def issue_1_find_query():
|
||||
collection.find_one()
|
||||
|
||||
counter = 0
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, counter)
|
||||
self.assertEqual(q, counter) # Ensures previous count query did not get counted
|
||||
|
||||
for _ in range(10):
|
||||
issue_1_insert_query()
|
||||
counter += 1
|
||||
self.assertEqual(q, counter)
|
||||
|
||||
for _ in range(4):
|
||||
issue_1_find_query()
|
||||
counter += 1
|
||||
self.assertEqual(q, counter)
|
||||
|
||||
for _ in range(3):
|
||||
issue_1_count_query()
|
||||
counter += 1
|
||||
self.assertEqual(q, counter)
|
||||
|
||||
def test_query_counter_counts_getmore_queries(self):
|
||||
connect('mongoenginetest')
|
||||
db = get_db()
|
||||
|
||||
collection = db.query_counter
|
||||
collection.drop()
|
||||
|
||||
many_docs = [{'test': 'garbage %s' % i} for i in range(150)]
|
||||
collection.insert_many(many_docs) # first batch of documents contains 101 documents
|
||||
|
||||
with query_counter() as q:
|
||||
self.assertEqual(0, q)
|
||||
self.assertEqual(q, 0)
|
||||
list(collection.find())
|
||||
self.assertEqual(q, 2) # 1st select + 1 getmore
|
||||
|
||||
for i in range(1, 51):
|
||||
db.test.find({}).count()
|
||||
def test_query_counter_ignores_particular_queries(self):
|
||||
connect('mongoenginetest')
|
||||
db = get_db()
|
||||
|
||||
collection = db.query_counter
|
||||
collection.insert_many([{'test': 'garbage %s' % i} for i in range(10)])
|
||||
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, 0)
|
||||
cursor = collection.find()
|
||||
self.assertEqual(q, 0) # cursor wasn't opened yet
|
||||
_ = next(cursor) # opens the cursor and fires the find query
|
||||
self.assertEqual(q, 1)
|
||||
|
||||
cursor.close() # issues a `killcursors` query that is ignored by the context
|
||||
self.assertEqual(q, 1)
|
||||
_ = db.system.indexes.find_one() # queries on db.system.indexes are ignored as well
|
||||
self.assertEqual(q, 1)
|
||||
|
||||
self.assertEqual(50, q)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
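query_counter compares as an integer and counts each server round-trip issued inside the block; a minimal sketch (database name illustrative):

from mongoengine import connect, Document, StringField
from mongoengine.context_managers import query_counter

connect('counter_demo')  # illustrative

class Item(Document):
    name = StringField()

with query_counter() as q:
    assert q == 0
    Item(name='x').save()  # one insert
    Item.objects.first()   # one find
    assert q == 2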
@@ -1,6 +1,360 @@
|
||||
import unittest
|
||||
|
||||
from mongoengine.base.datastructures import StrictDict
|
||||
from mongoengine import Document
|
||||
from mongoengine.base.datastructures import StrictDict, BaseList, BaseDict
|
||||
|
||||
|
||||
class DocumentStub(object):
|
||||
def __init__(self):
|
||||
self._changed_fields = []
|
||||
|
||||
def _mark_as_changed(self, key):
|
||||
self._changed_fields.append(key)
|
||||
|
||||
|
||||
class TestBaseDict(unittest.TestCase):
|
||||
|
||||
@staticmethod
|
||||
def _get_basedict(dict_items):
|
||||
"""Get a BaseList bound to a fake document instance"""
|
||||
fake_doc = DocumentStub()
|
||||
base_list = BaseDict(dict_items, instance=None, name='my_name')
|
||||
base_list._instance = fake_doc # hack to inject the mock, it does not work in the constructor
|
||||
return base_list
|
||||
|
||||
def test___init___(self):
|
||||
class MyDoc(Document):
|
||||
pass
|
||||
|
||||
dict_items = {'k': 'v'}
|
||||
doc = MyDoc()
|
||||
base_dict = BaseDict(dict_items, instance=doc, name='my_name')
|
||||
self.assertIsInstance(base_dict._instance, Document)
|
||||
self.assertEqual(base_dict._name, 'my_name')
|
||||
self.assertEqual(base_dict, dict_items)
|
||||
|
||||
def test_setdefault_calls_mark_as_changed(self):
|
||||
base_dict = self._get_basedict({})
|
||||
base_dict.setdefault('k', 'v')
|
||||
self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
|
||||
|
||||
def test_popitems_calls_mark_as_changed(self):
|
||||
base_dict = self._get_basedict({'k': 'v'})
|
||||
self.assertEqual(base_dict.popitem(), ('k', 'v'))
|
||||
self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
|
||||
self.assertFalse(base_dict)
|
||||
|
||||
def test_pop_calls_mark_as_changed(self):
|
||||
base_dict = self._get_basedict({'k': 'v'})
|
||||
self.assertEqual(base_dict.pop('k'), 'v')
|
||||
self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
|
||||
self.assertFalse(base_dict)
|
||||
|
||||
def test_pop_calls_does_not_mark_as_changed_when_it_fails(self):
|
||||
base_dict = self._get_basedict({'k': 'v'})
|
||||
with self.assertRaises(KeyError):
|
||||
base_dict.pop('X')
|
||||
self.assertFalse(base_dict._instance._changed_fields)
|
||||
|
||||
def test_clear_calls_mark_as_changed(self):
|
||||
base_dict = self._get_basedict({'k': 'v'})
|
||||
base_dict.clear()
|
||||
self.assertEqual(base_dict._instance._changed_fields, ['my_name'])
|
||||
self.assertEqual(base_dict, {})
|
||||
|
||||
def test___delitem___calls_mark_as_changed(self):
|
||||
base_dict = self._get_basedict({'k': 'v'})
|
||||
del base_dict['k']
|
||||
self.assertEqual(base_dict._instance._changed_fields, ['my_name.k'])
|
||||
self.assertEqual(base_dict, {})
|
||||
|
||||
def test___getitem____KeyError(self):
|
||||
base_dict = self._get_basedict({})
|
||||
with self.assertRaises(KeyError):
|
||||
base_dict['new']
|
||||
|
||||
def test___getitem____simple_value(self):
|
||||
base_dict = self._get_basedict({'k': 'v'})
|
||||
base_dict['k'] = 'v'
|
||||
|
||||
def test___getitem____sublist_gets_converted_to_BaseList(self):
|
||||
base_dict = self._get_basedict({'k': [0, 1, 2]})
|
||||
sub_list = base_dict['k']
|
||||
self.assertEqual(sub_list, [0, 1, 2])
|
||||
self.assertIsInstance(sub_list, BaseList)
|
||||
self.assertIs(sub_list._instance, base_dict._instance)
|
||||
self.assertEqual(sub_list._name, 'my_name.k')
|
||||
self.assertEqual(base_dict._instance._changed_fields, [])
|
||||
|
||||
# Challenge mark_as_changed from sublist
|
||||
sub_list[1] = None
|
||||
self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.1'])
|
||||
|
||||
def test___getitem____subdict_gets_converted_to_BaseDict(self):
|
||||
base_dict = self._get_basedict({'k': {'subk': 'subv'}})
|
||||
sub_dict = base_dict['k']
|
||||
self.assertEqual(sub_dict, {'subk': 'subv'})
|
||||
self.assertIsInstance(sub_dict, BaseDict)
|
||||
self.assertIs(sub_dict._instance, base_dict._instance)
|
||||
self.assertEqual(sub_dict._name, 'my_name.k')
|
||||
self.assertEqual(base_dict._instance._changed_fields, [])
|
||||
|
||||
# Challenge mark_as_changed from subdict
|
||||
sub_dict['subk'] = None
|
||||
self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.subk'])

    def test_get_sublist_gets_converted_to_BaseList_just_like__getitem__(self):
        base_dict = self._get_basedict({'k': [0, 1, 2]})
        sub_list = base_dict.get('k')
        self.assertEqual(sub_list, [0, 1, 2])
        self.assertIsInstance(sub_list, BaseList)

    def test_get_returns_the_same_as___getitem__(self):
        base_dict = self._get_basedict({'k': [0, 1, 2]})
        get_ = base_dict.get('k')
        getitem_ = base_dict['k']
        self.assertEqual(get_, getitem_)

    def test_get_default(self):
        base_dict = self._get_basedict({})
        sentinel = object()
        self.assertIsNone(base_dict.get('new'))
        self.assertIs(base_dict.get('new', sentinel), sentinel)

    def test___setitem___calls_mark_as_changed(self):
        base_dict = self._get_basedict({})
        base_dict['k'] = 'v'
        self.assertEqual(base_dict._instance._changed_fields, ['my_name.k'])
        self.assertEqual(base_dict, {'k': 'v'})

    def test_update_calls_mark_as_changed(self):
        base_dict = self._get_basedict({})
        base_dict.update({'k': 'v'})
        self.assertEqual(base_dict._instance._changed_fields, ['my_name'])

    def test___setattr____not_tracked_by_changes(self):
        base_dict = self._get_basedict({})
        base_dict.a_new_attr = 'test'
        self.assertEqual(base_dict._instance._changed_fields, [])

    def test___delattr____tracked_by_changes(self):
        # This is probably a bug, as __setattr__ is not tracked.
        # It is worse still because an attribute could share its name
        # with a key.
        base_dict = self._get_basedict({})
        base_dict.a_new_attr = 'test'
        del base_dict.a_new_attr
        self.assertEqual(base_dict._instance._changed_fields, ['my_name.a_new_attr'])
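

# A sketch of one possible fix for the quirk flagged in the test above -- an
# assumption, not the library's code: make __delattr__ mirror __setattr__ and
# skip change tracking, so an attribute that happens to share its name with a
# key cannot pollute _changed_fields.
class _UntrackedDelattrDictSketch(dict):

    def __delattr__(self, name):
        # Plain attribute deletion, deliberately without _mark_as_changed().
        object.__delattr__(self, name)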


class TestBaseList(unittest.TestCase):

    @staticmethod
    def _get_baselist(list_items):
        """Get a BaseList bound to a fake document instance."""
        fake_doc = DocumentStub()
        base_list = BaseList(list_items, instance=None, name='my_name')
        base_list._instance = fake_doc  # hack to inject the stub; passing it to the constructor does not work
        return base_list
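
    # For reference, a minimal sketch (an assumption for illustration, not
    # necessarily the DocumentStub this suite defines elsewhere) of the fake
    # document the helper above needs: it only has to record what
    # _mark_as_changed() was called with.
    class _DocumentStubSketch(object):

        def __init__(self):
            self._changed_fields = []

        def _mark_as_changed(self, key=None):
            # BaseDict/BaseList notify their owner through this hook; the
            # tests then assert on the recorded field names.
            self._changed_fields.append(key)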

    def test___init___(self):
        class MyDoc(Document):
            pass

        list_items = [True]
        doc = MyDoc()
        base_list = BaseList(list_items, instance=doc, name='my_name')
        self.assertIsInstance(base_list._instance, Document)
        self.assertEqual(base_list._name, 'my_name')
        self.assertEqual(base_list, list_items)

    def test___iter__(self):
        values = [True, False, True, False]
        base_list = BaseList(values, instance=None, name='my_name')
        self.assertEqual(values, list(base_list))

    def test___iter___allow_modification_while_iterating_without_error(self):
        # A regular list allows this, so this subclass must comply with
        # that behaviour (see the plain-list demo below).
        base_list = BaseList([True, False, True, False], instance=None, name='my_name')
        for idx, val in enumerate(base_list):
            if val:
                base_list.pop(idx)
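
    # For reference, the plain-list behaviour being matched above: mutating
    # while iterating is legal but silently skips elements. Illustrative
    # helper, not part of the original suite.
    @staticmethod
    def _plain_list_mutation_demo():
        items = [True, False, True, False]
        for idx, val in enumerate(items):
            if val:
                items.pop(idx)
        assert items == [False, False]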

    def test_append_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        self.assertFalse(base_list._instance._changed_fields)
        base_list.append(True)
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test_subclass_append(self):
        # Because of the way mark_as_changed_wrapper is implemented,
        # it is worth testing subclasses explicitly (see the sketch below).
        class SubBaseList(BaseList):
            pass

        base_list = SubBaseList([], instance=None, name='my_name')
        base_list.append(True)
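
    # For context, a sketch of the wrapper pattern the comment above refers
    # to -- an assumption for illustration, not the shipped code: each
    # mutating method is wrapped so it notifies the owning instance after
    # delegating, which is why subclasses such as SubBaseList need explicit
    # coverage.
    @staticmethod
    def _mark_as_changed_wrapper_sketch(parent_method):
        def wrapper(self, *args, **kwargs):
            result = parent_method(self, *args, **kwargs)
            self._mark_as_changed()
            return result
        return wrapper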

    def test___getitem__using_simple_index(self):
        base_list = self._get_baselist([0, 1, 2])
        self.assertEqual(base_list[0], 0)
        self.assertEqual(base_list[1], 1)
        self.assertEqual(base_list[-1], 2)

    def test___getitem__using_slice(self):
        base_list = self._get_baselist([0, 1, 2])
        self.assertEqual(base_list[1:3], [1, 2])
        self.assertEqual(base_list[0:3:2], [0, 2])

    def test___getitem___using_slice_returns_list(self):
        # Bug: slicing does not properly handle the instance
        # or the mark_as_changed behaviour.
        base_list = self._get_baselist([0, 1, 2])
        sliced = base_list[1:3]
        self.assertEqual(sliced, [1, 2])
        self.assertIsInstance(sliced, list)
        self.assertEqual(base_list._instance._changed_fields, [])
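
    # A sketch of why the slice above comes back as a plain list -- an
    # assumption about the implementation: __getitem__ only re-wraps single
    # elements, so a slice falls through with no _instance bound, which is
    # exactly the bug the comment above points at.
    @staticmethod
    def _getitem_slice_sketch(base_list, key):
        value = list.__getitem__(base_list, key)
        if isinstance(key, slice):
            return value  # plain list: nested changes will not be tracked
        if isinstance(value, list) and not isinstance(value, BaseList):
            value = BaseList(value, None, '%s.%s' % (base_list._name, key))
            value._instance = base_list._instance
        return value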

    def test___getitem__sublist_returns_BaseList_bound_to_instance(self):
        base_list = self._get_baselist(
            [
                [1, 2],
                [3, 4]
            ]
        )
        sub_list = base_list[0]
        self.assertEqual(sub_list, [1, 2])
        self.assertIsInstance(sub_list, BaseList)
        self.assertIs(sub_list._instance, base_list._instance)
        self.assertEqual(sub_list._name, 'my_name.0')
        self.assertEqual(base_list._instance._changed_fields, [])

        # Challenge mark_as_changed from the sublist
        sub_list[1] = None
        self.assertEqual(base_list._instance._changed_fields, ['my_name.0.1'])

    def test___getitem__subdict_returns_BaseDict_bound_to_instance(self):
        base_list = self._get_baselist(
            [
                {'subk': 'subv'}
            ]
        )
        sub_dict = base_list[0]
        self.assertEqual(sub_dict, {'subk': 'subv'})
        self.assertIsInstance(sub_dict, BaseDict)
        self.assertIs(sub_dict._instance, base_list._instance)
        self.assertEqual(sub_dict._name, 'my_name.0')
        self.assertEqual(base_list._instance._changed_fields, [])

        # Challenge mark_as_changed from the subdict
        sub_dict['subk'] = None
        self.assertEqual(base_list._instance._changed_fields, ['my_name.0.subk'])

    def test_extend_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        base_list.extend([True])
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test_insert_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        base_list.insert(0, True)
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test_remove_calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list.remove(True)
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test_remove_does_not_mark_as_changed_when_it_fails(self):
        base_list = self._get_baselist([True])
        with self.assertRaises(ValueError):
            base_list.remove(False)
        self.assertFalse(base_list._instance._changed_fields)

    def test_pop_calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list.pop()
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test_reverse_calls_mark_as_changed(self):
        base_list = self._get_baselist([True, False])
        base_list.reverse()
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test___delitem___calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        del base_list[0]
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test___setitem___with_full_slice_calls_mark_as_changed(self):
        base_list = self._get_baselist([])
        base_list[:] = [0, 1]  # uses __setslice__ under py2 and __setitem__ under py3
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])
        self.assertEqual(base_list, [0, 1])

    def test___setitem___with_partial_slice_calls_mark_as_changed(self):
        base_list = self._get_baselist([0, 1, 2])
        base_list[0:2] = [1, 0]  # uses __setslice__ under py2 and __setitem__ under py3
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])
        self.assertEqual(base_list, [1, 0, 2])

    def test___setitem___with_step_slice_calls_mark_as_changed(self):
        base_list = self._get_baselist([0, 1, 2])
        base_list[0:3:2] = [-1, -2]  # uses __setitem__ under both py2 and py3
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])
        self.assertEqual(base_list, [-1, 1, -2])

    def test___setitem___with_slice(self):
        base_list = self._get_baselist([0, 1, 2, 3, 4, 5])
        base_list[0:6:2] = [None, None, None]
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])
        self.assertEqual(base_list, [None, 1, None, 3, None, 5])

    def test___setitem___item_0_calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list[0] = False
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])
        self.assertEqual(base_list, [False])

    def test___setitem___item_1_calls_mark_as_changed(self):
        base_list = self._get_baselist([True, True])
        base_list[1] = False
        self.assertEqual(base_list._instance._changed_fields, ['my_name.1'])
        self.assertEqual(base_list, [True, False])
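
    # The asymmetry between the two tests above (index 0 marks 'my_name',
    # index 1 marks 'my_name.1') suggests the changed-field name is built
    # with a truthiness check along these lines -- an assumption, not the
    # shipped code -- so index 0 accidentally collapses to the whole field:
    @staticmethod
    def _changed_key_sketch(base_name, key):
        if key:
            return '%s.%s' % (base_name, key)
        return base_name  # key == 0 falls through to here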

    def test___delslice___calls_mark_as_changed(self):
        base_list = self._get_baselist([0, 1])
        del base_list[0:1]
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])
        self.assertEqual(base_list, [1])

    def test___iadd___calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        base_list += [False]
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test___imul___calls_mark_as_changed(self):
        base_list = self._get_baselist([True])
        self.assertEqual(base_list._instance._changed_fields, [])
        base_list *= 2
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test_sort_does_not_mark_as_changed_when_it_fails(self):
        base_list = self._get_baselist([True])
        with self.assertRaises(TypeError):
            base_list.sort(key=1)

        self.assertEqual(base_list._instance._changed_fields, [])

    def test_sort_calls_mark_as_changed(self):
        base_list = self._get_baselist([True, False])
        base_list.sort()
        self.assertEqual(base_list._instance._changed_fields, ['my_name'])

    def test_sort_calls_with_key(self):
        base_list = self._get_baselist([1, 2, 11])
        base_list.sort(key=lambda i: str(i))
        self.assertEqual(base_list, [1, 11, 2])


class TestStrictDict(unittest.TestCase):
@@ -72,8 +426,8 @@ class TestStrictDict(unittest.TestCase):

     def test_mappings_protocol(self):
         d = self.dtype(a=1, b=2)
-        assert dict(d) == {'a': 1, 'b': 2}
-        assert dict(**d) == {'a': 1, 'b': 2}
+        self.assertEqual(dict(d), {'a': 1, 'b': 2})
+        self.assertEqual(dict(**d), {'a': 1, 'b': 2})


 if __name__ == '__main__':

@@ -2,6 +2,7 @@
 import unittest

 from bson import DBRef, ObjectId
+from six import iteritems

 from mongoengine import *
 from mongoengine.connection import get_db
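
# Why the six import above: dict.iteritems() exists only under Python 2, so
# the assertions below switch to six.iteritems, which picks the right
# spelling per interpreter. A rough sketch of what it does:
def _iteritems_sketch(d):
    try:
        return iter(d.iteritems())  # Python 2: lazy iterator over items
    except AttributeError:
        return iter(d.items())      # Python 3: items() returns a view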

@@ -200,8 +201,8 @@ class FieldTest(unittest.TestCase):
         group = Group(author=user, members=[user]).save()

         raw_data = Group._get_collection().find_one()
-        self.assertTrue(isinstance(raw_data['author'], DBRef))
-        self.assertTrue(isinstance(raw_data['members'][0], DBRef))
+        self.assertIsInstance(raw_data['author'], DBRef)
+        self.assertIsInstance(raw_data['members'][0], DBRef)
         group = Group.objects.first()

         self.assertEqual(group.author, user)
@@ -224,8 +225,8 @@ class FieldTest(unittest.TestCase):
         self.assertEqual(group.members, [user])

         raw_data = Group._get_collection().find_one()
-        self.assertTrue(isinstance(raw_data['author'], ObjectId))
-        self.assertTrue(isinstance(raw_data['members'][0], ObjectId))
+        self.assertIsInstance(raw_data['author'], ObjectId)
+        self.assertIsInstance(raw_data['members'][0], ObjectId)

     def test_recursive_reference(self):
         """Ensure that ReferenceFields can reference their own documents.
@@ -469,7 +470,7 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 4)

             for m in group_obj.members:
-                self.assertTrue('User' in m.__class__.__name__)
+                self.assertIn('User', m.__class__.__name__)

         # Document select_related
         with query_counter() as q:
@@ -485,7 +486,7 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 4)

             for m in group_obj.members:
-                self.assertTrue('User' in m.__class__.__name__)
+                self.assertIn('User', m.__class__.__name__)

         # Queryset select_related
         with query_counter() as q:
@@ -502,7 +503,7 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 4)

             for m in group_obj.members:
-                self.assertTrue('User' in m.__class__.__name__)
+                self.assertIn('User', m.__class__.__name__)

         UserA.drop_collection()
         UserB.drop_collection()
@@ -560,7 +561,7 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 4)

             for m in group_obj.members:
-                self.assertTrue('User' in m.__class__.__name__)
+                self.assertIn('User', m.__class__.__name__)

         # Document select_related
         with query_counter() as q:
@@ -576,7 +577,7 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 4)

             for m in group_obj.members:
-                self.assertTrue('User' in m.__class__.__name__)
+                self.assertIn('User', m.__class__.__name__)

         # Queryset select_related
         with query_counter() as q:
@@ -593,7 +594,7 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 4)

             for m in group_obj.members:
-                self.assertTrue('User' in m.__class__.__name__)
+                self.assertIn('User', m.__class__.__name__)

         UserA.drop_collection()
         UserB.drop_collection()
@@ -632,8 +633,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue(isinstance(m, User))
+            for k, m in iteritems(group_obj.members):
+                self.assertIsInstance(m, User)

         # Document select_related
         with query_counter() as q:
@@ -645,8 +646,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue(isinstance(m, User))
+            for k, m in iteritems(group_obj.members):
+                self.assertIsInstance(m, User)

         # Queryset select_related
         with query_counter() as q:
@@ -659,8 +660,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue(isinstance(m, User))
+            for k, m in iteritems(group_obj.members):
+                self.assertIsInstance(m, User)

         User.drop_collection()
         Group.drop_collection()
@@ -714,8 +715,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue('User' in m.__class__.__name__)
+            for k, m in iteritems(group_obj.members):
+                self.assertIn('User', m.__class__.__name__)

         # Document select_related
         with query_counter() as q:
@@ -730,8 +731,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue('User' in m.__class__.__name__)
+            for k, m in iteritems(group_obj.members):
+                self.assertIn('User', m.__class__.__name__)

         # Queryset select_related
         with query_counter() as q:
@@ -747,8 +748,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue('User' in m.__class__.__name__)
+            for k, m in iteritems(group_obj.members):
+                self.assertIn('User', m.__class__.__name__)

         Group.objects.delete()
         Group().save()
@@ -805,8 +806,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue(isinstance(m, UserA))
+            for k, m in iteritems(group_obj.members):
+                self.assertIsInstance(m, UserA)

         # Document select_related
         with query_counter() as q:
@@ -821,8 +822,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue(isinstance(m, UserA))
+            for k, m in iteritems(group_obj.members):
+                self.assertIsInstance(m, UserA)

         # Queryset select_related
         with query_counter() as q:
@@ -838,8 +839,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue(isinstance(m, UserA))
+            for k, m in iteritems(group_obj.members):
+                self.assertIsInstance(m, UserA)

         UserA.drop_collection()
         Group.drop_collection()
@@ -893,8 +894,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue('User' in m.__class__.__name__)
+            for k, m in iteritems(group_obj.members):
+                self.assertIn('User', m.__class__.__name__)

         # Document select_related
         with query_counter() as q:
@@ -909,8 +910,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue('User' in m.__class__.__name__)
+            for k, m in iteritems(group_obj.members):
+                self.assertIn('User', m.__class__.__name__)

         # Queryset select_related
         with query_counter() as q:
@@ -926,8 +927,8 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
-                self.assertTrue('User' in m.__class__.__name__)
+            for k, m in iteritems(group_obj.members):
+                self.assertIn('User', m.__class__.__name__)

         Group.objects.delete()
         Group().save()
@@ -1029,7 +1030,6 @@ class FieldTest(unittest.TestCase):
         self.assertEqual(type(foo.bar), Bar)
         self.assertEqual(type(foo.baz), Baz)

-
     def test_document_reload_reference_integrity(self):
         """
         Ensure reloading a document with multiple similar id
@@ -1065,7 +1065,6 @@ class FieldTest(unittest.TestCase):
         self.assertEqual(msg.author, user)
         self.assertEqual(msg.author.name, 'new-name')

-
     def test_list_lookup_not_checked_in_map(self):
         """Ensure we dereference list data correctly
         """
@@ -1209,10 +1208,10 @@ class FieldTest(unittest.TestCase):
         # Can't use query_counter across databases - so test the _data object
         book = Book.objects.first()
-        self.assertFalse(isinstance(book._data['author'], User))
+        self.assertNotIsInstance(book._data['author'], User)

         book.select_related()
-        self.assertTrue(isinstance(book._data['author'], User))
+        self.assertIsInstance(book._data['author'], User)

     def test_non_ascii_pk(self):
         """
@@ -1287,5 +1286,6 @@ class FieldTest(unittest.TestCase):

         self.assertEqual(q, 2)

+
 if __name__ == '__main__':
     unittest.main()
@@ -2,7 +2,7 @@ import unittest

 from pymongo import ReadPreference

-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine.pymongo_support import IS_PYMONGO_3

 if IS_PYMONGO_3:
     from pymongo import MongoClient
@@ -47,5 +47,6 @@ class ConnectionTest(unittest.TestCase):

         self.assertEqual(conn.read_preference, READ_PREF)

+
 if __name__ == '__main__':
     unittest.main()
@@ -39,7 +39,6 @@ class SignalTests(unittest.TestCase):
         def post_init(cls, sender, document, **kwargs):
             signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created))

-
         @classmethod
         def pre_save(cls, sender, document, **kwargs):
             signal_output.append('pre_save signal, %s' % document)
@@ -247,7 +246,7 @@ class SignalTests(unittest.TestCase):
         def load_existing_author():
             a = self.Author(name='Bill Shakespeare')
             a.save()
-            self.get_signal_output(lambda: None) # eliminate signal output
+            self.get_signal_output(lambda: None)  # eliminate signal output
             a1 = self.Author.objects(name='Bill Shakespeare')[0]

         self.assertEqual(self.get_signal_output(create_author), [
@@ -431,5 +430,6 @@ class SignalTests(unittest.TestCase):
             {}
         ])

+
 if __name__ == '__main__':
     unittest.main()
tests/test_utils.py  (new file, 38 lines)
@@ -0,0 +1,38 @@
+import unittest
+import re
+
+from mongoengine.base.utils import LazyRegexCompiler
+
+signal_output = []
+
+
+class LazyRegexCompilerTest(unittest.TestCase):
+
+    def test_lazy_regex_compiler_verify_laziness_of_descriptor(self):
+        class UserEmail(object):
+            EMAIL_REGEX = LazyRegexCompiler('@', flags=32)
+
+        descriptor = UserEmail.__dict__['EMAIL_REGEX']
+        self.assertIsNone(descriptor._compiled_regex)
+
+        regex = UserEmail.EMAIL_REGEX
+        self.assertEqual(regex, re.compile('@', flags=32))
+        self.assertEqual(regex.search('user@domain.com').group(), '@')
+
+        user_email = UserEmail()
+        self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX)
+
+    def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self):
+        class UserEmail(object):
+            EMAIL_REGEX = LazyRegexCompiler('@')
+
+        user_email = UserEmail()
+        with self.assertRaises(AttributeError):
+            user_email.EMAIL_REGEX = re.compile('@')
+
+    def test_lazy_regex_compiler_verify_can_override_class_attr(self):
+        class UserEmail(object):
+            EMAIL_REGEX = LazyRegexCompiler('@')
+
+        UserEmail.EMAIL_REGEX = re.compile('cookies')
+        self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies')
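
# For orientation, a sketch of the descriptor pinned down by the tests above
# -- an assumption matching the observed behaviour, not necessarily the
# shipped mongoengine.base.utils.LazyRegexCompiler: compile on first access,
# cache the result, and forbid per-instance assignment.
class _LazyRegexCompilerSketch(object):

    def __init__(self, pattern, flags=0):
        self._pattern = pattern
        self._flags = flags
        self._compiled_regex = None

    def __get__(self, instance, owner):
        if self._compiled_regex is None:
            self._compiled_regex = re.compile(self._pattern, self._flags)
        return self._compiled_regex

    def __set__(self, instance, value):
        # Only instance attribute writes go through __set__; assigning on
        # the class itself still works, which is what the third test checks.
        raise AttributeError('Can not set attribute LazyRegexCompiler')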

@@ -1,18 +1,20 @@
+import operator
 import unittest

 from nose.plugins.skip import SkipTest

 from mongoengine import connect
-from mongoengine.connection import get_db, get_connection
-from mongoengine.python_support import IS_PYMONGO_3
+from mongoengine.connection import get_db
+from mongoengine.mongodb_support import get_mongodb_version, MONGODB_26, MONGODB_3, MONGODB_32, MONGODB_34
+from mongoengine.pymongo_support import IS_PYMONGO_3


-MONGO_TEST_DB = 'mongoenginetest'
+MONGO_TEST_DB = 'mongoenginetest'  # standard name for the test database


 class MongoDBTestCase(unittest.TestCase):
     """Base class for tests that need a mongodb connection
-    db is being dropped automatically
+    It ensures that the db is clean at the beginning and dropped at the end automatically
     """

     @classmethod
@@ -26,41 +28,57 @@ class MongoDBTestCase(unittest.TestCase):
         cls._connection.drop_database(MONGO_TEST_DB)


-def get_mongodb_version():
-    """Return the version tuple of the MongoDB server that the default
-    connection is connected to.
-    """
-    return tuple(get_connection().server_info()['versionArray'])
+def get_as_pymongo(doc):
+    """Fetch the pymongo version of a certain Document"""
+    return doc.__class__.objects.as_pymongo().get(id=doc.id)
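
# Usage sketch for get_as_pymongo (illustrative, assuming an active test
# connection and a saved document `person` with a name field):
#
#     raw = get_as_pymongo(person)
#     assert raw == {'_id': person.id, 'name': 'John'}
#
# It is handy for asserting on the exact on-disk representation rather than
# on the dereferenced Document.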


-def _decorated_with_ver_requirement(func, ver_tuple):
+def _decorated_with_ver_requirement(func, mongo_version_req, oper):
+    """Return a given function decorated with the version requirement
+    for a particular MongoDB version tuple.
+
+    :param mongo_version_req: The mongodb version requirement (tuple(int, int))
+    :param oper: The operator to apply (e.g. operator.ge)
+    """
     def _inner(*args, **kwargs):
-        mongodb_ver = get_mongodb_version()
-        if mongodb_ver >= ver_tuple:
+        mongodb_v = get_mongodb_version()
+        if oper(mongodb_v, mongo_version_req):
             return func(*args, **kwargs)

-        raise SkipTest('Needs MongoDB v{}+'.format(
-            '.'.join([str(v) for v in ver_tuple])
-        ))
+        raise SkipTest('Needs MongoDB v{}+'.format('.'.join(str(n) for n in mongo_version_req)))

     _inner.__name__ = func.__name__
     _inner.__doc__ = func.__doc__

     return _inner


-def needs_mongodb_v26(func):
+def requires_mongodb_gte_34(func):
+    """Raise a SkipTest exception if we're working with MongoDB version
+    lower than v3.4.
+    """
+    return _decorated_with_ver_requirement(func, MONGODB_34, oper=operator.ge)
+
+
+def requires_mongodb_lte_32(func):
+    """Raise a SkipTest exception if we're working with MongoDB version
+    greater than v3.2.
+    """
+    return _decorated_with_ver_requirement(func, MONGODB_32, oper=operator.le)
+
+
+def requires_mongodb_gte_26(func):
     """Raise a SkipTest exception if we're working with MongoDB version
     lower than v2.6.
     """
-    return _decorated_with_ver_requirement(func, (2, 6))
+    return _decorated_with_ver_requirement(func, MONGODB_26, oper=operator.ge)


-def needs_mongodb_v3(func):
+def requires_mongodb_gte_3(func):
     """Raise a SkipTest exception if we're working with MongoDB version
     lower than v3.0.
     """
-    return _decorated_with_ver_requirement(func, (3, 0))
+    return _decorated_with_ver_requirement(func, MONGODB_3, oper=operator.ge)
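
# Usage sketch for the version-gate decorators above (illustrative test case,
# not part of the suite): the decorated test runs only when the connected
# server satisfies the requirement; otherwise the decorator raises SkipTest
# before the body executes.
class _VersionGatedTestSketch(MongoDBTestCase):

    @requires_mongodb_gte_26
    def test_requires_mongodb_26_feature(self):
        self.assertTrue(True)  # only reached on MongoDB >= 2.6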


def skip_pymongo3(f):
    """Raise a SkipTest exception if we're running a test against