Compare commits

...

227 Commits

Author SHA1 Message Date
Omer Katz
50fb5d83f1 Added landscape.io badge. 2014-08-10 18:26:18 +03:00
Wilson Júnior
fda672f806 Merge pull request #727 from DavidBord/fix-725
fix-#725: queryset delete() should return the number of deleted objects
2014-08-10 11:43:59 -03:00
Omer Katz
2bf783b04d Added PyPy3 to the build matrix. 2014-08-10 17:01:18 +03:00
DavidBord
2f72b23a0d fix-#725: queryset delete() should return the number of deleted objects 2014-08-10 14:58:39 +03:00
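A minimal sketch of the fixed behaviour (database and model names are hypothetical)::

    from mongoengine import Document, StringField, connect

    connect('demo_db')

    class BlogPost(Document):
        title = StringField()

    # With #725 merged, delete() returns the number of removed documents.
    n = BlogPost.objects(title__startswith='Old').delete()
    print(n)
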
Yohan Graterol
174d964553 Update changelog.rst 2014-08-06 01:54:42 -05:00
Yohan Graterol
cf8677248e Merge pull request #723 from DavidBord/fix-620
Fix 620
2014-08-06 01:53:28 -05:00
DavidBord
1e6a3163af fix-#620: saving document doesn't create new fields in existing collection 2014-08-05 17:29:14 +03:00
DavidBord
e008919978 fix-#399: Not overriding default values when loading a subset of fields 2014-08-05 14:34:54 +03:00
Wilson Júnior
4814066c67 Merge pull request #709 from wpjunior/cached-reference-field
CachedReferenceField implementation
2014-08-03 21:38:06 -03:00
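A minimal sketch of the new field, based on the implementation commits below (names hypothetical)::

    from mongoengine import CachedReferenceField, Document, StringField, connect

    connect('demo_db')

    class Author(Document):
        name = StringField()

    class Book(Document):
        # Each Book stores a cached copy of the referenced Author's 'name',
        # avoiding a dereference on read; see auto_sync and sync_all below.
        author = CachedReferenceField(Author, fields=['name'])
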
Wilson Junior
f17f8b48c2 small fixes for python2.6 2014-08-03 18:59:50 -04:00
Yohan Graterol
ab0aec0ac5 Merge pull request #720 from pashadia/master
Fixed typo.
2014-08-03 14:04:15 -05:00
pashadia
b49a641ba5 Fixed typo. 2014-08-03 20:44:46 +03:00
Yohan Graterol
2f50051426 Merge pull request #718 from mbalasso/patch-1
Update README.rst
2014-08-01 10:46:50 -05:00
Matteo Balasso
43cc32db40 Update README.rst 2014-08-01 16:35:53 +02:00
Wilson Júnior
b4d6f6b947 added documentation about CachedReferenceField 2014-07-30 09:32:33 -03:00
Omer Katz
71ff533623 Updated Django development version in the build matrix. 2014-07-30 02:02:21 +03:00
Wilson Júnior
e33a5bbef5 fixes for python2.6 2014-07-26 07:24:04 -03:00
Wilson Júnior
6c0112c2be refs #709, added support to disable auto_sync 2014-07-25 18:12:26 -03:00
Wilson Júnior
15bbf26b93 refs #709, fix typos 2014-07-25 08:48:24 -03:00
Wilson Júnior
87c97efce0 refs #709, added CachedReferenceField.sync_all to sync all documents on demand 2014-07-25 08:44:59 -03:00
Wilson Júnior
6c4aee1479 added CachedReferenceField restriction to use in EmbeddedDocument 2014-07-17 13:42:34 -03:00
Wilson Júnior
73549a9044 fixes for rebase branch 2014-07-17 09:41:06 -03:00
Wilson Júnior
30fdd3e184 Added initial CachedReferenceField 2014-07-16 10:32:43 -03:00
Wilson Júnior
c97eb5d63f Added retry in apt-get operations for travis 2014-07-14 16:05:49 -03:00
Wilson Júnior
5729c7d5e7 Merge branch 'master' of https://github.com/MongoEngine/mongoengine 2014-07-14 16:00:26 -03:00
Wilson Júnior
d77b13efcb Merge pull request #703 from wpjunior/aggregate-framework
Simple aggregation framework wrapper
2014-07-14 14:29:59 -03:00
Wilson Júnior
c43faca7b9 refs #703, added changelog 2014-07-13 09:55:46 -03:00
Wilson Júnior
892ddd5724 added a wrapper to aggregate in Queryset.aggregate 2014-07-12 23:18:08 -03:00
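A brief sketch of the wrapper, reusing the hypothetical BlogPost model from the earlier sketch::

    pipeline = [{'$group': {'_id': '$title', 'count': {'$sum': 1}}}]
    for row in BlogPost.objects.aggregate(*pipeline):
        print(row)  # raw aggregation result documents straight from PyMongo
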
Wilson Júnior
a9de779f33 Merge pull request #701 from yograterol/master
Remove allow_failures from .travis.yml file
2014-07-08 14:21:39 -03:00
Yohan Graterol
1c2f016ba0 Remove allow_failures from .travis.yml file 2014-07-08 10:25:12 -05:00
Wilson Júnior
7b4d9140af merge #700 2014-07-08 09:19:15 -03:00
Wilson Júnior
c1fc87ff4e added entry in changelog 2014-07-08 09:16:01 -03:00
Wilson Júnior
cd5ea5d4e0 testing a travis with mongodb 2.6 2014-07-08 08:49:03 -03:00
Wilson Júnior
30c01089f5 added ordering support for text queries 2014-07-08 08:38:41 -03:00
Wilson Júnior
89825a2b21 added skip for older mongodb 2014-07-07 23:45:44 -03:00
Wilson Júnior
a743b75bb4 fixed a order in command 2014-07-07 21:02:13 -03:00
Wilson Júnior
f7ebf8dedd Added support for text search and text_score. 2014-07-07 20:24:37 -03:00
Yohan Graterol
f6220cab3b Merge pull request #697 from nleite/master
to_json not resolving db_fields #654
2014-07-07 17:39:34 -05:00
Norberto
0c5e1c4138 adding myself to authors 2014-07-07 22:07:01 +02:00
Norberto
03fe431f1a merge with origin done 2014-07-07 21:59:42 +02:00
Norberto
a8e4554fec Update change log on #697 merge 2014-07-07 19:27:39 +02:00
Norberto
e81b09b9aa adding capability to extract json (to_json) using the field names instead of the defined db_names, if those are set 2014-07-06 20:49:19 +02:00
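A short sketch of what this enables, assuming the keyword is named use_db_field as in the document.py diff below::

    class Person(Document):
        name = StringField(db_field='n')

    Person(name='Ada').to_json()                    # keys use db names, e.g. {"n": "Ada"}
    Person(name='Ada').to_json(use_db_field=False)  # keys use field names, e.g. {"name": "Ada"}
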
Yohan Graterol
c6e846e0ae Merge pull request #696 from lexqt/fix_django_17_compat
Fix tests for django 1.7
2014-07-06 12:43:12 -05:00
Aleksey Porfirov
03dcfb5c4b Update changelog 2014-07-06 12:27:34 +04:00
Aleksey Porfirov
3e54da03e2 Fix MongoTestCase and add test for it 2014-07-05 21:35:31 +04:00
Aleksey Porfirov
c4b3196917 Fix MongoTestCase and add test for it 2014-07-05 21:13:25 +04:00
Aleksey Porfirov
0d81e7933e Prevent accessing not yet configured settings in django.MongoTestCase 2014-07-05 00:06:10 +04:00
Aleksey Porfirov
b2a2735034 Update AUTHORS 2014-07-04 22:32:07 +04:00
Aleksey Porfirov
f865c5de90 Fix tests for Django 1.7 2014-07-04 22:30:29 +04:00
Yohan Graterol
4159369e8b Merge pull request #690 from claymation/select-related
Follow ReferenceFields in EmbeddedDocuments with select_related
2014-07-03 15:28:09 -05:00
Clay McClure
170693cf0b Follow ReferenceFields in EmbeddedDocuments with select_related
For the following structure:

    class Playlist(Document):
        items = ListField(EmbeddedDocumentField("PlaylistItem"))

    class PlaylistItem(EmbeddedDocument):
        song = ReferenceField("Song")

    class Song(Document):
        title = StringField()

this patch prevents the N+1 queries otherwise required to fetch all
the `Song` instances referenced by all the `PlaylistItem`s.
2014-07-03 13:14:45 -04:00
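A usage sketch for the models above (connection details hypothetical)::

    from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                             ListField, ReferenceField, StringField, connect)

    connect('demo_db')

    class Song(Document):
        title = StringField()

    class PlaylistItem(EmbeddedDocument):
        song = ReferenceField(Song)

    class Playlist(Document):
        items = ListField(EmbeddedDocumentField(PlaylistItem))

    # Dereference every referenced Song in bulk rather than one query per item.
    playlists = Playlist.objects.select_related()
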
Omer Katz
4e7b5d4af8 Added @rochacbruno to the AUTHORS file. #692 2014-07-03 17:35:55 +03:00
Omer Katz
67bf789fcf Merge pull request #692 from rochacbruno/master
Updates with no operator should default to $set Fix #667
2014-07-03 09:02:12 +03:00
Omer Katz
f5cf616c2f Merge pull request #635 from SergeChmelev/master
Fix for post_init signal to receive correct state of _created flag.
2014-07-03 02:38:52 +03:00
Bruno Rocha
7975f19817 Update querying.rst 2014-07-02 18:56:42 -03:00
Serge Chmelev
017602056d Add testcase 2014-07-03 01:48:19 +04:00
Serge Chmelev
c63f43854b Fix setting of _created flag in base.Document constructor 2014-07-03 01:48:19 +04:00
Serge Chmelev
5cc71ec2ad Fix for post_init signal to receive correct state of _created flag. 2014-07-03 01:48:19 +04:00
Omer Katz
80e81f8475 Python 2.6 requires positioning by number of fragments in format.
This commit will restore Python 2.6 compatibility.
2014-07-02 23:22:06 +03:00
Omer Katz
3685c8e015 Allowed failures for Django development version. 2014-07-02 22:50:58 +03:00
Bruno Rocha
99e943c365 Updates with no operator should default to $set Fix #667 2014-07-02 14:39:29 -03:00
Yohan Graterol
21818e71f5 Revert change in .travis.yml: Delete apt cache 2014-06-30 08:48:04 -05:00
Ross Lawley
bcc6d25e21 Merge branch 'master' of github.com:MongoEngine/mongoengine 2014-06-30 10:30:12 +01:00
Ross Lawley
7b885ee0d3 Fix StrictDict repr 2014-06-30 10:29:28 +01:00
Omer Katz
c10e808a4f Fixed requirements file to fit the new PyMongo>=2.7.1 requirement. 2014-06-30 11:30:51 +03:00
Omer Katz
54e9be0ed8 Merge pull request #689 from brianhelba/pymongo_2.5
Make requirement for PyMongo>=2.5 more consistent
2014-06-30 11:30:15 +03:00
Yohan Graterol
938cdf316a Added cache for apt in Travis 2014-06-30 00:14:47 -05:00
Yohan Graterol
27c33911e6 Update .travis.yml 2014-06-30 00:09:28 -05:00
Yohan Graterol
e88f8759e7 Replace before_script for before_install 2014-06-29 23:33:30 -05:00
Yohan Graterol
f2992e3165 Travis problem with before_script 2014-06-29 23:31:08 -05:00
Yohan Graterol
c71fd1ee3b Before_script fixed. 2014-06-29 23:29:10 -05:00
Yohan Graterol
fb45b19fdc Enabling textSearch for build in Travis 2014-06-29 23:26:02 -05:00
Brian Helba
c4ea8d4942 Make requirement for PyMongo>=2.5 more consistent
Commit 7aa1f47378 requires PyMongo >= v2.5.
This updates the requirements file to make this requirement explicit to
package managers.

Commit 29309dac9a removed some legacy
compatibility code that would run only with versions of PyMongo < 2.1. The
options 'is_slave' and 'slaves' for register_connection were only used in
this compatibility code, so they are removed too.
2014-06-30 00:05:34 -04:00
Yohan Graterol
646aa131ef Corrected Travis config syntax 2014-06-29 22:58:10 -05:00
Yohan Graterol
0adb40bf92 Merge pull request #684 from brianhelba/doc-spelling
Fix some minor spelling and grammar in documentation
2014-06-29 22:41:23 -05:00
Brian Helba
17d6014bf1 Fix some minor spelling and grammar in documentation 2014-06-29 23:07:28 -04:00
Yohan Graterol
ff57cd4eaf Merge pull request #680 from claymation/text-index-specs
Include preliminary support for text indexes
2014-06-27 22:18:17 -05:00
Clay McClure
74bd7c3744 Include preliminary support for text indexes
To index a text field, prefix the field name with `$`, as in `$title`.
2014-06-27 14:48:32 -04:00
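A minimal sketch of the syntax, mirroring the defining-documents change further down::

    class Page(Document):
        title = StringField()
        meta = {'indexes': ['$title']}  # the '$' prefix requests a text index
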
Omer Katz
cfbb283f85 Added Django 1.7RC1 to the build process and excluded it from running on Python 2.6. 2014-06-27 16:48:38 +03:00
Omer Katz
74a3c4451b using() was added in 0.9. Not 0.8. 2014-06-27 16:35:26 +03:00
Ross Lawley
be3643c962 Added elemMatch operator as well - match is too obscure #653 2014-06-27 13:39:47 +01:00
Ross Lawley
f4aa546af8 Added support for progressive JPEG #486 #548 2014-06-27 12:54:32 +01:00
Ross Lawley
67b876a7f4 Merge pull request #548 from yograterol/feature-progressive-jpeg
Feature for progressive JPEG. Issue #486
2014-06-27 12:53:51 +01:00
Ross Lawley
94e177c0ef Allow strings to be used in index creation #677 2014-06-27 12:49:31 +01:00
Ross Lawley
1bd83cc9bc Merge branch 'master' into pr/675 2014-06-27 12:48:28 +01:00
Ross Lawley
ecda3f4a7d Fixed EmbeddedDoc weakref proxy issue #592 2014-06-27 12:42:45 +01:00
Ross Lawley
8f972a965d Merge branch 'master' into pr/592 2014-06-27 12:36:39 +01:00
Ross Lawley
0f051fc57c Changelog & Authors #583 2014-06-27 12:33:19 +01:00
Ross Lawley
c3f8925f46 Merge pull request #583 from Gerrrr/distinct_bug
Fixed bug in distinct method
2014-06-27 12:22:09 +01:00
Ross Lawley
5d0cab2052 Merge branch 'master' into pr/539
Conflicts:
	mongoengine/base/datastructures.py
2014-06-27 12:20:44 +01:00
Ross Lawley
4d7492f682 Changelog & Author updates #425 #507 2014-06-27 12:10:17 +01:00
Ross Lawley
fc9d99080f Merge branch 'master' into pr/507
Conflicts:
	tests/document/dynamic.py
2014-06-27 12:06:18 +01:00
Ross Lawley
47ebac0276 Add authentication_source option to register_connection #178 #464 #573 #580 #590 2014-06-27 11:59:35 +01:00
Ross Lawley
cb3fca03e9 Merge branch 'master' into pr/590
Conflicts:
	mongoengine/connection.py
2014-06-27 11:53:46 +01:00
Ross Lawley
abbbd83729 Merge pull request #433 from reachveera/master
Overridden the prepare_query_value method in SequenceField in order to re...
2014-06-27 11:49:28 +01:00
Ross Lawley
1743ab7812 Changelog update #567 2014-06-27 11:38:06 +01:00
Ross Lawley
324e3972a6 Merge pull request #567 from tomprimozic/master
Implemented equality between Documents and DBRefs
2014-06-27 11:37:24 +01:00
Ross Lawley
1502dda2ab Fixed ReferenceField inside nested ListFields dereferencing problem #368 2014-06-27 11:33:56 +01:00
Ross Lawley
f31b2c4a79 Merge branch 'master' into pr/368 2014-06-27 11:32:19 +01:00
Ross Lawley
89b9b60e0c Geo SON tweaks 2014-06-27 11:27:10 +01:00
Ross Lawley
de9ba12779 Turn on tests 2014-06-27 11:16:23 +01:00
Ross Lawley
9cc4359c04 Added the ability to reload specific document fields #100 2014-06-27 11:10:14 +01:00
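A sketch, reusing the hypothetical BlogPost model from the earlier sketches::

    post = BlogPost.objects.first()
    post.reload('title')  # re-fetches only 'title' rather than the whole document
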
Ross Lawley
67eaf120b9 db_alias support and fixes for custom map/reduce output #586 2014-06-27 10:07:05 +01:00
Ross Lawley
b8353c4a33 Merge branch 'master' into pr/586
Conflicts:
	mongoengine/queryset/base.py
	tests/queryset/queryset.py
2014-06-27 10:06:30 +01:00
Ross Lawley
7013033ae4 Update changelog & AUTHORS #594 #589 2014-06-27 10:03:35 +01:00
Ross Lawley
cb8cd03852 Merge pull request #594 from idlead/feature/post_save_delta
post_save signal should have delta information about field changes #594 #589
2014-06-27 10:02:08 +01:00
Ross Lawley
f63fb62014 Merge branch 'master' of github.com:MongoEngine/mongoengine 2014-06-27 10:00:42 +01:00
Ross Lawley
2e4fb86b86 Don't query with $orderby for qs.get() #600 2014-06-27 10:00:16 +01:00
Stefan Wojcik
5e776a07dd allow ordering to be cleared 2014-06-27 09:58:49 +01:00
Ross Lawley
81e637e50e Merge pull request #598 from philfreo/patch-1
clarifying the 'push' atomic update docs
2014-06-27 09:38:01 +01:00
Ross Lawley
0971ad0a80 Update changelog & authors - #636 2014-06-27 09:31:01 +01:00
Ross Lawley
8267ded7ec Merge branch 'master' into pr/636 2014-06-27 09:29:19 +01:00
Ross Lawley
7f36ea55f5 Fix bulk test where behaviour changes based on mongo version 2014-06-27 09:14:56 +01:00
Ross Lawley
72a051f2d3 Update AUTHORS & Changelog #557 2014-06-27 09:12:05 +01:00
Ross Lawley
51b197888c Merge remote-tracking branch 'origin/master' 2014-06-27 09:10:40 +01:00
Ross Lawley
cd63865d31 Fix clear_changed_fields() clearing unsaved documents bug #602 2014-06-27 09:08:07 +01:00
Martyn Smith
5be5685a09 Test to illustrate failure in changed attribute tracking 2014-06-27 09:06:17 +01:00
Yohan Graterol
76b2f25d46 Merge pull request #557 from SpotOnInc/recursive_embedded_errors_fix
Fixes issue with recursive embedded document errors
2014-06-26 21:55:49 -05:00
Ross Lawley
58607d4a7f Merge pull request #609 from nicolasdespres/fix-gridfs-guide
Save is called on the document not the file field.
2014-06-26 19:56:32 +01:00
Ross Lawley
c0a5b16a7f Travis bump 2014-06-26 19:52:05 +01:00
Ross Lawley
3a0c69005b Update AUTHORS and Changelog
Refs: #664, #677, #676, #673, #674, #655, #657, #626, #625, #619, #613, #608, #511, #559
2014-06-26 19:41:40 +01:00
Ross Lawley
5c295fb9e3 Merge branch 'master' of github.com:MongoEngine/mongoengine 2014-06-26 19:25:35 +01:00
Ross Lawley
4ee212e7d5 Skip Test due to server bug in 2.6 2014-06-26 19:25:05 +01:00
Ross Lawley
70651ce994 Fix as_pymongo bug 2014-06-26 19:24:52 +01:00
Yohan Graterol
a778a91106 Merge pull request #584 from FrankSomething/consistent-inits
inherit parent Document type _auto_id_field value
2014-06-26 11:24:13 -05:00
Ross Lawley
cfc31eead3 Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+
Closes #664
2014-06-26 17:13:35 +01:00
Ross Lawley
da0a1bbe9f Fix test_using 2014-06-26 17:13:21 +01:00
Ross Lawley
bc66fb33e9 Merge branch 'master' into pr/625 2014-06-26 16:48:12 +01:00
Ross Lawley
b1b6493755 Merge branch 'pr/676' 2014-06-26 16:46:01 +01:00
Ross Lawley
1d189f239b Merge branch 'pr/562' 2014-06-26 16:42:23 +01:00
Ross Lawley
5b90691bcc Merge branch 'master' into pr/585 2014-06-26 16:41:27 +01:00
Ross Lawley
d1d5972277 Removed support for old versions
- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
- Removing support for Python < 2.6.6
2014-06-26 16:34:02 +01:00
Ross Lawley
2c07d77368 Updated changelog
Enabled connection pooling
2014-06-26 16:24:37 +01:00
Ross Lawley
642cfbf59a Merge branch 'master' into pr/672
Conflicts:
	.travis.yml
2014-06-26 16:23:32 +01:00
Ross Lawley
bb1367cfb9 Merge branch 'master' into pr/674 2014-06-26 16:22:21 +01:00
Dmitry Konishchev
11724aa555 QuerySet.modify() method to provide find_and_modify() like behaviour 2014-06-26 16:18:42 +01:00
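A sketch of the new method (field values hypothetical)::

    post = BlogPost.objects(title='Old title').modify(
        set__title='New title',  # same modifier syntax as update()
        new=True)                # return the document as it is after the update
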
Ross Lawley
4d374712de Merge branch '0.9'
Conflicts:
	.travis.yml
2014-06-26 16:14:34 +01:00
Ross Lawley
eb9003187d Updated changelog & authors #673 2014-06-26 16:13:01 +01:00
Ross Lawley
caba444962 Merge branch '0.9' into pr/673
Conflicts:
	.travis.yml
2014-06-26 16:11:12 +01:00
Ross Lawley
5b6c8c191f Updated .travis.yml 2014-06-26 16:06:30 +01:00
Ross Lawley
dd51589f67 Updates 2014-06-26 16:02:40 +01:00
Ross Lawley
b02a31d4b9 Updated .travis.yml 2014-06-26 14:44:44 +01:00
Omer Katz
0e7878b406 Only run 2to3 on Python 3.x. Makes sense no? 2014-06-26 12:41:26 +03:00
Omer Katz
cae91ce0c5 Convert codebase to Python 3 using 2to3 before running benchmarks. 2014-06-26 12:31:07 +03:00
Omer Katz
67a65a2aa9 Installing unittest2 on Python 2.6. 2014-06-26 11:17:57 +03:00
Yohan Graterol
364b0a7163 Merge pull request #591 from pavlov99/master
fix docstring for DictField
2014-06-25 22:52:07 -05:00
Yohan Graterol
d6419f2059 Merge pull request #613 from falcondai/master
minor change to geo-related docs
2014-06-25 10:44:36 -05:00
Yohan Graterol
6f7ad7ef91 Merge pull request #619 from polyrabbit/master
Fixed incorrectly split a query key, when it ends with "_"
2014-06-25 10:42:27 -05:00
Omer Katz
5ae588833b Allowed to switch databases for a specific query. 2014-06-25 18:22:39 +03:00
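A sketch of per-query database switching (alias and database names hypothetical)::

    from mongoengine import register_connection

    register_connection('reporting', name='reports_db')

    rows = BlogPost.objects.using('reporting')  # this query runs on 'reports_db'
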
Yohan Graterol
a70dbac0e6 Merge pull request #626 from KonishchevDmitry/pr-not-unique-error-on-update
Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError
2014-06-25 10:18:09 -05:00
Yohan Graterol
4d34a02afe Merge pull request #634 from jatin/patch-1
Updated Jatin's name and github name
2014-06-25 10:16:49 -05:00
Yohan Graterol
4db4f45897 Merge pull request #651 from elasticsales/server-13975-precaution
Don't use a system collection in the tests
2014-06-25 09:48:30 -05:00
Yohan Graterol
2d5280fc95 Merge pull request #655 from jonathansp/master
Avoid to open all documents from cursors in an if stmt
2014-06-25 09:45:51 -05:00
Omer Katz
b8d568761e Getting rid of xrange since it's not in Python 3 and does not affect the benchmark. 2014-06-25 17:24:52 +03:00
Omer Katz
29309dac9a Mongo clients with the same settings should be shared since they manage a connection pool.
Also, I removed old code that was supposed to support Pymongo<2.1 which we don't support anymore.
2014-06-25 16:53:24 +03:00
Omer Katz
7f7745071a Found more print statements that were not turned into function calls. 2014-06-25 15:47:54 +03:00
Omer Katz
1914032e35 Missed some of the print statements in the benchmarks script. 2014-06-25 14:20:54 +03:00
Omer Katz
f44c8f1205 Skipping a test that does not work on PyPy due to a PyPy bug/feature. 2014-06-25 13:11:32 +03:00
Omer Katz
fe2ef4e61c Made the benchmark script compatible with Python 3 and ensured it runs on every build. 2014-06-25 11:39:08 +03:00
Omer Katz
fc3eda55c7 Added a note about optional dependencies to the README file. 2014-06-25 11:32:41 +03:00
Omer Katz
8adf1cdd02 Fast finish the build if there are failures since we have a very large build matrix and each build takes a very long time. 2014-06-25 11:18:35 +03:00
Omer Katz
adbbc656d4 Removing zlib hack since only PIL needs it. The build should pass without it. 2014-06-25 11:12:40 +03:00
Omer Katz
8e852bce02 Pillow provides a more descriptive error message, hence the build failure. 2014-06-25 10:58:00 +03:00
Omer Katz
bb461b009f Travis build improvements.
The latest patch version of each Django minor version is used.
The build now installs existing pymongo versions.
The build now actually tests against the specified Django version.
Replaced PIL with Pillow.
Added PyPy and Python 3.4 to the build.

Rebase Log:

Installing Pillow instead of PIL for testing since it's recommended and it supports PyPy.

Excluding Django versions that do not work with Python 3.

Improved formatting of .travis.yml.

Specifying Pillow 2.0.0 and above since it's the first version that is supported in Python 3.

PIL should not be installed alongside Pillow.

Also, I installed some libraries that both PIL and Pillow depend on.

It seems I have to be explicit on all envvars in order to exclude Django 1.4 from the build matrix.

The build is now installing pymongo versions that actually exist.

openjpeg has a different name on Ubuntu 12.04.

Restoring libz hack.

Also installing all Pillow requirements just in case.

Fixed the build matrix.

Acting according to @BanzaiMan's advice in travis-ci/travis-ci/#1492.
2014-06-25 10:40:28 +03:00
Omer Katz
03559a3cc4 Added Python 3.4 to the build process. 2014-06-24 19:20:15 +03:00
Jonathan Prates
7bb2fe128a Added PR #657 2014-06-12 11:08:41 -03:00
Jonathan Prates
2312e17a8e Merge remote-tracking branch 'elasticsales/clear-default-ordering' 2014-06-12 10:28:36 -03:00
Sagiv Malihi
9835b382da added __slots__ to BaseDocument and Document
changed the _data field to a static key-value mapping instead of a hash table
This implements #624
2014-06-10 16:11:27 +03:00
Stefan Wojcik
1eacc6fbff clear ordering via empty order_by 2014-05-30 15:08:03 -07:00
Jonathan Prates
85187239b6 Fix tests msg 2014-05-29 15:21:24 -03:00
Jonathan Prates
819ff2a902 Renamed to has_data() 2014-05-29 14:36:30 -03:00
Jonathan Prates
c744104a18 Added test with meta 2014-05-29 10:53:20 -03:00
Jonathan Prates
c87801f0a9 Using first() from cloned queryset 2014-05-28 17:26:28 -03:00
Jonathan Prates
39735594bd Removed blank line 2014-05-28 17:15:48 -03:00
Jonathan Prates
30964f65e4 Remove orderby in if stmt 2014-05-28 17:06:15 -03:00
Jonathan Prates
ee0c7fd8bf Change for loop to self.first() 2014-05-28 13:21:00 -03:00
Jonathan Prates
dfdecef8e7 Fix py2 and py3 2014-05-28 09:40:22 -03:00
Jonathan Prates
edcdfeb057 Fix syntax error 2014-05-28 09:03:12 -03:00
Jonathan Prates
47f0de9836 Py3 fix 2014-05-28 08:36:57 -03:00
Jonathan Prates
9ba657797e Authors updated according to the guidelines 2014-05-28 08:33:22 -03:00
Clay McClure
07442a6f84 Allow index specs to be composed from raw strings
This allows an index spec to reference arbitrary keys of a DictField:

    class MyDoc(Document):
        frobs = DictField()
        meta = {
            "indexes": ["frobs.fmep", "frobs.gorp"],
        }
2014-05-28 01:31:35 -04:00
Jonathan Prates
3faf3c84be Avoid to open all documents from cursors in an if stmt
Using a cursor in an if statement::

    cursor = Collection.objects

    if cursor:
        (...)

will load all documents, because there is no __nonzero__ method.
This change checks only one document (if present) and returns True or False.
2014-05-27 16:33:38 -03:00
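The practical effect, sketched against the hypothetical BlogPost model above::

    if BlogPost.objects:  # now fetches at most one document
        print('collection is not empty')
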
Stefan Wojcik
abcacc82f3 dont use a system collection 2014-05-21 22:21:46 -07:00
Ronald van Rij
9544b7d968 Fixed unit test which used assertIsNotNone 2014-05-09 14:33:18 +02:00
Ronald van Rij
babbc8bcd6 When using autogenerated document ids in a sharded collection, do set that id back into the Document 2014-05-06 09:34:16 +02:00
Jatin Chopra
12809ebc74 Updated Jatin's name and github name 2014-05-06 00:25:55 -07:00
Dmitry Konishchev
b45a601ad2 Test raising NotUniqueError by Document.update() 2014-04-15 19:32:42 +04:00
Serge Matveenko
f099dc6a37 Merge pull request #608 from cloudbuy/dateutil-bug-workaround
workaround a dateutil bug
2014-04-10 12:18:25 +04:00
Dmitry Konishchev
803caddbd4 Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError 2014-04-09 14:25:53 +04:00
poly
4d7b988018 Fixed incorrectly split a query key when it ends with "_" 2014-04-01 19:52:21 +08:00
Falcon Dai
c1f88a4e14 minor change to geo-related docs 2014-03-17 22:29:53 -05:00
Nicolas Despres
5d9ec0b208 Save is called on the document not the file field. 2014-03-17 17:19:17 +01:00
Damien Churchill
1877cacf9c fix modifying slices under python3 2014-03-12 19:49:43 +00:00
Damien Churchill
2f4978cfea Merge branch 'dateutil-bug-workaround' 2014-03-12 17:27:04 +00:00
Damien Churchill
d27a1103fa workaround a dateutil bug
In the latest released version of dateutil, there's a bug whereby a TypeError
can be raised whilst parsing a date. This is because it calls a method which
it expects to return 2 arguments, however it can return 1 depending upon the
input, which results in a TypeError: ArgType not iterable exception. Since
this is equivalent to a failed parse anyway, we can treat it the same as a
ValueError.
2014-03-12 17:19:49 +00:00
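A sketch of the workaround described above (the helper is hypothetical; the real change lives in the date parsing code)::

    import dateutil.parser

    def parse_or_none(value):
        try:
            return dateutil.parser.parse(value)
        except (ValueError, TypeError):
            # dateutil may raise TypeError ('ArgType not iterable') on some
            # inputs; treat it exactly like a failed parse.
            return None
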
Damien Churchill
b85bb95082 Merge branch 'master' of github.com:cloudbuy/mongoengine 2014-03-12 15:11:53 +00:00
Damien Churchill
db7f93cff3 improved update queries for BaseDict & BaseList
Migrate changes to include updating single elements of ListFields as
well as MapFields by adding the same changes to BaseList. This is
done by ensuring all BaseDicts and BaseLists have the correct name
from the base of the nearest (Embedded)Document, then marking changes
with their key or index when they are changed.

Tests also all fixed up.
2014-03-12 15:07:40 +00:00
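The effect, sketched assuming a hypothetical BlogPost document with tags = ListField(StringField())::

    post = BlogPost.objects.first()
    post.tags[3] = 'mongodb'  # BaseList now marks 'tags.3' as changed,
    post.save()               # so save() can $set just that element
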
Damien Churchill
85e271098f Merge branch 'master' of https://github.com/MongoEngine/mongoengine 2014-03-12 12:44:04 +00:00
Damien Churchill
17001e2f74 Merge remote-tracking branch 'origin/master' 2014-03-11 13:00:08 +00:00
Phil Freo
c82f4f0d45 clarifying the 'push' atomic update docs
the first time I read this I was all like... "no duh it will remove either the first or the last, but which does it do???"
2014-03-07 13:37:15 -08:00
tprimozi
88247a3af9 Bugfix for weakref _instance bug. 2014-03-03 15:11:05 +00:00
tprimozi
158578a406 Added test that fails due to weakref _instance bug. 2014-03-03 15:10:35 +00:00
Kirill Pavlov
19314e7e06 fix docstring for DictField 2014-03-03 13:09:26 +08:00
Brian Helba
8bcbc6d545 Add authentication_source option to register_connection (#573) (#580)
Since v2.5, PyMongo has supported a "source" option, to specify a
particular database to authenticate against. This adds support for that
option, in the form of a "authentication_source" option to
register_connection.
2014-03-02 18:35:49 -05:00
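A usage sketch (alias, database, and credentials hypothetical)::

    from mongoengine import register_connection

    # Authenticate against 'admin' while storing data in 'app_db'.
    register_connection('default', name='app_db',
                        username='user', password='secret',
                        authentication_source='admin')
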
Vlad Zloteanu
ef55e6d476 fixes MongoEngine/mongoengine#589 2014-03-01 17:51:59 +01:00
Wilson Júnior
295ef3dc1d db_alias support and fixes for custom map/reduce output 2014-02-25 15:36:30 -03:00
Frank Battaglia
9d125c9e79 inherit parent Document type _auto_id_field value 2014-02-24 11:10:02 -05:00
Frank Battaglia
86363986fc whitespace 2014-02-24 11:04:29 -05:00
Frank Battaglia
0a2dbbc58b add tests for mongo query operators 2014-02-24 11:03:50 -05:00
Frank Battaglia
673a966541 add tests for save_condition kwarg in document.save() 2014-02-24 11:02:37 -05:00
Frank Battaglia
db1e69813b add atomic conditions to save
Conflicts:
	mongoengine/document.py
2014-02-24 10:57:32 -05:00
Aleksandr Sorokoumov
e60d56f060 test implemented 2014-02-24 19:22:36 +04:00
Aleksandr Sorokoumov
328e062ae9 Distinct method bugfix
Creation of instances is executed now only for EmbeddedDocumentField
and GenericEmbeddedDocumentField in distinct method
2014-02-24 19:21:11 +04:00
tprimozi
0523c2ea4b Fixed document equality: documents in different collections can have equal ids. 2014-02-13 18:12:33 +00:00
tprimozi
c5c7378c63 Implemented equality between Documents and DBRefs 2014-02-04 13:41:17 +00:00
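A sketch of what now holds (model hypothetical; see the __eq__ change in the document.py diff below)::

    from bson import DBRef

    book = Book.objects.first()
    book == DBRef(book._get_collection_name(), book.id)  # True when both match
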
Brian J. Dowling
9b2080d036 Added a test for allowing dynamic dictionary-style field access
Closes #559
2014-01-28 22:10:26 -05:00
Ross Lawley
d4b3649640 Added coveralls.io badge
https://coveralls.io/r/MongoEngine/mongoengine
2014-01-28 09:25:59 +00:00
Brian J. Dowling
b085993901 Allow dynamic dictionary-style field access
Allows the doc[key] syntax to work for DynamicEmbeddedDocument fields

Fixes #559
2014-01-27 23:05:29 +00:00
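A sketch (shown with a dynamic document; the fix targets dynamic embedded documents as well)::

    from mongoengine import DynamicDocument

    class Profile(DynamicDocument):
        pass

    p = Profile()
    p['nickname'] = 'ada'  # dictionary-style writes no longer raise KeyError
    print(p['nickname'])
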
Andrei Zbikowski
0d4afad342 Fixes issue with recursive embedded document errors 2014-01-24 16:54:29 -06:00
Damien Churchill
eacb614750 Merge branch 'master' of https://github.com/MongoEngine/mongoengine 2014-01-10 11:03:48 +00:00
Yohan Graterol
341e1e7a6d Feature for progressive JPEG. Issue #486 2014-01-08 14:48:34 -04:30
Damien Churchill
2f6890c78a fix for nested MapFields
When using nested MapFields from a document loaded from the database, the
nested dictionaries aren't converted to BaseDict, so changes aren't
marked.

This also includes a change when marking a field as changed to ensure that
nested fields aren't included in a $set query if a parent is already marked
as changed. Not sure if this could occur but it prevents breakage if it does.
2013-12-16 13:44:07 +00:00
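The effect, sketched with a hypothetical nested MapField::

    from mongoengine import Document, IntField, MapField

    class Board(Document):
        scores = MapField(MapField(IntField()))

    board = Board.objects.first()
    board.scores['alice']['level1'] = 10  # nested dict is now a BaseDict,
    board.save()                          # so the change is tracked and $set
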
Eric Plumb
857cd718df Fix for issue #425 - allow undeclared fields in an embedded dynamic document to be seen by queryset methods 2013-11-08 14:57:35 -08:00
veera
c9dc441915 Overridden the prepare_query_value method in SequenceField in order to return the value as the required type. 2013-08-05 15:33:54 +05:30
Stefan Wojcik
a7ca9950fc potential fix for dereferencing nested lists 2013-06-11 15:36:35 -07:00
Stefan Wojcik
e0dd33e6be move the test into a more appropriate location 2013-06-11 12:18:03 -07:00
Stefan Wojcik
2e718e1130 unit test showing the problem 2013-06-11 12:00:59 -07:00
47 changed files with 3186 additions and 648 deletions

.travis.yml

@@ -1,31 +1,48 @@
# http://travis-ci.org/#!/MongoEngine/mongoengine
language: python
services: mongodb
python:
- "2.6"
- "2.7"
- "3.2"
- "3.3"
- "3.4"
- "pypy"
- "pypy3"
env:
- PYMONGO=dev DJANGO=1.6
- PYMONGO=dev DJANGO=1.5.5
- PYMONGO=dev DJANGO=1.4.10
- PYMONGO=2.5 DJANGO=1.6
- PYMONGO=2.5 DJANGO=1.5.5
- PYMONGO=2.5 DJANGO=1.4.10
- PYMONGO=3.2 DJANGO=1.6
- PYMONGO=3.2 DJANGO=1.5.5
- PYMONGO=3.3 DJANGO=1.6
- PYMONGO=3.3 DJANGO=1.5.5
- PYMONGO=dev DJANGO=dev
- PYMONGO=dev DJANGO=1.6.5
- PYMONGO=dev DJANGO=1.5.8
- PYMONGO=2.7.1 DJANGO=dev
- PYMONGO=2.7.1 DJANGO=1.6.5
- PYMONGO=2.7.1 DJANGO=1.5.8
matrix:
exclude:
- python: "2.6"
env: PYMONGO=dev DJANGO=dev
- python: "2.6"
env: PYMONGO=2.7.1 DJANGO=dev
fast_finish: true
before_install:
- "travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10"
- "echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list"
- "travis_retry sudo apt-get update"
- "travis_retry sudo apt-get install mongodb-org-server"
install:
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi
- if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
- pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
- python setup.py install
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk
- if [[ $PYMONGO == 'dev' ]]; then travis_retry pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then travis_retry pip install pymongo==$PYMONGO; true; fi
- if [[ $DJANGO == 'dev' ]]; then travis_retry pip install https://www.djangoproject.com/download/1.7c2/tarball/; fi
- if [[ $DJANGO != 'dev' ]]; then travis_retry pip install Django==$DJANGO; fi
- travis_retry pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
- travis_retry python setup.py install
script:
- python setup.py test
- travis_retry python setup.py test
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi;
- python benchmark.py
notifications:
irc: "irc.freenode.org#mongoengine"
branches:

AUTHORS

@@ -142,7 +142,7 @@ that much better:
* Pete Campton
* Martyn Smith
* Marcelo Anton
* Aleksey Porfirov
* Aleksey Porfirov (https://github.com/lexqt)
* Nicolas Trippar
* Manuel Hermann
* Gustavo Gawryszewski
@@ -171,7 +171,7 @@ that much better:
* Michael Bartnett (https://github.com/michaelbartnett)
* Alon Horev (https://github.com/alonho)
* Kelvin Hammond (https://github.com/kelvinhammond)
* Jatin- (https://github.com/jatin-)
* Jatin Chopra (https://github.com/jatin)
* Paul Uithol (https://github.com/PaulUithol)
* Thom Knowles (https://github.com/fleat)
* Paul (https://github.com/squamous)
@@ -189,3 +189,20 @@ that much better:
* Tom (https://github.com/tomprimozic)
* j0hnsmith (https://github.com/j0hnsmith)
* Damien Churchill (https://github.com/damoxc)
* Jonathan Simon Prates (https://github.com/jonathansp)
* Thiago Papageorgiou (https://github.com/tmpapageorgiou)
* Omer Katz (https://github.com/thedrow)
* Falcon Dai (https://github.com/falcondai)
* Polyrabbit (https://github.com/polyrabbit)
* Sagiv Malihi (https://github.com/sagivmalihi)
* Dmitry Konishchev (https://github.com/KonishchevDmitry)
* Martyn Smith (https://github.com/martynsmith)
* Andrei Zbikowski (https://github.com/b1naryth1ef)
* Ronald van Rij (https://github.com/ronaldvanrij)
* François Schmidts (https://github.com/jaesivsm)
* Eric Plumb (https://github.com/professorplumb)
* Damien Churchill (https://github.com/damoxc)
* Aleksandr Sorokoumov (https://github.com/Gerrrr)
* Clay McClure (https://github.com/claymation)
* Bruno Rocha (https://github.com/rochacbruno)
* Norberto Leite (https://github.com/nleite)

README.rst

@@ -8,6 +8,13 @@ MongoEngine
.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
:target: http://travis-ci.org/MongoEngine/mongoengine
.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master
:target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master
.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png
:target: https://landscape.io/github/MongoEngine/mongoengine/master
:alt: Code Health
About
=====
@@ -26,9 +33,18 @@ setup.py install``.
Dependencies
============
- pymongo 2.5+
- pymongo>=2.7.1
- sphinx (optional - for documentation generation)
Optional Dependencies
---------------------
- **Django Integration:** Django>=1.4.0 for Python 2.x or PyPy and Django>=1.5.0 for Python 3.x
- **Image Fields**: Pillow>=2.0.0 or PIL (not recommended since MongoEngine is tested with Pillow)
- dateutil>=2.1.0
.. note::
    MongoEngine always runs its test suite against the latest patch version of each dependency, e.g. Django 1.6.5.
Examples
========
Some simple examples of what MongoEngine code looks like::

benchmark.py

@@ -15,7 +15,7 @@ def cprofile_main():
class Noddy(Document):
fields = DictField()
for i in xrange(1):
for i in range(1):
noddy = Noddy()
for j in range(20):
noddy.fields["key" + str(j)] = "value " + str(j)
@@ -113,6 +113,7 @@ def main():
4.68946313858
----------------------------------------------------------------------------------------------------
"""
print("Benchmarking...")
setup = """
from pymongo import MongoClient
@@ -127,7 +128,7 @@ connection = MongoClient()
db = connection.timeit_test
noddy = db.noddy
for i in xrange(10000):
for i in range(10000):
example = {'fields': {}}
for j in range(20):
example['fields']["key"+str(j)] = "value "+str(j)
@@ -138,10 +139,10 @@ myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - Pymongo"""
print("-" * 100)
print("""Creating 10000 dictionaries - Pymongo""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
from pymongo import MongoClient
@@ -150,7 +151,7 @@ connection = MongoClient()
db = connection.timeit_test
noddy = db.noddy
for i in xrange(10000):
for i in range(10000):
example = {'fields': {}}
for j in range(20):
example['fields']["key"+str(j)] = "value "+str(j)
@@ -161,10 +162,10 @@ myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}"""
print("-" * 100)
print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
setup = """
from pymongo import MongoClient
@@ -180,7 +181,7 @@ class Noddy(Document):
"""
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@@ -190,13 +191,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
fields = {}
for j in range(20):
@@ -208,13 +209,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries without continual assign - MongoEngine"""
print("-" * 100)
print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@@ -224,13 +225,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@@ -240,13 +241,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@@ -256,13 +257,13 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
stmt = """
for i in xrange(10000):
for i in range(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
@@ -272,11 +273,11 @@ myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False"""
print("-" * 100)
print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
print(t.timeit(1))
if __name__ == "__main__":
main()
main()

docs/apireference.rst

@@ -84,6 +84,7 @@ Fields
.. autoclass:: mongoengine.fields.MapField
.. autoclass:: mongoengine.fields.ReferenceField
.. autoclass:: mongoengine.fields.GenericReferenceField
.. autoclass:: mongoengine.fields.CachedReferenceField
.. autoclass:: mongoengine.fields.BinaryField
.. autoclass:: mongoengine.fields.FileField
.. autoclass:: mongoengine.fields.ImageField

docs/changelog.rst

@@ -2,6 +2,51 @@
Changelog
=========
Changes in 0.9.X - DEV
======================
- Not overriding default values when loading a subset of fields #399
- Saving document doesn't create new fields in existing collection #620
- Added `Queryset.aggregate` wrapper to the aggregation framework #703
- Added support to show original model fields on to_json calls instead of db_field #697
- Added `Queryset.search_text` for text index searches #700
- Fixed tests for Django 1.7 #696
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
- Added preliminary support for text indexes #680
- Added `elemMatch` operator as well - `match` is too obscure #653
- Added support for progressive JPEG #486 #548
- Allow strings to be used in index creation #675
- Fixed EmbeddedDoc weakref proxy issue #592
- Fixed nested reference field distinct error #583
- Fixed change tracking on nested MapFields #539
- Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507
- Add authentication_source option to register_connection #178 #464 #573 #580 #590
- Implemented equality between Documents and DBRefs #597
- Fixed ReferenceField inside nested ListFields dereferencing problem #368
- Added the ability to reload specific document fields #100
- Added db_alias support and fixes for custom map/reduce output #586
- post_save signal now has access to delta information about field changes #594 #589
- Don't query with $orderby for qs.get() #600
- Fix id shard key save issue #636
- Fixes issue with recursive embedded document errors #557
- Fix clear_changed_fields() clearing unsaved documents bug #602
- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
- Removing support for Python < 2.6.6
- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
- QuerySet.modify() method to provide find_and_modify() like behaviour #677
- Added support for the using() method on a queryset #676
- PyPy support #673
- Connection pooling #674
- Avoid opening all documents from cursors in an if stmt #655
- Ability to clear the ordering #657
- Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626
- Slots - memory improvements #625
- Fixed incorrectly split a query key when it ends with "_" #619
- Geo docs updates #613
- Workaround a dateutil bug #608
- Conditional save for atomic-style operations #511
- Allow dynamic dictionary-style field access #559
Changes in 0.8.7
================
- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)

docs/guide/connecting.rst

@@ -35,8 +35,8 @@ in ::func:`~mongoengine.connect`
ReplicaSets
===========
MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`
to use them please use a URI style connection and provide the `replicaSet` name in the
MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`.
To use them, please use a URI style connection and provide the `replicaSet` name in the
connection kwargs.
Read preferences are supported through the connection or via individual

docs/guide/defining-documents.rst

@@ -4,7 +4,7 @@ Defining documents
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables - the principle difference is that no schema
**collections** rather than tables - the principal difference is that no schema
is enforced at a database level.
Defining a document's schema
@@ -459,7 +459,8 @@ by creating a list of index specifications called :attr:`indexes` in the
either be a single field name, a tuple containing multiple field names, or a
dictionary containing a full index definition. A direction may be specified on
fields by prefixing the field name with a **+** (for ascending) or a **-** sign
(for descending). Note that direction only matters on multi-field indexes. ::
(for descending). Note that direction only matters on multi-field indexes.
Text indexes may be specified by prefixing the field name with a **$**. ::
class Page(Document):
title = StringField()
@@ -531,6 +532,8 @@ field name to the index definition.
Sometimes its more efficient to index parts of Embedded / dictionary fields,
in this case use 'dot' notation to identify the value to index eg: `rank.title`
.. _geospatial-indexes:
Geospatial indexes
------------------

docs/guide/gridfs.rst

@@ -46,7 +46,7 @@ slightly different manner. First, a new file must be created by calling the
marmot.photo.write('some_more_image_data')
marmot.photo.close()
marmot.photo.save()
marmot.save()
Deletion
--------

docs/guide/index.rst

@@ -12,3 +12,4 @@ User Guide
querying
gridfs
signals
text-indexes

docs/guide/querying.rst

@@ -488,8 +488,9 @@ calling it with keyword arguments::
Atomic updates
==============
Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one` and
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
:meth:`~mongoengine.queryset.QuerySet.update_one`,
:meth:`~mongoengine.queryset.QuerySet.update` and
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:
@@ -499,11 +500,13 @@ that you may use with these methods:
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pop`` -- remove the first or last element of a list
* ``pop`` -- remove the first or last element of a list `depending on the value`_
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
* ``add_to_set`` -- add value to a list only if its not in the list already
.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::
@@ -522,6 +525,13 @@ modifier comes before the field, not after it::
>>> post.tags
['database', 'nosql']
.. note::
If no modifier operator is specified the default will be ``$set``. So the following statements are identical::
>>> BlogPost.objects(id=post.id).update(title='Example Post')
>>> BlogPost.objects(id=post.id).update(set__title='Example Post')
.. note::
In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates

docs/guide/text-indexes.rst

@@ -0,0 +1,49 @@
===========
Text Search
===========
MongoDB 2.4 and later support searching documents via text indexes.
Defining a Document with text index
===================================
Use the *$* prefix to set a text index, as in the following declaration::
class News(Document):
title = StringField()
content = StringField()
is_active = BooleanField()
meta = {'indexes': [
{'fields': ['$title', "$content"],
'default_language': 'english',
'weights': {'title': 10, 'content': 2}
}
]}
Querying
========
Saving a document::
News(title="Using mongodb text search",
content="Testing text search").save()
News(title="MongoEngine 0.9 released",
content="Various improvements").save()
Next, start a text search using the :meth:`QuerySet.search_text` method::
document = News.objects.search_text('testing').first()
document.title # may be: "Using mongodb text search"
document = News.objects.search_text('released').first()
document.title # may be: "MongoEngine 0.9 released"
Ordering by text score
======================
objects = News.objects.search_text('mongo').order_by('$text_score')

mongoengine/base/datastructures.py

@@ -1,12 +1,13 @@
import weakref
import functools
import itertools
from mongoengine.common import _import_class
__all__ = ("BaseDict", "BaseList")
class BaseDict(dict):
"""A special dict so we can watch any changes
"""
"""A special dict so we can watch any changes"""
_dereferenced = False
_instance = None
@@ -21,29 +22,37 @@ class BaseDict(dict):
self._name = name
return super(BaseDict, self).__init__(dict_items)
def __getitem__(self, *args, **kwargs):
value = super(BaseDict, self).__getitem__(*args, **kwargs)
def __getitem__(self, key, *args, **kwargs):
value = super(BaseDict, self).__getitem__(key)
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument) and value._instance is None:
value._instance = self._instance
elif not isinstance(value, BaseDict) and isinstance(value, dict):
value = BaseDict(value, None, '%s.%s' % (self._name, key))
super(BaseDict, self).__setitem__(key, value)
value._instance = self._instance
elif not isinstance(value, BaseList) and isinstance(value, list):
value = BaseList(value, None, '%s.%s' % (self._name, key))
super(BaseDict, self).__setitem__(key, value)
value._instance = self._instance
return value
def __setitem__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).__setitem__(*args, **kwargs)
def __setitem__(self, key, value, *args, **kwargs):
self._mark_as_changed(key)
return super(BaseDict, self).__setitem__(key, value)
def __delete__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).__delete__(*args, **kwargs)
def __delitem__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).__delitem__(*args, **kwargs)
def __delitem__(self, key, *args, **kwargs):
self._mark_as_changed(key)
return super(BaseDict, self).__delitem__(key)
def __delattr__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseDict, self).__delattr__(*args, **kwargs)
def __delattr__(self, key, *args, **kwargs):
self._mark_as_changed(key)
return super(BaseDict, self).__delattr__(key)
def __getstate__(self):
self.instance = None
@@ -70,9 +79,12 @@ class BaseDict(dict):
self._mark_as_changed()
return super(BaseDict, self).update(*args, **kwargs)
def _mark_as_changed(self):
def _mark_as_changed(self, key=None):
if hasattr(self._instance, '_mark_as_changed'):
self._instance._mark_as_changed(self._name)
if key:
self._instance._mark_as_changed('%s.%s' % (self._name, key))
else:
self._instance._mark_as_changed(self._name)
class BaseList(list):
@@ -92,21 +104,35 @@ class BaseList(list):
self._name = name
return super(BaseList, self).__init__(list_items)
def __getitem__(self, *args, **kwargs):
value = super(BaseList, self).__getitem__(*args, **kwargs)
def __getitem__(self, key, *args, **kwargs):
value = super(BaseList, self).__getitem__(key)
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument) and value._instance is None:
value._instance = self._instance
elif not isinstance(value, BaseDict) and isinstance(value, dict):
value = BaseDict(value, None, '%s.%s' % (self._name, key))
super(BaseList, self).__setitem__(key, value)
value._instance = self._instance
elif not isinstance(value, BaseList) and isinstance(value, list):
value = BaseList(value, None, '%s.%s' % (self._name, key))
super(BaseList, self).__setitem__(key, value)
value._instance = self._instance
return value
def __setitem__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).__setitem__(*args, **kwargs)
def __setitem__(self, key, value, *args, **kwargs):
if isinstance(key, slice):
self._mark_as_changed()
else:
self._mark_as_changed(key)
return super(BaseList, self).__setitem__(key, value)
def __delitem__(self, *args, **kwargs):
self._mark_as_changed()
return super(BaseList, self).__delitem__(*args, **kwargs)
def __delitem__(self, key, *args, **kwargs):
if isinstance(key, slice):
self._mark_as_changed()
else:
self._mark_as_changed(key)
return super(BaseList, self).__delitem__(key)
def __setslice__(self, *args, **kwargs):
self._mark_as_changed()
@@ -153,6 +179,105 @@ class BaseList(list):
self._mark_as_changed()
return super(BaseList, self).sort(*args, **kwargs)
def _mark_as_changed(self):
def _mark_as_changed(self, key=None):
if hasattr(self._instance, '_mark_as_changed'):
self._instance._mark_as_changed(self._name)
if key:
self._instance._mark_as_changed('%s.%s' % (self._name, key))
else:
self._instance._mark_as_changed(self._name)
class StrictDict(object):
__slots__ = ()
_special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
_classes = {}
def __init__(self, **kwargs):
for k,v in kwargs.iteritems():
setattr(self, k, v)
def __getitem__(self, key):
key = '_reserved_' + key if key in self._special_fields else key
try:
return getattr(self, key)
except AttributeError:
raise KeyError(key)
def __setitem__(self, key, value):
key = '_reserved_' + key if key in self._special_fields else key
return setattr(self, key, value)
def __contains__(self, key):
return hasattr(self, key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def pop(self, key, default=None):
v = self.get(key, default)
try:
delattr(self, key)
except AttributeError:
pass
return v
def iteritems(self):
for key in self:
yield key, self[key]
def items(self):
return [(k, self[k]) for k in iter(self)]
def keys(self):
return list(iter(self))
def __iter__(self):
return (key for key in self.__slots__ if hasattr(self, key))
def __len__(self):
return len(list(self.iteritems()))
def __eq__(self, other):
return self.items() == other.items()
def __neq__(self, other):
return self.items() != other.items()
@classmethod
def create(cls, allowed_keys):
allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
allowed_keys = frozenset(allowed_keys_tuple)
if allowed_keys not in cls._classes:
class SpecificStrictDict(cls):
__slots__ = allowed_keys_tuple
def __repr__(self):
return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems())
cls._classes[allowed_keys] = SpecificStrictDict
return cls._classes[allowed_keys]
class SemiStrictDict(StrictDict):
__slots__ = ('_extras')
_classes = {}
def __getattr__(self, attr):
try:
super(SemiStrictDict, self).__getattr__(attr)
except AttributeError:
try:
return self.__getattribute__('_extras')[attr]
except KeyError as e:
raise AttributeError(e)
def __setattr__(self, attr, value):
try:
super(SemiStrictDict, self).__setattr__(attr, value)
except AttributeError:
try:
self._extras[attr] = value
except AttributeError:
self._extras = {attr: value}
def __delattr__(self, attr):
try:
super(SemiStrictDict, self).__delattr__(attr)
except AttributeError:
try:
del self._extras[attr]
except KeyError as e:
raise AttributeError(e)
def __iter__(self):
try:
extras_iter = iter(self.__getattribute__('_extras'))
except AttributeError:
extras_iter = ()
return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)

mongoengine/base/document.py

@@ -13,11 +13,10 @@ from mongoengine import signals
from mongoengine.common import _import_class
from mongoengine.errors import (ValidationError, InvalidDocumentError,
LookUpError)
from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type,
to_str_keys_recursive)
from mongoengine.python_support import PY3, txt_type
from mongoengine.base.common import get_document, ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList
from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict, SemiStrictDict
from mongoengine.base.fields import ComplexBaseField
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
@@ -26,11 +25,12 @@ NON_FIELD_ERRORS = '__all__'
class BaseDocument(object):
__slots__ = ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields', '_auto_id_field', '_db_field_map', '_cls', '__weakref__')
_dynamic = False
_created = True
_dynamic_lock = True
_initialised = False
STRICT = False
def __init__(self, *args, **values):
"""
@@ -39,6 +39,8 @@ class BaseDocument(object):
:param __auto_convert: Try and will cast python objects to Object types
:param values: A dictionary of values for the document
"""
self._initialised = False
self._created = True
if args:
# Combine positional arguments with named arguments.
# We only want named arguments.
@@ -49,17 +51,29 @@ class BaseDocument(object):
for value in args:
name = next(field)
if name in values:
raise TypeError("Multiple values for keyword argument '" + name + "'")
raise TypeError(
"Multiple values for keyword argument '" + name + "'")
values[name] = value
__auto_convert = values.pop("__auto_convert", True)
# 399: set default values only to fields loaded from DB
__only_fields = set(values.pop("__only_fields", values))
signals.pre_init.send(self.__class__, document=self, values=values)
if self.STRICT and not self._dynamic:
self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
else:
self._data = SemiStrictDict.create(
allowed_keys=self._fields_ordered)()
_created = values.pop("_created", True)
self._data = {}
self._dynamic_fields = SON()
# Assign default values to instance
for key, field in self._fields.iteritems():
if self._db_field_map.get(key, key) in values:
if self._db_field_map.get(key, key) in __only_fields:
continue
value = getattr(self, key, None)
setattr(self, key, value)
@@ -97,6 +111,7 @@ class BaseDocument(object):
# Flag initialised
self._initialised = True
self._created = _created
signals.post_init.send(self.__class__, document=self)
def __delattr__(self, *args, **kwargs):
@@ -130,18 +145,26 @@ class BaseDocument(object):
self._data[name] = value
if hasattr(self, '_changed_fields'):
self._mark_as_changed(name)
try:
self__created = self._created
except AttributeError:
self__created = True
if (self._is_document and not self._created and
name in self._meta.get('shard_key', tuple()) and
self._data.get(name) != value):
if (self._is_document and not self__created and
name in self._meta.get('shard_key', tuple()) and
self._data.get(name) != value):
OperationError = _import_class('OperationError')
msg = "Shard Keys are immutable. Tried to update %s" % name
raise OperationError(msg)
try:
self__initialised = self._initialised
except AttributeError:
self__initialised = False
# Check if the user has created a new instance of a class
if (self._is_document and self._initialised
and self._created and name == self._meta['id_field']):
super(BaseDocument, self).__setattr__('_created', False)
if (self._is_document and self__initialised
and self__created and name == self._meta['id_field']):
super(BaseDocument, self).__setattr__('_created', False)
super(BaseDocument, self).__setattr__(name, value)
@@ -158,9 +181,11 @@ class BaseDocument(object):
if isinstance(data["_data"], SON):
data["_data"] = self.__class__._from_son(data["_data"])._data
for k in ('_changed_fields', '_initialised', '_created', '_data',
'_fields_ordered', '_dynamic_fields'):
'_dynamic_fields'):
if k in data:
setattr(self, k, data[k])
if '_fields_ordered' in data:
setattr(type(self), '_fields_ordered', data['_fields_ordered'])
dynamic_fields = data.get('_dynamic_fields') or SON()
for k in dynamic_fields.keys():
setattr(self, k, data["_data"].get(k))
@@ -182,7 +207,7 @@ class BaseDocument(object):
"""Dictionary-style field access, set a field's value.
"""
# Ensure that the field exists before settings its value
if name not in self._fields:
if not self._dynamic and name not in self._fields:
raise KeyError(name)
return setattr(self, name, value)
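(A sketch of what this relaxation permits, with a hypothetical DynamicDocument subclass: dictionary-style assignment no longer raises KeyError for undeclared fields on dynamic documents.)

    from mongoengine import DynamicDocument, StringField

    class Page(DynamicDocument):
        title = StringField()

    page = Page(title='Home')
    page['title'] = 'Index'  # declared field: allowed as before
    page['views'] = 42       # undeclared field: now allowed because Page is dynamic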
@@ -214,8 +239,9 @@ class BaseDocument(object):
def __eq__(self, other):
if isinstance(other, self.__class__) and hasattr(other, 'id'):
if self.id == other.id:
return True
return self.id == other.id
if isinstance(other, DBRef):
return self._get_collection_name() == other.collection and self.id == other.id
return False
def __ne__(self, other):
@@ -238,21 +264,43 @@ class BaseDocument(object):
"""
pass
def to_mongo(self):
"""Return as SON data ready for use with MongoDB.
def to_mongo(self, use_db_field=True, fields=[]):
"""
Return as SON data ready for use with MongoDB.
"""
data = SON()
data["_id"] = None
data['_cls'] = self._class_name
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
# only root fields ['test1.a', 'test2'] => ['test1', 'test2']
root_fields = set([f.split('.')[0] for f in fields])
for field_name in self:
if root_fields and field_name not in root_fields:
continue
value = self._data.get(field_name, None)
field = self._fields.get(field_name)
if field is None and self._dynamic:
field = self._dynamic_fields.get(field_name)
if value is not None:
value = field.to_mongo(value)
if isinstance(field, (EmbeddedDocumentField)):
if fields:
key = '%s.' % field_name
embedded_fields = [
i.replace(key, '') for i in fields
if i.startswith(key)]
else:
embedded_fields = []
value = field.to_mongo(value, use_db_field=use_db_field,
fields=embedded_fields)
else:
value = field.to_mongo(value)
# Handle self generating fields
if value is None and field._auto_gen:
@@ -260,7 +308,10 @@ class BaseDocument(object):
self._data[field_name] = value
if value is not None:
data[field.db_field] = value
if use_db_field:
data[field.db_field] = value
else:
data[field.name] = value
# If "_id" has not been set, then try and set it
Document = _import_class("Document")
@@ -273,7 +324,7 @@ class BaseDocument(object):
# Only add _cls if allow_inheritance is True
if (not hasattr(self, '_meta') or
not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
data.pop('_cls')
return data
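(An illustrative sketch of the new fields filter, using hypothetical documents: only the listed root fields and embedded subfields are serialised.)

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField)

    class Author(EmbeddedDocument):
        name = StringField()
        email = StringField()

    class Book(Document):
        title = StringField()
        author = EmbeddedDocumentField(Author)

    book = Book(title='Dune', author=Author(name='Frank', email='f@example.com'))
    son = book.to_mongo(fields=['title', 'author.name'])
    # son carries 'title' and 'author' (with only 'name'); 'author.email' is omitted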
@@ -295,7 +346,8 @@ class BaseDocument(object):
self._data.get(name)) for name in self._fields_ordered]
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField")
GenericEmbeddedDocumentField = _import_class(
"GenericEmbeddedDocumentField")
for field, value in fields:
if value is not None:
@@ -317,14 +369,18 @@ class BaseDocument(object):
pk = "None"
if hasattr(self, 'pk'):
pk = self.pk
elif self._instance:
elif self._instance and hasattr(self._instance, 'pk'):
pk = self._instance.pk
message = "ValidationError (%s:%s) " % (self._class_name, pk)
raise ValidationError(message, errors=errors)
def to_json(self, *args, **kwargs):
"""Converts a document to JSON"""
return json_util.dumps(self.to_mongo(), *args, **kwargs)
"""Converts a document to JSON.
:param use_db_field: True by default; when False the JSON output uses the python field names instead of the stored MongoDB db_field names
"""
use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
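(For example, a sketch with a hypothetical field mapped to a shorter db_field name:)

    from mongoengine import Document, StringField

    class Person(Document):
        name = StringField(db_field='n')

    person = Person(name='Ada')
    person.to_json()                    # {... "n": "Ada" ...}   db_field names (default)
    person.to_json(use_db_field=False)  # {... "name": "Ada" ...} python field names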
@classmethod
def from_json(cls, json_data):
@@ -358,7 +414,7 @@ class BaseDocument(object):
# Convert lists / values so we can watch for any changes on them
if (isinstance(value, (list, tuple)) and
not isinstance(value, BaseList)):
not isinstance(value, BaseList)):
value = BaseList(value, self, name)
elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, self, name)
@@ -370,9 +426,18 @@ class BaseDocument(object):
"""
if not key:
return
key = self._db_field_map.get(key, key)
if (hasattr(self, '_changed_fields') and
key not in self._changed_fields):
if not hasattr(self, '_changed_fields'):
return
if '.' in key:
key, rest = key.split('.', 1)
key = self._db_field_map.get(key, key)
key = '%s.%s' % (key, rest)
else:
key = self._db_field_map.get(key, key)
if key not in self._changed_fields:
self._changed_fields.append(key)
def _clear_changed_fields(self):
@@ -392,6 +457,8 @@ class BaseDocument(object):
else:
data = getattr(data, part, None)
if hasattr(data, "_changed_fields"):
if hasattr(data, "_is_document") and data._is_document:
continue
data._changed_fields = []
self._changed_fields = []
@@ -405,12 +472,17 @@ class BaseDocument(object):
for index, value in iterator:
list_key = "%s%s." % (key, index)
# don't check anything lower if this key is already marked
# as changed.
if list_key[:-1] in changed_fields:
continue
if hasattr(value, '_get_changed_fields'):
changed = value._get_changed_fields(inspected)
changed_fields += ["%s%s" % (list_key, k)
for k in changed if k]
for k in changed if k]
elif isinstance(value, (list, tuple, dict)):
self._nestable_types_changed_fields(changed_fields, list_key, value, inspected)
self._nestable_types_changed_fields(
changed_fields, list_key, value, inspected)
def _get_changed_fields(self, inspected=None):
"""Returns a list of all fields that have explicitly been changed.
@@ -420,6 +492,7 @@ class BaseDocument(object):
ReferenceField = _import_class("ReferenceField")
changed_fields = []
changed_fields += getattr(self, '_changed_fields', [])
inspected = inspected or set()
if hasattr(self, 'id') and isinstance(self.id, Hashable):
if self.id in inspected:
@@ -439,16 +512,17 @@ class BaseDocument(object):
if isinstance(field, ReferenceField):
continue
elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
and db_field_name not in changed_fields):
and db_field_name not in changed_fields):
# Find all embedded fields that have been changed
changed = data._get_changed_fields(inspected)
changed_fields += ["%s%s" % (key, k) for k in changed if k]
elif (isinstance(data, (list, tuple, dict)) and
db_field_name not in changed_fields):
if (hasattr(field, 'field') and
isinstance(field.field, ReferenceField)):
isinstance(field.field, ReferenceField)):
continue
self._nestable_types_changed_fields(changed_fields, key, data, inspected)
self._nestable_types_changed_fields(
changed_fields, key, data, inspected)
return changed_fields
def _delta(self):
@@ -472,7 +546,10 @@ class BaseDocument(object):
if isinstance(d, (ObjectId, DBRef)):
break
elif isinstance(d, list) and p.isdigit():
d = d[int(p)]
try:
d = d[int(p)]
except IndexError:
d = None
elif hasattr(d, 'get'):
d = d.get(p)
new_path.append(p)
@@ -491,7 +568,7 @@ class BaseDocument(object):
# If we've set a value that isn't the default value, don't unset it.
default = None
if (self._dynamic and len(parts) and parts[0] in
self._dynamic_fields):
self._dynamic_fields):
del(set_data[path])
unset_data[path] = 1
continue
@@ -537,7 +614,7 @@ class BaseDocument(object):
return cls._meta.get('collection', None)
@classmethod
def _from_son(cls, son, _auto_dereference=True):
def _from_son(cls, son, _auto_dereference=True, only_fields=[]):
"""Create an instance of a Document (subclass) from a PyMongo SON.
"""
@@ -545,10 +622,6 @@ class BaseDocument(object):
# class if unavailable
class_name = son.get('_cls', cls._class_name)
data = dict(("%s" % key, value) for key, value in son.iteritems())
if not UNICODE_KWARGS:
# python 2.6.4 and lower cannot handle unicode keys
# passed to class constructor example: cls(**data)
to_str_keys_recursive(data)
# Return correct subclass for document type
if class_name != cls._class_name:
@@ -578,19 +651,24 @@ class BaseDocument(object):
default = default()
if isinstance(default, BaseDocument):
changed_fields.append(field_name)
elif not only_fields or field_name in only_fields:
changed_fields.append(field_name)
if errors_dict:
errors = "\n".join(["%s - %s" % (k, v)
for k, v in errors_dict.items()])
for k, v in errors_dict.items()])
msg = ("Invalid data to create a `%s` instance.\n%s"
% (cls._class_name, errors))
raise InvalidDocumentError(msg)
obj = cls(__auto_convert=False, **data)
if cls.STRICT:
data = dict((k, v)
for k, v in data.iteritems() if k in cls._fields)
obj = cls(__auto_convert=False, _created=False, __only_fields=only_fields, **data)
obj._changed_fields = changed_fields
obj._created = False
if not _auto_dereference:
obj._fields = fields
return obj
@classmethod
@@ -647,15 +725,18 @@ class BaseDocument(object):
if isinstance(key, (list, tuple)):
continue
# ASCENDING from +,
# ASCENDING from +
# DESCENDING from -
# GEO2D from *
# TEXT from $
direction = pymongo.ASCENDING
if key.startswith("-"):
direction = pymongo.DESCENDING
elif key.startswith("*"):
direction = pymongo.GEO2D
if key.startswith(("+", "-", "*")):
elif key.startswith("$"):
direction = pymongo.TEXT
if key.startswith(("+", "-", "*", "$")):
key = key[1:]
# Use real field name, do it manually because we need field
@@ -666,8 +747,14 @@ class BaseDocument(object):
fields = []
else:
fields = cls._lookup_field(parts)
parts = [field if field == '_id' else field.db_field
for field in fields]
parts = []
for field in fields:
try:
if field != "_id":
field = field.db_field
except AttributeError:
pass
parts.append(field)
key = '.'.join(parts)
index_list.append((key, direction))
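(So an index spec can now request a TEXT index with a $ prefix alongside the existing +/-/* prefixes; a sketch with a hypothetical document:)

    from mongoengine import Document, IntField, StringField

    class Article(Document):
        title = StringField()
        rating = IntField()
        meta = {
            'indexes': [
                '$title',               # TEXT index on title
                ('-rating', '+title'),  # rating DESCENDING, title ASCENDING
            ]
        }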
@@ -724,7 +811,7 @@ class BaseDocument(object):
# Grab any embedded document field unique indexes
if (field.__class__.__name__ == "EmbeddedDocumentField" and
field.document_type != cls):
field.document_type != cls):
field_namespace = "%s." % field_name
doc_cls = field.document_type
unique_indexes += doc_cls._unique_with_indexes(field_namespace)
@@ -740,7 +827,8 @@ class BaseDocument(object):
geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
"PointField", "LineStringField", "PolygonField"]
geo_field_types = tuple([_import_class(field) for field in geo_field_type_names])
geo_field_types = tuple([_import_class(field)
for field in geo_field_type_names])
for field in cls._fields.values():
if not isinstance(field, geo_field_types):
@@ -750,13 +838,14 @@ class BaseDocument(object):
if field_cls in inspected:
continue
if hasattr(field_cls, '_geo_indices'):
geo_indices += field_cls._geo_indices(inspected, parent_field=field.db_field)
geo_indices += field_cls._geo_indices(
inspected, parent_field=field.db_field)
elif field._geo_index:
field_name = field.db_field
if parent_field:
field_name = "%s.%s" % (parent_field, field_name)
geo_indices.append({'fields':
[(field_name, field._geo_index)]})
[(field_name, field._geo_index)]})
return geo_indices
@classmethod
@@ -804,8 +893,17 @@ class BaseDocument(object):
# Look up subfield on the previous field
new_field = field.lookup_member(field_name)
if not new_field and isinstance(field, ComplexBaseField):
fields.append(field_name)
continue
if hasattr(field.field, 'document_type') and cls._dynamic \
and field.field.document_type._dynamic:
DynamicField = _import_class('DynamicField')
new_field = DynamicField(db_field=field_name)
else:
fields.append(field_name)
continue
elif not new_field and hasattr(field, 'document_type') and cls._dynamic \
and field.document_type._dynamic:
DynamicField = _import_class('DynamicField')
new_field = DynamicField(db_field=field_name)
elif not new_field:
raise LookUpError('Cannot resolve field "%s"'
% field_name)
@@ -825,7 +923,11 @@ class BaseDocument(object):
"""Dynamically set the display value for a field with choices"""
for attr_name, field in self._fields.items():
if field.choices:
setattr(self,
if self._dynamic:
obj = self
else:
obj = type(self)
setattr(obj,
'get_%s_display' % attr_name,
partial(self.__get_field_display, field=field))
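(A sketch of the resulting behaviour for a non-dynamic document with choices; the display helper is now attached to the class rather than rebuilt on every instance:)

    from mongoengine import Document, StringField

    class Shirt(Document):
        size = StringField(choices=(('S', 'Small'), ('M', 'Medium')))

    shirt = Shirt(size='M')
    shirt.get_size_display()  # 'Medium'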

View File

@@ -11,10 +11,12 @@ from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
__all__ = ("BaseField", "ComplexBaseField",
"ObjectIdField", "GeoJsonBaseField")
class BaseField(object):
"""A base class for fields in a MongoDB document. Instances of this class
may be added to subclasses of `Document` to define a document's schema.
@@ -43,7 +45,7 @@ class BaseField(object):
:param required: If the field is required. Whether it has to have a
value or not. Defaults to False.
:param default: (optional) The default value for this field if no value
has been set (or if the value has been unset). It Can be a
has been set (or if the value has been unset). It can be a
callable.
:param unique: Is the field value unique or not. Defaults to False.
:param unique_with: (optional) The other field this field should be
@@ -60,6 +62,7 @@ class BaseField(object):
used when generating model forms from the document model.
"""
self.db_field = (db_field or name) if not primary_key else '_id'
if name:
msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
warnings.warn(msg, DeprecationWarning)
@@ -105,7 +108,7 @@ class BaseField(object):
if instance._initialised:
try:
if (self.name not in instance._data or
instance._data[self.name] != value):
instance._data[self.name] != value):
instance._mark_as_changed(self.name)
except:
# Values cant be compared eg: naive and tz datetimes
@@ -113,7 +116,7 @@ class BaseField(object):
instance._mark_as_changed(self.name)
EmbeddedDocument = _import_class('EmbeddedDocument')
if isinstance(value, EmbeddedDocument) and value._instance is None:
if isinstance(value, EmbeddedDocument):
value._instance = weakref.proxy(instance)
instance._data[self.name] = value
@@ -175,6 +178,7 @@ class BaseField(object):
class ComplexBaseField(BaseField):
"""Handles complex fields, such as lists / dictionaries.
Allows for nesting of embedded documents inside complex types.
@@ -197,7 +201,7 @@ class ComplexBaseField(BaseField):
GenericReferenceField = _import_class('GenericReferenceField')
dereference = (self._auto_dereference and
(self.field is None or isinstance(self.field,
(GenericReferenceField, ReferenceField))))
(GenericReferenceField, ReferenceField))))
_dereference = _import_class("DeReference")()
@@ -212,7 +216,7 @@ class ComplexBaseField(BaseField):
# Convert lists / values so we can watch for any changes on them
if (isinstance(value, (list, tuple)) and
not isinstance(value, BaseList)):
not isinstance(value, BaseList)):
value = BaseList(value, instance, self.name)
instance._data[self.name] = value
elif isinstance(value, dict) and not isinstance(value, BaseDict):
@@ -220,8 +224,8 @@ class ComplexBaseField(BaseField):
instance._data[self.name] = value
if (self._auto_dereference and instance._initialised and
isinstance(value, (BaseList, BaseDict))
and not value._dereferenced):
isinstance(value, (BaseList, BaseDict))
and not value._dereferenced):
value = _dereference(
value, max_depth=1, instance=instance, name=self.name
)
@@ -384,6 +388,7 @@ class ComplexBaseField(BaseField):
class ObjectIdField(BaseField):
"""A field wrapper around MongoDB's ObjectIds.
"""
@@ -412,6 +417,7 @@ class ObjectIdField(BaseField):
class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.
.. versionadded:: 0.8
"""
@@ -435,7 +441,8 @@ class GeoJsonBaseField(BaseField):
if isinstance(value, dict):
if set(value.keys()) == set(['type', 'coordinates']):
if value['type'] != self._type:
self.error('%s type must be "%s"' % (self._name, self._type))
self.error('%s type must be "%s"' %
(self._name, self._type))
return self.validate(value['coordinates'])
else:
self.error('%s can only accept a valid GeoJson dictionary'

View File

@@ -16,6 +16,7 @@ __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
class DocumentMetaclass(type):
"""Metaclass for all documents.
"""
@@ -29,6 +30,7 @@ class DocumentMetaclass(type):
return super_new(cls, name, bases, attrs)
attrs['_is_document'] = attrs.get('_is_document', False)
attrs['_cached_reference_fields'] = []
# EmbeddedDocuments could have meta data for inheritance
if 'meta' in attrs:
@@ -90,7 +92,7 @@ class DocumentMetaclass(type):
# Set _fields and db_field maps
attrs['_fields'] = doc_fields
attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
for k, v in doc_fields.iteritems()])
for k, v in doc_fields.iteritems()])
attrs['_reverse_db_field_map'] = dict(
(v, k) for k, v in attrs['_db_field_map'].iteritems())
@@ -105,7 +107,7 @@ class DocumentMetaclass(type):
class_name = [name]
for base in flattened_bases:
if (not getattr(base, '_is_base_cls', True) and
not getattr(base, '_meta', {}).get('abstract', True)):
not getattr(base, '_meta', {}).get('abstract', True)):
# Collate hierarchy for _cls and _subclasses
class_name.append(base.__name__)
@@ -115,7 +117,7 @@ class DocumentMetaclass(type):
allow_inheritance = base._meta.get('allow_inheritance',
ALLOW_INHERITANCE)
if (allow_inheritance is not True and
not base._meta.get('abstract')):
not base._meta.get('abstract')):
raise ValueError('Document %s may not be subclassed' %
base.__name__)
@@ -141,7 +143,8 @@ class DocumentMetaclass(type):
base._subclasses += (_cls,)
base._types = base._subclasses # TODO deprecate _types
Document, EmbeddedDocument, DictField = cls._import_classes()
(Document, EmbeddedDocument, DictField,
CachedReferenceField) = cls._import_classes()
if issubclass(new_class, Document):
new_class._collection = None
@@ -170,6 +173,20 @@ class DocumentMetaclass(type):
f = field
f.owner_document = new_class
delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
if isinstance(f, CachedReferenceField):
if issubclass(new_class, EmbeddedDocument):
raise InvalidDocumentError(
"CachedReferenceFields is not allowed in EmbeddedDocuments")
if not f.document_type:
raise InvalidDocumentError(
"Document is not avaiable to sync")
if f.auto_sync:
f.start_listener()
f.document_type._cached_reference_fields.append(f)
if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
delete_rule = getattr(f.field,
'reverse_delete_rule',
@@ -191,7 +208,7 @@ class DocumentMetaclass(type):
field.name, delete_rule)
if (field.name and hasattr(Document, field.name) and
EmbeddedDocument not in new_class.mro()):
EmbeddedDocument not in new_class.mro()):
msg = ("%s is a document method and not a valid "
"field name" % field.name)
raise InvalidDocumentError(msg)
@@ -224,10 +241,12 @@ class DocumentMetaclass(type):
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
DictField = _import_class('DictField')
return (Document, EmbeddedDocument, DictField)
CachedReferenceField = _import_class('CachedReferenceField')
return (Document, EmbeddedDocument, DictField, CachedReferenceField)
class TopLevelDocumentMetaclass(DocumentMetaclass):
"""Metaclass for top-level documents (i.e. documents that have their own
collection in the database).
"""
@@ -275,21 +294,21 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Find the parent document class
parent_doc_cls = [b for b in flattened_bases
if b.__class__ == TopLevelDocumentMetaclass]
if b.__class__ == TopLevelDocumentMetaclass]
parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]
# Prevent classes setting collection different to their parents
# If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
and not parent_doc_cls._meta.get('abstract', True)):
msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning)
del(attrs['_meta']['collection'])
and not parent_doc_cls._meta.get('abstract', True)):
msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning)
del(attrs['_meta']['collection'])
# Ensure abstract documents have abstract bases
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
if (parent_doc_cls and
not parent_doc_cls._meta.get('abstract', False)):
not parent_doc_cls._meta.get('abstract', False)):
msg = "Abstract document cannot have non-abstract base"
raise ValueError(msg)
return super_new(cls, name, bases, attrs)
@@ -306,7 +325,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
# Set collection in the meta if its callable
if (getattr(base, '_is_document', False) and
not base._meta.get('abstract')):
not base._meta.get('abstract')):
collection = meta.get('collection', None)
if callable(collection):
meta['collection'] = collection(base)
@@ -318,7 +337,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
simple_class = all([b._meta.get('abstract')
for b in flattened_bases if hasattr(b, '_meta')])
if (not simple_class and meta['allow_inheritance'] is False and
not meta['abstract']):
not meta['abstract']):
raise ValueError('Only direct subclasses of Document may set '
'"allow_inheritance" to False')
@@ -359,7 +378,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
new_class.id = field
# Set primary key if not defined by the document
new_class._auto_id_field = False
new_class._auto_id_field = getattr(parent_doc_cls,
'_auto_id_field', False)
if not new_class._meta.get('id_field'):
new_class._auto_id_field = True
new_class._meta['id_field'] = 'id'
@@ -377,7 +397,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
for exc in exceptions_to_merge:
name = exc.__name__
parents = tuple(getattr(base, name) for base in flattened_bases
if hasattr(base, name)) or (exc,)
if hasattr(base, name)) or (exc,)
# Create new exception and set to new_class
exception = type(name, parents, {'__module__': module})
setattr(new_class, name, exception)
@@ -386,6 +406,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
class MetaDict(dict):
"""Custom dictionary for meta classes.
Handles the merging of set indexes
"""
@@ -400,5 +421,6 @@ class MetaDict(dict):
class BasesTuple(tuple):
"""Special class to handle introspection of bases tuple in __new__"""
pass

View File

@@ -25,6 +25,7 @@ def _import_class(cls_name):
'GenericEmbeddedDocumentField', 'GeoPointField',
'PointField', 'LineStringField', 'ListField',
'PolygonField', 'ReferenceField', 'StringField',
'CachedReferenceField',
'ComplexBaseField', 'GeoJsonBaseField')
queryset_classes = ('OperationError',)
deref_classes = ('DeReference',)

View File

@@ -19,8 +19,9 @@ _dbs = {}
def register_connection(alias, name, host=None, port=None,
is_slave=False, read_preference=False, slaves=None,
username=None, password=None, **kwargs):
read_preference=False,
username=None, password=None, authentication_source=None,
**kwargs):
"""Add a connection.
:param alias: the name that will be used to refer to this connection
@@ -28,14 +29,11 @@ def register_connection(alias, name, host=None, port=None,
:param name: the name of the specific database to use
:param host: the host name of the :program:`mongod` instance to connect to
:param port: the port that the :program:`mongod` instance is running on
:param is_slave: whether the connection can act as a slave
** Deprecated pymongo 2.0.1+
:param read_preference: The read preference for the collection
** Added pymongo 2.1
:param slaves: a list of aliases of slave connections; each of these must
be a registered connection that has :attr:`is_slave` set to ``True``
:param username: username to authenticate with
:param password: password to authenticate with
:param authentication_source: database to authenticate against
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
"""
@@ -45,11 +43,10 @@ def register_connection(alias, name, host=None, port=None,
'name': name,
'host': host or 'localhost',
'port': port or 27017,
'is_slave': is_slave,
'slaves': slaves or [],
'read_preference': read_preference,
'username': username,
'password': password,
'read_preference': read_preference
'authentication_source': authentication_source
}
# Handle uri style connections
@@ -64,6 +61,10 @@ def register_connection(alias, name, host=None, port=None,
if "replicaSet" in conn_settings['host']:
conn_settings['replicaSet'] = True
# Deprecated parameters that should not be passed on
kwargs.pop('slaves', None)
kwargs.pop('is_slave', None)
conn_settings.update(kwargs)
_connection_settings[alias] = conn_settings
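(A connection can now authenticate against a database other than the one holding the data; a sketch with hypothetical credentials:)

    from mongoengine import connect

    connect('app_db',
            username='app_user',
            password='secret',
            authentication_source='admin')  # credentials checked against the admin db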
@@ -93,20 +94,10 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
raise ConnectionError(msg)
conn_settings = _connection_settings[alias].copy()
if hasattr(pymongo, 'version_tuple'): # Support for 2.1+
conn_settings.pop('name', None)
conn_settings.pop('slaves', None)
conn_settings.pop('is_slave', None)
conn_settings.pop('username', None)
conn_settings.pop('password', None)
else:
# Get all the slave connections
if 'slaves' in conn_settings:
slaves = []
for slave_alias in conn_settings['slaves']:
slaves.append(get_connection(slave_alias))
conn_settings['slaves'] = slaves
conn_settings.pop('read_preference', None)
conn_settings.pop('name', None)
conn_settings.pop('username', None)
conn_settings.pop('password', None)
conn_settings.pop('authentication_source', None)
connection_class = MongoClient
if 'replicaSet' in conn_settings:
@@ -119,7 +110,17 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
connection_class = MongoReplicaSetClient
try:
_connections[alias] = connection_class(**conn_settings)
connection = None
connection_settings_iterator = ((alias, settings.copy()) for alias, settings in _connection_settings.iteritems())
for alias, connection_settings in connection_settings_iterator:
connection_settings.pop('name', None)
connection_settings.pop('username', None)
connection_settings.pop('password', None)
if conn_settings == connection_settings and _connections.get(alias, None):
connection = _connections[alias]
break
_connections[alias] = connection if connection else connection_class(**conn_settings)
except Exception, e:
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
return _connections[alias]
@@ -137,7 +138,8 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
# Authenticate if necessary
if conn_settings['username'] and conn_settings['password']:
db.authenticate(conn_settings['username'],
conn_settings['password'])
conn_settings['password'],
source=conn_settings['authentication_source'])
_dbs[alias] = db
return _dbs[alias]

View File

@@ -1,6 +1,5 @@
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.queryset import QuerySet
__all__ = ("switch_db", "switch_collection", "no_dereference",
@@ -162,12 +161,6 @@ class no_sub_classes(object):
return self.cls
class QuerySetNoDeRef(QuerySet):
"""Special no_dereference QuerySet"""
def __dereference(items, max_depth=1, instance=None, name=None):
return items
class query_counter(object):
""" Query_counter context manager to get the number of queries. """

View File

@@ -12,7 +12,7 @@ class DeReference(object):
def __call__(self, items, max_depth=1, instance=None, name=None):
"""
Cheaply dereferences the items to a set depth.
Also handles the convertion of complex data types.
Also handles the conversion of complex data types.
:param items: The iterable (dict, list, queryset) to be dereferenced.
:param max_depth: The maximum depth to recurse to
@@ -36,7 +36,7 @@ class DeReference(object):
if instance and isinstance(instance, (Document, EmbeddedDocument,
TopLevelDocumentMetaclass)):
doc_type = instance._fields.get(name)
if hasattr(doc_type, 'field'):
while hasattr(doc_type, 'field'):
doc_type = doc_type.field
if isinstance(doc_type, ReferenceField):
@@ -51,9 +51,19 @@ class DeReference(object):
return items
elif not field.dbref:
if not hasattr(items, 'items'):
items = [field.to_python(v)
if not isinstance(v, (DBRef, Document)) else v
for v in items]
def _get_items(items):
new_items = []
for v in items:
if isinstance(v, list):
new_items.append(_get_items(v))
elif not isinstance(v, (DBRef, Document)):
new_items.append(field.to_python(v))
else:
new_items.append(v)
return new_items
items = _get_items(items)
else:
items = dict([
(k, field.to_python(v))
@@ -85,7 +95,7 @@ class DeReference(object):
# Recursively find dbreferences
depth += 1
for k, item in iterator:
if isinstance(item, Document):
if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None)
if isinstance(v, (DBRef)):
@@ -114,11 +124,11 @@ class DeReference(object):
"""Fetch all references and convert to their document objects
"""
object_map = {}
for col, dbrefs in self.reference_map.iteritems():
for collection, dbrefs in self.reference_map.iteritems():
keys = object_map.keys()
refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
if hasattr(col, 'objects'): # We have a document class for the refs
references = col.objects.in_bulk(refs)
if hasattr(collection, 'objects'): # We have a document class for the refs
references = collection.objects.in_bulk(refs)
for key, doc in references.iteritems():
object_map[key] = doc
else: # Generic reference: use the refs data to convert to document
@@ -126,19 +136,19 @@ class DeReference(object):
continue
if doc_type:
references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
for ref in references:
doc = doc_type._from_son(ref)
object_map[doc.id] = doc
else:
references = get_db()[col].find({'_id': {'$in': refs}})
references = get_db()[collection].find({'_id': {'$in': refs}})
for ref in references:
if '_cls' in ref:
doc = get_document(ref["_cls"])._from_son(ref)
elif doc_type is None:
doc = get_document(
''.join(x.capitalize()
for x in col.split('_')))._from_son(ref)
for x in collection.split('_')))._from_son(ref)
else:
doc = doc_type._from_son(ref)
object_map[doc.id] = doc
@@ -192,7 +202,7 @@ class DeReference(object):
if k in self.object_map and not is_list:
data[k] = self.object_map[k]
elif isinstance(v, Document):
elif isinstance(v, (Document, EmbeddedDocument)):
for field_name, field in v._fields.iteritems():
v = data[k]._data.get(field_name, None)
if isinstance(v, (DBRef)):
@@ -204,7 +214,8 @@ class DeReference(object):
elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
item_name = '%s.%s' % (name, k) if name else name
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
elif hasattr(v, 'id'):
data[k] = self.object_map.get(v.id, v)

View File

@@ -1,39 +1,31 @@
#coding: utf-8
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from unittest import TestCase
from mongoengine import connect
try:
from django.test import TestCase
from django.conf import settings
except Exception as err:
if PY3:
from unittest import TestCase
# Dummy value so no error
class settings:
MONGO_DATABASE_NAME = 'dummy'
else:
raise err
from mongoengine.connection import get_db
class MongoTestCase(TestCase):
def setUp(self):
if PY3:
raise SkipTest('django does not have Python 3 support')
"""
TestCase class that clears the collections between tests
"""
db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
@property
def db_name(self):
from django.conf import settings
return 'test_%s' % getattr(settings, 'MONGO_DATABASE_NAME', 'dummy')
def __init__(self, methodName='runtest'):
self.db = connect(self.db_name).get_db()
connect(self.db_name)
self.db = get_db()
super(MongoTestCase, self).__init__(methodName)
def _post_teardown(self):
super(MongoTestCase, self)._post_teardown()
def dropCollections(self):
for collection in self.db.collection_names():
if collection == 'system.indexes':
continue
self.db.drop_collection(collection)
def tearDown(self):
self.dropCollections()
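(A usage sketch, assuming the mongoengine.django.tests import path and a MONGO_DATABASE_NAME in the Django settings; otherwise the 'dummy' fallback applies:)

    from mongoengine import Document, StringField
    from mongoengine.django.tests import MongoTestCase

    class Person(Document):
        name = StringField()

    class PersonTests(MongoTestCase):
        def test_create(self):
            Person(name='Ada').save()
            self.assertEqual(Person.objects.count(), 1)
            # tearDown drops every collection, so each test starts clean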

View File

@@ -13,7 +13,8 @@ from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass,
BaseDocument, BaseDict, BaseList,
ALLOW_INHERITANCE, get_document)
from mongoengine.errors import ValidationError
from mongoengine.queryset import OperationError, NotUniqueError, QuerySet
from mongoengine.queryset import (OperationError, NotUniqueError,
QuerySet, transform)
from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
from mongoengine.context_managers import switch_db, switch_collection
@@ -40,6 +41,7 @@ class InvalidCollectionError(Exception):
class EmbeddedDocument(BaseDocument):
"""A :class:`~mongoengine.Document` that isn't stored in its own
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
fields on :class:`~mongoengine.Document`\ s through the
@@ -54,20 +56,21 @@ class EmbeddedDocument(BaseDocument):
dictionary.
"""
__slots__ = ('_instance')
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass
my_metaclass = DocumentMetaclass
__metaclass__ = DocumentMetaclass
_instance = None
def __init__(self, *args, **kwargs):
super(EmbeddedDocument, self).__init__(*args, **kwargs)
self._instance = None
self._changed_fields = []
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.to_mongo() == other.to_mongo()
return self._data == other._data
return False
def __ne__(self, other):
@@ -75,6 +78,7 @@ class EmbeddedDocument(BaseDocument):
class Document(BaseDocument):
"""The base class used for defining the structure and properties of
collections of documents stored in MongoDB. Inherit from this class, and
add fields as class attributes to define a document's structure.
@@ -122,12 +126,15 @@ class Document(BaseDocument):
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = TopLevelDocumentMetaclass
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
__slots__ = ('__objects')
def pk():
"""Primary key alias
"""
def fget(self):
return getattr(self, self._meta['id_field'])
@@ -136,6 +143,13 @@ class Document(BaseDocument):
return property(fget, fset)
pk = pk()
@property
def text_score(self):
"""
Used for text searches
"""
return self._data.get('text_score')
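(Combined with the new $-prefixed text ordering, a sketch of a text search on a hypothetical document; assumes MongoDB 2.6+ and a text index on the field:)

    from mongoengine import Document, StringField

    class News(Document):
        title = StringField()
        meta = {'indexes': ['$title']}

    for news in News.objects.search_text('mongodb').order_by('$text_score'):
        print news.title, news.text_score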
@classmethod
def _get_db(cls):
"""Some Model using other db_alias"""
@@ -161,7 +175,7 @@ class Document(BaseDocument):
if options.get('max') != max_documents or \
options.get('size') != max_size:
msg = (('Cannot create collection "%s" as a capped '
'collection as it already exists')
'collection as it already exists')
% cls._collection)
raise InvalidCollectionError(msg)
else:
@@ -180,7 +194,7 @@ class Document(BaseDocument):
def save(self, force_insert=False, validate=True, clean=True,
write_concern=None, cascade=None, cascade_kwargs=None,
_refs=None, **kwargs):
_refs=None, save_condition=None, **kwargs):
"""Save the :class:`~mongoengine.Document` to the database. If the
document already exists, it will be updated, otherwise it will be
created.
@@ -203,7 +217,8 @@ class Document(BaseDocument):
:param cascade_kwargs: (optional) kwargs dictionary to be passed through
to cascading saves. Implies ``cascade=True``.
:param _refs: A list of processed references used in cascading saves
:param save_condition: only perform save if matching record in db
satisfies condition(s) (e.g., version number)
.. versionchanged:: 0.5
In existing documents it only saves changed fields using
set / unset. Saves are cascaded and any
@@ -217,6 +232,9 @@ class Document(BaseDocument):
meta['cascade'] = True. Also you can pass different kwargs to
the cascade save using cascade_kwargs which overwrites the
existing kwargs with custom values.
.. versionchanged:: 0.8.5
Optional save_condition that only overwrites existing documents
if the condition is satisfied in the current db record.
"""
signals.pre_save.send(self.__class__, document=self)
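(A minimal optimistic-locking sketch using save_condition, with a hypothetical version field; if the stored record no longer satisfies the condition, nothing is written, because the update is no longer an upsert:)

    from mongoengine import Document, IntField, StringField

    class Page(Document):
        content = StringField()
        version = IntField(default=1)

    page = Page.objects.first()
    page.content = 'updated'
    page.version += 1
    # only writes if the stored version still matches what we loaded
    page.save(save_condition={'version': page.version - 1})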
@@ -230,7 +248,8 @@ class Document(BaseDocument):
created = ('_id' not in doc or self._created or force_insert)
signals.pre_save_post_validation.send(self.__class__, document=self, created=created)
signals.pre_save_post_validation.send(self.__class__, document=self,
created=created)
try:
collection = self._get_collection()
@@ -243,7 +262,12 @@ class Document(BaseDocument):
object_id = doc['_id']
updates, removals = self._delta()
# Need to add shard key to query, or you get an error
select_dict = {'_id': object_id}
if save_condition is not None:
select_dict = transform.query(self.__class__,
**save_condition)
else:
select_dict = {}
select_dict['_id'] = object_id
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
actual_key = self._db_field_map.get(k, k)
@@ -263,12 +287,14 @@ class Document(BaseDocument):
if removals:
update_query["$unset"] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=True, **write_concern)
upsert=upsert, **write_concern)
created = is_new_object(last_error)
if cascade is None:
cascade = self._meta.get('cascade', False) or cascade_kwargs is not None
cascade = self._meta.get(
'cascade', False) or cascade_kwargs is not None
if cascade:
kwargs = {
@@ -293,12 +319,12 @@ class Document(BaseDocument):
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
id_field = self._meta['id_field']
if id_field not in self._meta.get('shard_key', []):
if created or id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id)
signals.post_save.send(self.__class__, document=self, created=created)
self._clear_changed_fields()
self._created = False
signals.post_save.send(self.__class__, document=self, created=created)
return self
def cascade_save(self, *args, **kwargs):
@@ -361,7 +387,8 @@ class Document(BaseDocument):
del(query["_cls"])
return self._qs.filter(**query).update_one(**kwargs)
else:
raise OperationError('attempt to update a document not yet saved')
raise OperationError(
'attempt to update a document not yet saved')
# Need to add shard key to query, or you get an error
return self._qs.filter(**self._object_key).update_one(**kwargs)
@@ -380,7 +407,8 @@ class Document(BaseDocument):
signals.pre_delete.send(self.__class__, document=self)
try:
self._qs.filter(**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
self._qs.filter(
**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
except pymongo.errors.OperationFailure, err:
message = u'Could not delete document (%s)' % err.message
raise OperationError(message)
@@ -447,27 +475,41 @@ class Document(BaseDocument):
DeReference()([self], max_depth + 1)
return self
def reload(self, max_depth=1):
def reload(self, *fields, **kwargs):
"""Reloads all attributes from the database.
:param fields: (optional) args list of fields to reload
:param max_depth: (optional) depth of dereferencing to follow
.. versionadded:: 0.1.2
.. versionchanged:: 0.6 Now chainable
.. versionchanged:: 0.9 Can provide specific fields to reload
"""
max_depth = 1
if fields and isinstance(fields[0], int):
max_depth = fields[0]
fields = fields[1:]
elif "max_depth" in kwargs:
max_depth = kwargs["max_depth"]
if not self.pk:
raise self.DoesNotExist("Document does not exist")
obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
**self._object_key).limit(1).select_related(max_depth=max_depth)
**self._object_key).only(*fields).limit(1
).select_related(max_depth=max_depth)
if obj:
obj = obj[0]
else:
raise self.DoesNotExist("Document does not exist")
for field in self._fields_ordered:
setattr(self, field, self._reload(field, obj[field]))
if not fields or field in fields:
setattr(self, field, self._reload(field, obj[field]))
self._changed_fields = obj._changed_fields
self._created = False
return obj
return self
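(A sketch of the new call styles; doc is any saved Document instance:)

    doc.reload()                  # reload every field, max_depth=1
    doc.reload('name', 'rating')  # reload only these two fields
    doc.reload(2)                 # a leading int is still treated as max_depth
    doc.reload(max_depth=2)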
def _reload(self, key, value):
"""Used by :meth:`~mongoengine.Document.reload` to ensure the
@@ -498,8 +540,8 @@ class Document(BaseDocument):
object.
"""
classes = [get_document(class_name)
for class_name in cls._subclasses
if class_name != cls.__name__] + [cls]
for class_name in cls._subclasses
if class_name != cls.__name__] + [cls]
documents = [get_document(class_name)
for class_name in document_cls._subclasses
if class_name != document_cls.__name__] + [document_cls]
@@ -521,7 +563,7 @@ class Document(BaseDocument):
@classmethod
def ensure_index(cls, key_or_list, drop_dups=False, background=False,
**kwargs):
**kwargs):
"""Ensure that the given indexes are in place.
:param key_or_list: a single index key or a list of index keys (to
@@ -576,7 +618,7 @@ class Document(BaseDocument):
# If _cls is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _cls
if (index_cls and not cls_indexed and
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
collection.ensure_index('_cls', background=background,
**index_opts)
@@ -591,24 +633,25 @@ class Document(BaseDocument):
# get all the base classes, subclasses and siblings
classes = []
def get_classes(cls):
if (cls not in classes and
isinstance(cls, TopLevelDocumentMetaclass)):
isinstance(cls, TopLevelDocumentMetaclass)):
classes.append(cls)
for base_cls in cls.__bases__:
if (isinstance(base_cls, TopLevelDocumentMetaclass) and
base_cls != Document and
not base_cls._meta.get('abstract') and
base_cls._get_collection().full_name == cls._get_collection().full_name and
base_cls not in classes):
base_cls != Document and
not base_cls._meta.get('abstract') and
base_cls._get_collection().full_name == cls._get_collection().full_name and
base_cls not in classes):
classes.append(base_cls)
get_classes(base_cls)
for subclass in cls.__subclasses__():
if (isinstance(base_cls, TopLevelDocumentMetaclass) and
subclass._get_collection().full_name == cls._get_collection().full_name and
subclass not in classes):
subclass._get_collection().full_name == cls._get_collection().full_name and
subclass not in classes):
classes.append(subclass)
get_classes(subclass)
@@ -636,8 +679,8 @@ class Document(BaseDocument):
if [(u'_id', 1)] not in indexes:
indexes.append([(u'_id', 1)])
if (cls._meta.get('index_cls', True) and
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
indexes.append([(u'_cls', 1)])
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
indexes.append([(u'_cls', 1)])
return indexes
@@ -648,7 +691,8 @@ class Document(BaseDocument):
"""
required = cls.list_indexes()
existing = [info['key'] for info in cls._get_collection().index_information().values()]
existing = [info['key']
for info in cls._get_collection().index_information().values()]
missing = [index for index in required if index not in existing]
extra = [index for index in existing if index not in required]
@@ -666,6 +710,7 @@ class Document(BaseDocument):
class DynamicDocument(Document):
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
way as an ordinary document but has expando style properties. Any data
@@ -681,7 +726,7 @@ class DynamicDocument(Document):
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = TopLevelDocumentMetaclass
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
_dynamic = True
@@ -697,6 +742,7 @@ class DynamicDocument(Document):
class DynamicEmbeddedDocument(EmbeddedDocument):
"""A Dynamic Embedded Document class allowing flexible, expandable and
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
information about dynamic documents.
@@ -704,7 +750,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass
my_metaclass = DocumentMetaclass
__metaclass__ = DocumentMetaclass
_dynamic = True
@@ -723,6 +769,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
class MapReduceDocument(object):
"""A document returned from a map/reduce query.
:param collection: An instance of :class:`~pymongo.Collection`
@@ -753,7 +800,7 @@ class MapReduceDocument(object):
try:
self.key = id_field_type(self.key)
except:
raise Exception("Could not cast key as %s" % \
raise Exception("Could not cast key as %s" %
id_field_type.__name__)
if not hasattr(self, "_key_object"):

View File

@@ -34,22 +34,24 @@ except ImportError:
Image = None
ImageOps = None
__all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
'SortedListField', 'DictField', 'MapField', 'ReferenceField',
'GenericReferenceField', 'BinaryField', 'GridFSError',
'GridFSProxy', 'FileField', 'ImageGridFsProxy',
'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField',
'GeoJsonBaseField']
__all__ = [
'StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
'SortedListField', 'DictField', 'MapField', 'ReferenceField',
'CachedReferenceField', 'GenericReferenceField', 'BinaryField',
'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy',
'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField',
'GeoJsonBaseField']
RECURSIVE_REFERENCE_CONSTANT = 'self'
class StringField(BaseField):
"""A unicode string field.
"""
@@ -109,6 +111,7 @@ class StringField(BaseField):
class URLField(StringField):
"""A field that validates input as an URL.
.. versionadded:: 0.3
@@ -116,7 +119,8 @@ class URLField(StringField):
_URL_REGEX = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
# domain...
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
@@ -145,15 +149,19 @@ class URLField(StringField):
class EmailField(StringField):
"""A field that validates input as an E-Mail-Address.
.. versionadded:: 0.4
"""
EMAIL_REGEX = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string
r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}$', re.IGNORECASE # domain
# dot-atom
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
# quoted-string
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'
# domain
r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}$', re.IGNORECASE
)
def validate(self, value):
@@ -163,6 +171,7 @@ class EmailField(StringField):
class IntField(BaseField):
"""An 32-bit integer field.
"""
@@ -197,6 +206,7 @@ class IntField(BaseField):
class LongField(BaseField):
"""An 64-bit integer field.
"""
@@ -231,6 +241,7 @@ class LongField(BaseField):
class FloatField(BaseField):
"""An floating point number field.
"""
@@ -265,6 +276,7 @@ class FloatField(BaseField):
class DecimalField(BaseField):
"""A fixed-point decimal number field.
.. versionchanged:: 0.8
@@ -311,7 +323,7 @@ class DecimalField(BaseField):
return value
return value.quantize(self.precision, rounding=self.rounding)
def to_mongo(self, value):
def to_mongo(self, value, use_db_field=True):
if value is None:
return value
if self.force_string:
@@ -338,6 +350,7 @@ class DecimalField(BaseField):
class BooleanField(BaseField):
"""A boolean field type.
.. versionadded:: 0.1.2
@@ -356,6 +369,7 @@ class BooleanField(BaseField):
class DateTimeField(BaseField):
"""A datetime field.
Uses the python-dateutil library if available; otherwise falls back to time.strptime
@@ -391,7 +405,7 @@ class DateTimeField(BaseField):
if dateutil:
try:
return dateutil.parser.parse(value)
except ValueError:
except (TypeError, ValueError):
return None
# split usecs, because they are not recognized by strptime.
@@ -406,15 +420,15 @@ class DateTimeField(BaseField):
kwargs = {'microsecond': usecs}
try: # Seconds are optional, so try converting seconds first.
return datetime.datetime(*time.strptime(value,
'%Y-%m-%d %H:%M:%S')[:6], **kwargs)
'%Y-%m-%d %H:%M:%S')[:6], **kwargs)
except ValueError:
try: # Try without seconds.
return datetime.datetime(*time.strptime(value,
'%Y-%m-%d %H:%M')[:5], **kwargs)
'%Y-%m-%d %H:%M')[:5], **kwargs)
except ValueError: # Try without hour/minutes/seconds.
try:
return datetime.datetime(*time.strptime(value,
'%Y-%m-%d')[:3], **kwargs)
'%Y-%m-%d')[:3], **kwargs)
except ValueError:
return None
@@ -423,6 +437,7 @@ class DateTimeField(BaseField):
class ComplexDateTimeField(StringField):
"""
ComplexDateTimeField handles microseconds exactly instead of rounding
like DateTimeField does.
@@ -525,6 +540,7 @@ class ComplexDateTimeField(StringField):
class EmbeddedDocumentField(BaseField):
"""An embedded document field - with a declared document_type.
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
"""
@@ -551,10 +567,11 @@ class EmbeddedDocumentField(BaseField):
return self.document_type._from_son(value)
return value
def to_mongo(self, value):
def to_mongo(self, value, use_db_field=True, fields=[]):
if not isinstance(value, self.document_type):
return value
return self.document_type.to_mongo(value)
return self.document_type.to_mongo(value, use_db_field,
fields=fields)
def validate(self, value, clean=True):
"""Make sure that the document instance is an instance of the
@@ -574,6 +591,7 @@ class EmbeddedDocumentField(BaseField):
class GenericEmbeddedDocumentField(BaseField):
"""A generic embedded document field - allows any
:class:`~mongoengine.EmbeddedDocument` to be stored.
@@ -601,17 +619,18 @@ class GenericEmbeddedDocumentField(BaseField):
value.validate(clean=clean)
def to_mongo(self, document):
def to_mongo(self, document, use_db_field=True):
if document is None:
return None
data = document.to_mongo()
data = document.to_mongo(use_db_field)
if not '_cls' in data:
data['_cls'] = document._class_name
return data
class DynamicField(BaseField):
"""A truly dynamic field type capable of handling different and varying
types of data.
@@ -628,7 +647,7 @@ class DynamicField(BaseField):
cls = value.__class__
val = value.to_mongo()
# If it's a document that's not inherited, add _cls
if (isinstance(value, Document)):
if (isinstance(value, Document)):
val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
if (isinstance(value, EmbeddedDocument)):
val['_cls'] = cls.__name__
@@ -675,6 +694,7 @@ class DynamicField(BaseField):
class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database.
@@ -693,21 +713,22 @@ class ListField(ComplexBaseField):
"""Make sure that a list of valid fields is being used.
"""
if (not isinstance(value, (list, tuple, QuerySet)) or
isinstance(value, basestring)):
isinstance(value, basestring)):
self.error('Only lists and tuples may be used in a list field')
super(ListField, self).validate(value)
def prepare_query_value(self, op, value):
if self.field:
if op in ('set', 'unset') and (not isinstance(value, basestring)
and not isinstance(value, BaseDocument)
and hasattr(value, '__iter__')):
and not isinstance(value, BaseDocument)
and hasattr(value, '__iter__')):
return [self.field.prepare_query_value(op, v) for v in value]
return self.field.prepare_query_value(op, value)
return super(ListField, self).prepare_query_value(op, value)
class SortedListField(ListField):
"""A ListField that sorts the contents of its list before writing to
the database in order to ensure that a sorted list is always
retrieved.
@@ -739,6 +760,7 @@ class SortedListField(ListField):
reverse=self._order_reverse)
return sorted(value, reverse=self._order_reverse)
def key_not_string(d):
""" Helper function to recursively determine if any key in a dictionary is
not a string.
@@ -747,6 +769,7 @@ def key_not_string(d):
if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)):
return True
def key_has_dot_or_dollar(d):
""" Helper function to recursively determine if any key in a dictionary
contains a dot or a dollar sign.
@@ -755,12 +778,14 @@ def key_has_dot_or_dollar(d):
if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)):
return True
class DictField(ComplexBaseField):
"""A dictionary field that wraps a standard Python dictionary. This is
similar to an embedded document, but the structure is not defined.
.. note::
Required means it cannot be empty - as the default for ListFields is []
Required means it cannot be empty - as the default for DictFields is {}
.. versionadded:: 0.3
.. versionchanged:: 0.5 - Can now handle complex / varying types of data
@@ -807,6 +832,7 @@ class DictField(ComplexBaseField):
class MapField(DictField):
"""A field that maps a name to a specified field type. Similar to
a DictField, except the 'value' of each item must match the specified
field type.
@@ -822,6 +848,7 @@ class MapField(DictField):
class ReferenceField(BaseField):
"""A reference to a document that will be automatically dereferenced on
access (lazily).
@@ -932,7 +959,7 @@ class ReferenceField(BaseField):
"""Convert a MongoDB-compatible type to a Python type.
"""
if (not self.dbref and
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
collection = self.document_type._get_collection_name()
value = DBRef(collection, self.document_type.id.to_python(value))
return value
@@ -955,7 +982,147 @@ class ReferenceField(BaseField):
return self.document_type._fields.get(member_name)
class CachedReferenceField(BaseField):
"""
A reference field that caches selected fields of the referenced document to provide pseudo-joins
.. versionadded:: 0.9
"""
def __init__(self, document_type, fields=[], auto_sync=True, **kwargs):
"""Initialises the Cached Reference Field.
:param fields: A list of fields to be cached in the document
:param auto_sync: if True, cached values are updated automatically when the referenced document is saved.
"""
if not isinstance(document_type, basestring) and \
not issubclass(document_type, (Document, basestring)):
self.error('Argument to CachedReferenceField constructor must be a'
' document class or a string')
self.auto_sync = auto_sync
self.document_type_obj = document_type
self.fields = fields
super(CachedReferenceField, self).__init__(**kwargs)
def start_listener(self):
from mongoengine import signals
signals.post_save.connect(self.on_document_pre_save,
sender=self.document_type)
def on_document_pre_save(self, sender, document, created, **kwargs):
if not created:
update_kwargs = dict(
('set__%s__%s' % (self.name, k), v)
for k, v in document._delta()[0].items()
if k in self.fields)
if update_kwargs:
filter_kwargs = {}
filter_kwargs[self.name] = document
self.owner_document.objects(
**filter_kwargs).update(**update_kwargs)
def to_python(self, value):
if isinstance(value, dict):
collection = self.document_type._get_collection_name()
value = DBRef(
collection, self.document_type.id.to_python(value['_id']))
return value
@property
def document_type(self):
if isinstance(self.document_type_obj, basestring):
if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
self.document_type_obj = self.owner_document
else:
self.document_type_obj = get_document(self.document_type_obj)
return self.document_type_obj
def __get__(self, instance, owner):
if instance is None:
# Document class being used rather than a document object
return self
# Get value from document instance if available
value = instance._data.get(self.name)
self._auto_dereference = instance._fields[self.name]._auto_dereference
# Dereference DBRefs
if self._auto_dereference and isinstance(value, DBRef):
value = self.document_type._get_db().dereference(value)
if value is not None:
instance._data[self.name] = self.document_type._from_son(value)
return super(CachedReferenceField, self).__get__(instance, owner)
def to_mongo(self, document):
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
doc_type = self.document_type
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.pk
if id_ is None:
self.error('You can only reference documents once they have'
' been saved to the database')
else:
self.error('Only accepts document objects')
value = SON((
("_id", id_field.to_mongo(id_)),
))
value.update(dict(document.to_mongo(fields=self.fields)))
return value
def prepare_query_value(self, op, value):
if value is None:
return None
if isinstance(value, Document):
if value.pk is None:
self.error('You can only reference documents once they have'
' been saved to the database')
return {'_id': value.pk}
raise NotImplementedError
def validate(self, value):
if not isinstance(value, self.document_type):
self.error("A CachedReferenceField only accepts documents")
if isinstance(value, Document) and value.id is None:
self.error('You can only reference documents once they have been '
'saved to the database')
def lookup_member(self, member_name):
return self.document_type._fields.get(member_name)
def sync_all(self):
"""
Sync all cached fields on demand.
Caution: this operation may be slow, as it updates the owner documents one referenced document at a time.
"""
update_key = 'set__%s' % self.name
for doc in self.document_type.objects:
filter_kwargs = {}
filter_kwargs[self.name] = doc
update_kwargs = {}
update_kwargs[update_key] = doc
self.owner_document.objects(
**filter_kwargs).update(**update_kwargs)
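For orientation, a minimal usage sketch of the new field, modelled on the Animal/Ocorrence pair exercised in the tests further down (names and data are illustrative):

    from mongoengine import Document, StringField, CachedReferenceField

    class Animal(Document):
        name = StringField()
        tag = StringField()

    class Ocorrence(Document):
        person = StringField()
        # cache only 'tag'; auto_sync (default True) refreshes cached
        # copies whenever the referenced Animal is saved
        animal = CachedReferenceField(Animal, fields=['tag'])

    a = Animal(name='Leopard', tag='heavy').save()
    Ocorrence(person='teste', animal=a).save()

    # the cached value is queryable without dereferencing Animal
    assert Ocorrence.objects(animal__tag='heavy').count() == 1

    # rebuild every cached copy on demand, e.g. after a bulk update
    Ocorrence.animal.sync_all()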
class GenericReferenceField(BaseField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
that will be automatically dereferenced on access (lazily).
@@ -974,6 +1141,7 @@ class GenericReferenceField(BaseField):
return self
value = instance._data.get(self.name)
self._auto_dereference = instance._fields[self.name]._auto_dereference
if self._auto_dereference and isinstance(value, (dict, SON)):
instance._data[self.name] = self.dereference(value)
@@ -1001,7 +1169,7 @@ class GenericReferenceField(BaseField):
doc = doc_cls._from_son(doc)
return doc
def to_mongo(self, document):
def to_mongo(self, document, use_db_field=True):
if document is None:
return None
@@ -1036,6 +1204,7 @@ class GenericReferenceField(BaseField):
class BinaryField(BaseField):
"""A binary data field.
"""
@@ -1056,7 +1225,7 @@ class BinaryField(BaseField):
if not isinstance(value, (bin_type, txt_type, Binary)):
self.error("BinaryField only accepts instances of "
"(%s, %s, Binary)" % (
bin_type.__name__, txt_type.__name__))
bin_type.__name__, txt_type.__name__))
if self.max_bytes is not None and len(value) > self.max_bytes:
self.error('Binary value is too long')
@@ -1067,6 +1236,7 @@ class GridFSError(Exception):
class GridFSProxy(object):
"""Proxy object to handle writing and reading of files to and from GridFS
.. versionadded:: 0.4
@@ -1121,7 +1291,8 @@ class GridFSProxy(object):
return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
def __str__(self):
name = getattr(self.get(), 'filename', self.grid_id) if self.get() else '(no file)'
name = getattr(
self.get(), 'filename', self.grid_id) if self.get() else '(no file)'
return '<%s: %s>' % (self.__class__.__name__, name)
def __eq__(self, other):
@@ -1135,7 +1306,8 @@ class GridFSProxy(object):
@property
def fs(self):
if not self._fs:
self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name)
self._fs = gridfs.GridFS(
get_db(self.db_alias), self.collection_name)
return self._fs
def get(self, id=None):
@@ -1209,6 +1381,7 @@ class GridFSProxy(object):
class FileField(BaseField):
"""A GridFS storage field.
.. versionadded:: 0.4
@@ -1253,7 +1426,8 @@ class FileField(BaseField):
pass
# Create a new proxy object as we don't already have one
instance._data[key] = self.get_proxy_obj(key=key, instance=instance)
instance._data[key] = self.get_proxy_obj(
key=key, instance=instance)
instance._data[key].put(value)
else:
instance._data[key] = value
@@ -1291,11 +1465,13 @@ class FileField(BaseField):
class ImageGridFsProxy(GridFSProxy):
"""
Proxy for ImageField
.. versionadded:: 0.6
"""
def put(self, file_obj, **kwargs):
"""
Insert an image into the database
@@ -1312,6 +1488,16 @@ class ImageGridFsProxy(GridFSProxy):
except Exception, e:
raise ValidationError('Invalid image: %s' % e)
# Progressive JPEG: only honoured when explicitly requested for JPEGs
if (kwargs.get('progressive') and
isinstance(kwargs.get('progressive'), bool) and
img_format == 'JPEG'):
progressive = True
else:
progressive = False
if (field.size and (img.size[0] > field.size['width'] or
img.size[1] > field.size['height'])):
size = field.size
@@ -1331,7 +1517,8 @@ class ImageGridFsProxy(GridFSProxy):
size = field.thumbnail_size
if size['force']:
thumbnail = ImageOps.fit(img, (size['width'], size['height']), Image.ANTIALIAS)
thumbnail = ImageOps.fit(
img, (size['width'], size['height']), Image.ANTIALIAS)
else:
thumbnail = img.copy()
thumbnail.thumbnail((size['width'],
@@ -1339,14 +1526,14 @@ class ImageGridFsProxy(GridFSProxy):
Image.ANTIALIAS)
if thumbnail:
thumb_id = self._put_thumbnail(thumbnail, img_format)
thumb_id = self._put_thumbnail(thumbnail, img_format, progressive)
else:
thumb_id = None
w, h = img.size
io = StringIO()
img.save(io, img_format)
img.save(io, img_format, progressive=progressive)
io.seek(0)
return super(ImageGridFsProxy, self).put(io,
@@ -1357,18 +1544,18 @@ class ImageGridFsProxy(GridFSProxy):
**kwargs)
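A sketch of how the new progressive flag is meant to be used from the field side; the Photo model and the size tuples are illustrative, not part of the diff:

    from mongoengine import Document, ImageField

    class Photo(Document):
        # (width, height, force) tuples, per the existing ImageField API
        image = ImageField(size=(800, 600, True),
                           thumbnail_size=(150, 150, True))

    photo = Photo()
    with open('leaf.jpg', 'rb') as f:
        # progressive is only honoured for JPEGs; the thumbnail is now
        # saved with the same progressive setting as the main image
        photo.image.put(f, progressive=True)
    photo.save()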
def delete(self, *args, **kwargs):
#deletes thumbnail
# deletes thumbnail
out = self.get()
if out and out.thumbnail_id:
self.fs.delete(out.thumbnail_id)
return super(ImageGridFsProxy, self).delete(*args, **kwargs)
def _put_thumbnail(self, thumbnail, format, **kwargs):
def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
w, h = thumbnail.size
io = StringIO()
thumbnail.save(io, format)
thumbnail.save(io, format, progressive=progressive)
io.seek(0)
return self.fs.put(io, width=w,
@@ -1417,6 +1604,7 @@ class ImproperlyConfigured(Exception):
class ImageField(FileField):
"""
An image file storage field.
@@ -1455,6 +1643,7 @@ class ImageField(FileField):
class SequenceField(BaseField):
"""Provides a sequental counter see:
http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
@@ -1524,7 +1713,7 @@ class SequenceField(BaseField):
data = collection.find_one({"_id": sequence_id})
if data:
return self.value_decorator(data['next']+1)
return self.value_decorator(data['next'] + 1)
return self.value_decorator(1)
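For context, generate() (as opposed to get_next_value above, which only reads the counter) performs an atomic $inc on a side collection. Roughly, and assuming the default 'mongoengine.counters' collection and a field named 'counter' on a Person document:

    from mongoengine.connection import get_db

    db = get_db()
    # atomically bump and fetch the sequence value (pymongo 2.x API)
    counter = db['mongoengine.counters'].find_and_modify(
        query={'_id': 'person.counter'},
        update={'$inc': {'next': 1}},
        new=True, upsert=True)
    next_value = counter['next']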
@@ -1554,6 +1743,14 @@ class SequenceField(BaseField):
return super(SequenceField, self).__set__(instance, value)
def prepare_query_value(self, op, value):
"""
This method is overridden in order to convert the query value into the
required type. We need to do this to be able to successfully compare query
values passed as strings; the base implementation returns the value as is.
"""
return self.value_decorator(value)
def to_python(self, value):
if value is None:
value = self.generate()
@@ -1561,6 +1758,7 @@ class SequenceField(BaseField):
class UUIDField(BaseField):
"""A UUID field.
.. versionadded:: 0.6
@@ -1613,7 +1811,13 @@ class UUIDField(BaseField):
class GeoPointField(BaseField):
"""A list storing a latitude and longitude.
"""A list storing a longitude and latitude coordinate.
.. note:: this represents a generic point in a 2D plane and a legacy way of
representing a geo point. It admits 2d indexes but not "2dsphere" indexes
in MongoDB > 2.4 which are more natural for modeling geospatial points.
See :ref:`geospatial-indexes`
.. versionadded:: 0.4
"""
@@ -1628,14 +1832,17 @@ class GeoPointField(BaseField):
'of (x, y)')
if not len(value) == 2:
self.error("Value (%s) must be a two-dimensional point" % repr(value))
self.error("Value (%s) must be a two-dimensional point" %
repr(value))
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
self.error("Both values (%s) in point must be float or int" % repr(value))
self.error(
"Both values (%s) in point must be float or int" % repr(value))
class PointField(GeoJsonBaseField):
"""A geo json field storing a latitude and longitude.
"""A GeoJSON field storing a longitude and latitude coordinate.
The data is represented as:
@@ -1654,7 +1861,8 @@ class PointField(GeoJsonBaseField):
class LineStringField(GeoJsonBaseField):
"""A geo json field storing a line of latitude and longitude coordinates.
"""A GeoJSON field storing a line of longitude and latitude coordinates.
The data is represented as:
@@ -1672,7 +1880,8 @@ class LineStringField(GeoJsonBaseField):
class PolygonField(GeoJsonBaseField):
"""A geo json field storing a polygon of latitude and longitude coordinates.
"""A GeoJSON field storing a polygon of longitude and latitude coordinates.
The data is represented as:

View File

@@ -3,8 +3,6 @@
import sys
PY3 = sys.version_info[0] == 3
PY25 = sys.version_info[:2] == (2, 5)
UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264
if PY3:
import codecs
@@ -29,33 +27,3 @@ else:
txt_type = unicode
str_types = (bin_type, txt_type)
if PY25:
def product(*args, **kwds):
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x + [y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
reduce = reduce
else:
from itertools import product
from functools import reduce
# For use with Python 2.5
# converts all keys from unicode to str for d and all nested dictionaries
def to_str_keys_recursive(d):
if isinstance(d, list):
for val in d:
if isinstance(val, (dict, list)):
to_str_keys_recursive(val)
elif isinstance(d, dict):
for key, val in d.items():
if isinstance(val, (dict, list)):
to_str_keys_recursive(val)
if isinstance(key, unicode):
d[str(key)] = d.pop(key)
else:
raise ValueError("non list/dict parameter not allowed")

View File

@@ -7,17 +7,20 @@ import pprint
import re
import warnings
from bson import SON
from bson.code import Code
from bson import json_util
import pymongo
import pymongo.errors
from pymongo.common import validate_read_preference
from mongoengine import signals
from mongoengine.connection import get_db
from mongoengine.context_managers import switch_db
from mongoengine.common import _import_class
from mongoengine.base.common import get_document
from mongoengine.errors import (OperationError, NotUniqueError,
InvalidQueryError, LookUpError)
from mongoengine.queryset import transform
from mongoengine.queryset.field_list import QueryFieldList
from mongoengine.queryset.visitor import Q, QNode
@@ -36,6 +39,7 @@ RE_TYPE = type(re.compile(''))
class BaseQuerySet(object):
"""A set of results returned from a query. Wraps a MongoDB cursor,
providing :class:`~mongoengine.Document` objects as the results.
"""
@@ -50,7 +54,7 @@ class BaseQuerySet(object):
self._initial_query = {}
self._where_clause = None
self._loaded_fields = QueryFieldList()
self._ordering = []
self._ordering = None
self._snapshot = False
self._timeout = True
self._class_check = True
@@ -61,6 +65,8 @@ class BaseQuerySet(object):
self._none = False
self._as_pymongo = False
self._as_pymongo_coerce = False
self._search_text = None
self._include_text_scores = False
# If inheritance is allowed, only return instances and instances of
# subclasses of the class being used
@@ -68,12 +74,14 @@ class BaseQuerySet(object):
if len(self._document._subclasses) == 1:
self._initial_query = {"_cls": self._document._subclasses[0]}
else:
self._initial_query = {"_cls": {"$in": self._document._subclasses}}
self._initial_query = {
"_cls": {"$in": self._document._subclasses}}
self._loaded_fields = QueryFieldList(always_include=['_cls'])
self._cursor_obj = None
self._limit = None
self._skip = None
self._hint = -1 # Using -1 as None is a valid value for hint
self.only_fields = []
def __call__(self, q_obj=None, class_check=True, slave_okay=False,
read_preference=None, **query):
@@ -144,16 +152,34 @@ class BaseQuerySet(object):
if queryset._scalar:
return queryset._get_scalar(
queryset._document._from_son(queryset._cursor[key],
_auto_dereference=self._auto_dereference))
_auto_dereference=self._auto_dereference,
only_fields=self.only_fields))
if queryset._as_pymongo:
return queryset._get_as_pymongo(queryset._cursor.next())
return queryset._get_as_pymongo(queryset._cursor[key])
return queryset._document._from_son(queryset._cursor[key],
_auto_dereference=self._auto_dereference)
_auto_dereference=self._auto_dereference, only_fields=self.only_fields)
raise AttributeError
def __iter__(self):
raise NotImplementedError
def _has_data(self):
""" Retrieves whether cursor has any data. """
queryset = self.order_by()
return False if queryset.first() is None else True
def __nonzero__(self):
""" Avoid to open all records in an if stmt in Py2. """
return self._has_data()
def __bool__(self):
""" Avoid to open all records in an if stmt in Py3. """
return self._has_data()
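Using the Person model from the tests, truth-testing is now cheap:

    # runs a single first() query instead of iterating the whole cursor
    has_adults = bool(Person.objects(age__gte=18))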
# Core functions
def all(self):
@@ -165,6 +191,36 @@ class BaseQuerySet(object):
"""
return self.__call__(*q_objs, **query)
def search_text(self, text, language=None, include_text_scores=False):
"""
Start a text search, using text indexes.
Requires MongoDB server version 2.6+.
:param language: The language that determines the list of stop words
for the search and the rules for the stemmer and tokenizer.
If not specified, the search uses the default language of the index.
For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
:param include_text_scores: If True, automatically adds a text_score
attribute to each returned Document.
"""
queryset = self.clone()
if queryset._search_text:
raise OperationError(
"Is not possible to use search_text two times.")
query_kwargs = SON({'$search': text})
if language:
query_kwargs['$language'] = language
queryset._query_obj &= Q(__raw__={'$text': query_kwargs})
queryset._mongo_query = None
queryset._cursor_obj = None
queryset._search_text = text
queryset._include_text_scores = include_text_scores
return queryset
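A sketch of the new text-search flow, assuming a text index declared with the $-prefix syntax used in the index tests below:

    class News(Document):
        title = StringField()
        meta = {'indexes': ['$title']}  # creates a MongoDB text index

    # MongoDB 2.6+ only; text_score is attached to each result and the
    # special '$text_score' key sorts on the $meta textScore projection
    news = (News.objects
            .search_text('mongo', language='english',
                         include_text_scores=True)
            .order_by('$text_score'))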
def get(self, *q_objs, **query):
"""Retrieve the the matching object raising
:class:`~mongoengine.queryset.MultipleObjectsReturned` or
@@ -175,7 +231,7 @@ class BaseQuerySet(object):
.. versionadded:: 0.3
"""
queryset = self.clone()
queryset = queryset.limit(2)
queryset = queryset.order_by().limit(2)
queryset = queryset.filter(*q_objs, **query)
try:
@@ -303,10 +359,10 @@ class BaseQuerySet(object):
try:
ids = self._collection.insert(raw, **write_concern)
except pymongo.errors.DuplicateKeyError, err:
message = 'Could not save document (%s)';
message = 'Could not save document (%s)'
raise NotUniqueError(message % unicode(err))
except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)';
message = 'Could not save document (%s)'
if re.match('^E1100[01] duplicate key', unicode(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
@@ -349,6 +405,7 @@ class BaseQuerySet(object):
will force an fsync on the primary server.
:param _from_doc_delete: True when called from Document.delete(); the
delete signals will already have been triggered, so don't loop.
:returns: the number of deleted documents
"""
queryset = self.clone()
doc = queryset._document
@@ -366,9 +423,11 @@ class BaseQuerySet(object):
has_delete_signal) and not _from_doc_delete
if call_document_delete:
cnt = 0
for doc in queryset:
doc.delete(write_concern=write_concern)
return
cnt += 1
return cnt
delete_rules = doc._meta.get('delete_rules') or {}
# Check for DENY rules before actually deleting/nullifying any other
@@ -389,7 +448,7 @@ class BaseQuerySet(object):
ref_q = document_cls.objects(**{field_name + '__in': self})
ref_q_count = ref_q.count()
if (doc != document_cls and ref_q_count > 0
or (doc == document_cls and ref_q_count > 0)):
or (doc == document_cls and ref_q_count > 0)):
ref_q.delete(write_concern=write_concern)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
@@ -399,7 +458,8 @@ class BaseQuerySet(object):
write_concern=write_concern,
**{'pull_all__%s' % field_name: self})
queryset._collection.remove(queryset._query, write_concern=write_concern)
result = queryset._collection.remove(queryset._query, write_concern=write_concern)
return result["n"]
def update(self, upsert=False, multi=True, write_concern=None,
full_result=False, **update):
@@ -443,6 +503,8 @@ class BaseQuerySet(object):
return result
elif result:
return result['n']
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u'Update failed (%s)' % unicode(err))
except pymongo.errors.OperationFailure, err:
if unicode(err) == u'multi not coded yet':
message = u'update() method requires MongoDB 1.1.3+'
@@ -466,6 +528,60 @@ class BaseQuerySet(object):
return self.update(
upsert=upsert, multi=False, write_concern=write_concern, **update)
def modify(self, upsert=False, full_response=False, remove=False, new=False, **update):
"""Update and return the updated document.
Returns either the document before or after modification based on `new`
parameter. If no documents match the query and `upsert` is false,
returns ``None``. If upserting and `new` is false, returns ``None``.
If the full_response parameter is ``True``, the return value will be
the entire response object from the server, including the 'ok' and
'lastErrorObject' fields, rather than just the modified document.
This is useful mainly because the 'lastErrorObject' document holds
information about the command's execution.
:param upsert: insert if document doesn't exist (default ``False``)
:param full_response: return the entire response object from the
server (default ``False``)
:param remove: remove rather than updating (default ``False``)
:param new: return updated rather than original document
(default ``False``)
:param update: Django-style update keyword arguments
.. versionadded:: 0.9
"""
if remove and new:
raise OperationError("Conflicting parameters: remove and new")
if not update and not upsert and not remove:
raise OperationError(
"No update parameters, must either update or remove")
queryset = self.clone()
query = queryset._query
update = transform.update(queryset._document, **update)
sort = queryset._ordering
try:
result = queryset._collection.find_and_modify(
query, update, upsert=upsert, sort=sort, remove=remove, new=new,
full_response=full_response, **self._cursor_args)
except pymongo.errors.DuplicateKeyError, err:
raise NotUniqueError(u"Update failed (%s)" % err)
except pymongo.errors.OperationFailure, err:
raise OperationError(u"Update failed (%s)" % err)
if full_response:
if result["value"] is not None:
result["value"] = self._document._from_son(result["value"], only_fields=self.only_fields)
else:
if result is not None:
result = self._document._from_son(result, only_fields=self.only_fields)
return result
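For example, an atomic claim-one workflow with the new modify(); the Job model is illustrative:

    class Job(Document):
        state = StringField(default='pending')

    # atomically flip one pending job and get the post-update document back
    job = Job.objects(state='pending').modify(set__state='running', new=True)
    if job is None:
        pass  # nothing matched (and no upsert was requested)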
def with_id(self, object_id):
"""Retrieve the object matching the id provided. Uses `object_id` only
and raises InvalidQueryError if a filter has been applied. Returns
@@ -497,13 +613,13 @@ class BaseQuerySet(object):
if self._scalar:
for doc in docs:
doc_map[doc['_id']] = self._get_scalar(
self._document._from_son(doc))
self._document._from_son(doc, only_fields=self.only_fields))
elif self._as_pymongo:
for doc in docs:
doc_map[doc['_id']] = self._get_as_pymongo(doc)
else:
for doc in docs:
doc_map[doc['_id']] = self._document._from_son(doc)
doc_map[doc['_id']] = self._document._from_son(doc, only_fields=self.only_fields)
return doc_map
@@ -522,6 +638,19 @@ class BaseQuerySet(object):
return self
def using(self, alias):
"""This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.
:param alias: The database alias
.. versionadded:: 0.9
"""
with switch_db(self._document, alias) as cls:
collection = cls._get_collection()
return self.clone_into(self.__class__(self._document, collection))
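A short sketch, assuming a second connection registered under the alias 'archive' and a User document:

    from mongoengine import connect, register_connection

    connect('mongoenginetest')
    register_connection('archive', 'mongoenginetest_archive')

    # evaluate the same queryset against the aliased database
    inactive = User.objects(active=False).using('archive')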
def clone(self):
"""Creates a copy of the current
:class:`~mongoengine.queryset.QuerySet`
@@ -535,13 +664,15 @@ class BaseQuerySet(object):
:class:`~mongoengine.queryset.base.BaseQuerySet` into another child class
"""
if not isinstance(cls, BaseQuerySet):
raise OperationError('%s is not a subclass of BaseQuerySet' % cls.__name__)
raise OperationError(
'%s is not a subclass of BaseQuerySet' % cls.__name__)
copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj',
'_where_clause', '_loaded_fields', '_ordering', '_snapshot',
'_timeout', '_class_check', '_slave_okay', '_read_preference',
'_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
'_limit', '_skip', '_hint', '_auto_dereference')
'_limit', '_skip', '_hint', '_auto_dereference',
'_search_text', '_include_text_scores', 'only_fields')
for prop in copy_props:
val = getattr(self, prop)
@@ -627,10 +758,16 @@ class BaseQuerySet(object):
distinct = self._dereference(queryset._cursor.distinct(field), 1,
name=field, instance=self._document)
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
doc_field = getattr(self._document._fields.get(field), "field", None)
# We may need to cast to the correct type eg.
# ListField(EmbeddedDocumentField)
doc_field = getattr(
self._document._fields.get(field), "field", None)
instance = getattr(doc_field, "document_type", False)
if instance:
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
GenericEmbeddedDocumentField = _import_class(
'GenericEmbeddedDocumentField')
if instance and isinstance(doc_field, (EmbeddedDocumentField,
GenericEmbeddedDocumentField)):
distinct = [instance(**doc) for doc in distinct]
return distinct
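So distinct on an embedded field now yields typed documents rather than raw SON; with illustrative models:

    class Address(EmbeddedDocument):
        city = StringField()

    class Person(Document):
        address = EmbeddedDocumentField(Address)

    # each distinct value comes back as an Address instance, not a dict
    cities = [a.city for a in Person.objects.distinct('address')]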
@@ -653,6 +790,7 @@ class BaseQuerySet(object):
.. versionchanged:: 0.5 - Added subfield support
"""
fields = dict([(f, QueryFieldList.ONLY) for f in fields])
self.only_fields = fields.keys()
return self.fields(True, **fields)
def exclude(self, *fields):
@@ -709,7 +847,8 @@ class BaseQuerySet(object):
for value, group in itertools.groupby(fields, lambda x: x[1]):
fields = [field for field, value in group]
fields = queryset._fields_to_dbfields(fields)
queryset._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called)
queryset._loaded_fields += QueryFieldList(
fields, value=value, _only_called=_only_called)
return queryset
@@ -839,10 +978,35 @@ class BaseQuerySet(object):
def from_json(self, json_data):
"""Converts json data to unsaved objects"""
son_data = json_util.loads(json_data)
return [self._document._from_son(data) for data in son_data]
return [self._document._from_son(data, only_fields=self.only_fields) for data in son_data]
def aggregate(self, *pipeline, **kwargs):
"""
Perform an aggregation based on your queryset params.
:param pipeline: list of aggregation commands,
see: http://docs.mongodb.org/manual/core/aggregation-pipeline/
.. versionadded:: 0.9
"""
initial_pipeline = []
if self._query:
initial_pipeline.append({'$match': self._query})
if self._ordering:
initial_pipeline.append({'$sort': dict(self._ordering)})
if self._limit is not None:
initial_pipeline.append({'$limit': self._limit})
if self._skip is not None:
initial_pipeline.append({'$skip': self._skip})
pipeline = initial_pipeline + list(pipeline)
return self._collection.aggregate(pipeline, cursor={}, **kwargs)
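Because the queryset's filter becomes a leading $match stage (with sort/limit/skip following), a grouped count can be written as (Person illustrative):

    pipeline = [
        {'$group': {'_id': '$country', 'total': {'$sum': 1}}},
        {'$sort': {'total': -1}},
    ]
    # the $match for age__gte=18 is prepended automatically
    rows = list(Person.objects(age__gte=18).aggregate(*pipeline))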
# JS functionality
def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None,
scope=None):
"""Perform a map/reduce query using the current query spec
@@ -923,10 +1087,38 @@ class BaseQuerySet(object):
map_reduce_function = 'inline_map_reduce'
else:
map_reduce_function = 'map_reduce'
mr_args['out'] = output
if isinstance(output, basestring):
mr_args['out'] = output
elif isinstance(output, dict):
ordered_output = []
for part in ('replace', 'merge', 'reduce'):
value = output.get(part)
if value:
ordered_output.append((part, value))
break
else:
raise OperationError("actionData not specified for output")
db_alias = output.get('db_alias')
remaining_args = ['db', 'sharded', 'nonAtomic']
if db_alias:
ordered_output.append(('db', get_db(db_alias).name))
del remaining_args[0]
for part in remaining_args:
value = output.get(part)
if value:
ordered_output.append((part, value))
mr_args['out'] = SON(ordered_output)
results = getattr(queryset._collection, map_reduce_function)(
map_f, reduce_f, **mr_args)
map_f, reduce_f, **mr_args)
if map_reduce_function == 'map_reduce':
results = results.find()
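With the dict form the output action comes first, followed by its modifiers, and db_alias is resolved to the real database name; a hedged sketch:

    map_f = "function () { emit(this.country, 1); }"
    reduce_f = "function (key, values) { return Array.sum(values); }"

    results = Person.objects.map_reduce(
        map_f, reduce_f,
        output={'reduce': 'country_totals',  # action: replace/merge/reduce
                'db_alias': 'archive',       # looked up via get_db()
                'nonAtomic': True})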
@@ -1138,7 +1330,7 @@ class BaseQuerySet(object):
if self._as_pymongo:
return self._get_as_pymongo(raw_doc)
doc = self._document._from_son(raw_doc,
_auto_dereference=self._auto_dereference)
_auto_dereference=self._auto_dereference, only_fields=self.only_fields)
if self._scalar:
return self._get_scalar(doc)
@@ -1173,6 +1365,13 @@ class BaseQuerySet(object):
cursor_args['slave_okay'] = self._slave_okay
if self._loaded_fields:
cursor_args['fields'] = self._loaded_fields.as_dict()
if self._include_text_scores:
if 'fields' not in cursor_args:
cursor_args['fields'] = {}
cursor_args['fields']['text_score'] = {'$meta': "textScore"}
return cursor_args
@property
@@ -1189,8 +1388,9 @@ class BaseQuerySet(object):
if self._ordering:
# Apply query ordering
self._cursor_obj.sort(self._ordering)
elif self._document._meta['ordering']:
# Otherwise, apply the ordering from the document model
elif self._ordering is None and self._document._meta['ordering']:
# Otherwise, apply the ordering from the document model, unless
# it's been explicitly cleared via order_by with no arguments
order = self._get_order_by(self._document._meta['ordering'])
self._cursor_obj.sort(order)
@@ -1362,7 +1562,7 @@ class BaseQuerySet(object):
for subdoc in subclasses:
try:
subfield = ".".join(f.db_field for f in
subdoc._lookup_field(field.split('.')))
subdoc._lookup_field(field.split('.')))
ret.append(subfield)
found = True
break
@@ -1380,6 +1580,13 @@ class BaseQuerySet(object):
for key in keys:
if not key:
continue
if key == '$text_score':
# automatically set to include text scores
self._include_text_scores = True
key_list.append(('text_score', {'$meta': "textScore"}))
continue
direction = pymongo.ASCENDING
if key[0] == '-':
direction = pymongo.DESCENDING
@@ -1392,7 +1599,7 @@ class BaseQuerySet(object):
pass
key_list.append((key, direction))
if self._cursor_obj:
if self._cursor_obj and key_list:
self._cursor_obj.sort(key_list)
return key_list
@@ -1450,6 +1657,7 @@ class BaseQuerySet(object):
# type of this field and use the corresponding
# .to_python(...)
from mongoengine.fields import EmbeddedDocumentField
obj = self._document
for chunk in path.split('.'):
obj = getattr(obj, chunk, None)
@@ -1460,6 +1668,7 @@ class BaseQuerySet(object):
if obj and data is not None:
data = obj.to_python(data)
return data
return clean(row)
def _sub_js_fields(self, code):
@@ -1468,6 +1677,7 @@ class BaseQuerySet(object):
substituted for the MongoDB name of the field (specified using the
:attr:`name` keyword argument in a field's constructor).
"""
def field_sub(match):
# Extract just the field name, and look up the field objects
field_name = match.group(1).split('.')

View File

@@ -155,3 +155,10 @@ class QuerySetNoCache(BaseQuerySet):
queryset = self.clone()
queryset.rewind()
return queryset
class QuerySetNoDeRef(QuerySet):
"""Special no_dereference QuerySet"""
def __dereference(items, max_depth=1, instance=None, name=None):
return items

View File

@@ -3,6 +3,7 @@ from collections import defaultdict
import pymongo
from bson import SON
from mongoengine.connection import get_connection
from mongoengine.common import _import_class
from mongoengine.errors import InvalidQueryError, LookUpError
@@ -10,22 +11,22 @@ __all__ = ('query', 'update')
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
'within_box', 'within_polygon', 'near', 'near_sphere',
'max_distance', 'geo_within', 'geo_within_box',
'geo_within_polygon', 'geo_within_center',
'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith',
'exact', 'iexact')
CUSTOM_OPERATORS = ('match',)
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS)
'all', 'size', 'exists', 'not', 'elemMatch')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
'within_box', 'within_polygon', 'near', 'near_sphere',
'max_distance', 'geo_within', 'geo_within_box',
'geo_within_polygon', 'geo_within_center',
'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith',
'exact', 'iexact')
CUSTOM_OPERATORS = ('match',)
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS)
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert')
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert')
def query(_doc_cls=None, _field_operation=False, **query):
@@ -38,7 +39,7 @@ def query(_doc_cls=None, _field_operation=False, **query):
mongo_query.update(value)
continue
parts = key.split('__')
parts = key.rsplit('__')
indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
parts = [part for part in parts if not part.isdigit()]
# Check for an operator and transform to mongo-style if there is
@@ -59,14 +60,20 @@ def query(_doc_cls=None, _field_operation=False, **query):
raise InvalidQueryError(e)
parts = []
CachedReferenceField = _import_class('CachedReferenceField')
cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, basestring):
parts.append(field)
append_field = False
# the last part is a CachedReferenceField: query against its cached _id
elif isinstance(field, CachedReferenceField) and fields[-1] == field:
parts.append('%s._id' % field.db_field)
else:
parts.append(field.db_field)
if append_field:
cleaned_fields.append(field)
@@ -78,13 +85,17 @@ def query(_doc_cls=None, _field_operation=False, **query):
if op in singular_ops:
if isinstance(field, basestring):
if (op in STRING_OPERATORS and
isinstance(value, basestring)):
isinstance(value, basestring)):
StringField = _import_class('StringField')
value = StringField.prepare_query_value(op, value)
else:
value = field
else:
value = field.prepare_query_value(op, value)
if isinstance(field, CachedReferenceField) and value:
value = value['_id']
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
# 'in', 'nin' and 'all' require a list of values
value = [field.prepare_query_value(op, v) for v in value]
@@ -94,7 +105,7 @@ def query(_doc_cls=None, _field_operation=False, **query):
if op in GEO_OPERATORS:
value = _geo_operator(field, op, value)
elif op in CUSTOM_OPERATORS:
if op == 'match':
if op in ('elem_match', 'match'):
value = field.prepare_query_value(op, value)
value = {"$elemMatch": value}
else:
@@ -115,14 +126,28 @@ def query(_doc_cls=None, _field_operation=False, **query):
if key in mongo_query and isinstance(mongo_query[key], dict):
mongo_query[key].update(value)
# $maxDistance needs to come last - convert to SON
if '$maxDistance' in mongo_query[key]:
value_dict = mongo_query[key]
value_dict = mongo_query[key]
if ('$maxDistance' in value_dict and '$near' in value_dict):
value_son = SON()
for k, v in value_dict.iteritems():
if k == '$maxDistance':
continue
value_son[k] = v
value_son['$maxDistance'] = value_dict['$maxDistance']
if isinstance(value_dict['$near'], dict):
for k, v in value_dict.iteritems():
if k == '$maxDistance':
continue
value_son[k] = v
if (get_connection().max_wire_version <= 1):
value_son['$maxDistance'] = value_dict[
'$maxDistance']
else:
value_son['$near'] = SON(value_son['$near'])
value_son['$near'][
'$maxDistance'] = value_dict['$maxDistance']
else:
for k, v in value_dict.iteritems():
if k == '$maxDistance':
continue
value_son[k] = v
value_son['$maxDistance'] = value_dict['$maxDistance']
mongo_query[key] = value_son
else:
# Store for manually merging later
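From the query side the reordering is transparent; assuming a Place document with a PointField:

    # on wire version <= 1 $maxDistance is emitted alongside $near,
    # on newer servers it is nested inside the $near document
    nearby = Place.objects(point__near=[40.0, 5.0], point__max_distance=1000)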
@@ -151,6 +176,9 @@ def update(_doc_cls=None, **update):
mongo_update.update(value)
continue
parts = key.split('__')
# if there is no operator, default to "set"
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
parts.insert(0, 'set')
# Check for an operator and transform to mongo-style if there is
op = None
if parts[0] in UPDATE_OPERATORS:
@@ -248,7 +276,8 @@ def update(_doc_cls=None, **update):
if ListField in field_classes:
# Join all fields via dot notation to the last ListField
# Then process as normal
last_listField = len(cleaned_fields) - field_classes.index(ListField)
last_listField = len(
cleaned_fields) - field_classes.index(ListField)
key = ".".join(parts[:last_listField])
parts = parts[last_listField:]
parts.insert(0, key)

View File

@@ -1,8 +1,9 @@
import copy
from mongoengine.errors import InvalidQueryError
from mongoengine.python_support import product, reduce
from itertools import product
from functools import reduce
from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import transform
__all__ = ('Q',)

View File

@@ -1 +1 @@
pymongo
pymongo>=2.7.1

View File

@@ -38,12 +38,14 @@ CLASSIFIERS = [
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.6.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
@@ -51,12 +53,15 @@ CLASSIFIERS = [
extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1']
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'Pillow>=2.0.0', 'django>=1.5.1']
if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'] = find_packages()
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
else:
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2>=2.6', 'python-dateutil']
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'Pillow>=2.0.0', 'jinja2>=2.6', 'python-dateutil']
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
extra_opts['tests_require'].append('unittest2')
setup(name='mongoengine',
version=VERSION,
@@ -72,7 +77,7 @@ setup(name='mongoengine',
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo>=2.5'],
install_requires=['pymongo>=2.7.1'],
test_suite='nose.collector',
**extra_opts
)

View File

@@ -207,22 +207,21 @@ class DeltaTest(unittest.TestCase):
doc.embedded_field.list_field[2].string_field = 'hello world'
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
self.assertEqual(doc._get_changed_fields(),
['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'list_field': ['1', 2, {
'_cls': 'Embedded',
'string_field': 'hello world',
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
'dict_field': {'hello': 'world'}}]}, {}))
self.assertEqual(doc._delta(), ({
'embedded_field.list_field': ['1', 2, {
['embedded_field.list_field.2'])
self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
'_cls': 'Embedded',
'string_field': 'hello world',
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
'dict_field': {'hello': 'world'}}
]}, {}))
}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
'_cls': 'Embedded',
'string_field': 'hello world',
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
'dict_field': {'hello': 'world'}}
}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field,
@@ -253,7 +252,7 @@ class DeltaTest(unittest.TestCase):
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
self.assertEqual(doc._delta(),
({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
@@ -548,22 +547,21 @@ class DeltaTest(unittest.TestCase):
doc.embedded_field.list_field[2].string_field = 'hello world'
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
self.assertEqual(doc._get_changed_fields(),
['db_embedded_field.db_list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'db_list_field': ['1', 2, {
['db_embedded_field.db_list_field.2'])
self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': {
'_cls': 'Embedded',
'db_string_field': 'hello world',
'db_int_field': 1,
'db_list_field': ['1', 2, {'hello': 'world'}],
'db_dict_field': {'hello': 'world'}}]}, {}))
'db_dict_field': {'hello': 'world'}}}, {}))
self.assertEqual(doc._delta(), ({
'db_embedded_field.db_list_field': ['1', 2, {
'db_embedded_field.db_list_field.2': {
'_cls': 'Embedded',
'db_string_field': 'hello world',
'db_int_field': 1,
'db_list_field': ['1', 2, {'hello': 'world'}],
'db_dict_field': {'hello': 'world'}}
]}, {}))
}, {}))
doc.save()
doc = doc.reload(10)
self.assertEqual(doc.embedded_field.list_field[2].string_field,
@@ -594,8 +592,7 @@ class DeltaTest(unittest.TestCase):
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
self.assertEqual(doc._delta(),
({'db_embedded_field.db_list_field.2.db_list_field':
[1, 2, {}]}, {}))
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
doc.save()
doc = doc.reload(10)
@@ -735,5 +732,47 @@ class DeltaTest(unittest.TestCase):
mydoc._clear_changed_fields()
self.assertEqual([], mydoc._get_changed_fields())
def test_referenced_object_changed_attributes(self):
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
class Organization(Document):
name = StringField()
class User(Document):
name = StringField()
org = ReferenceField('Organization', required=True)
Organization.drop_collection()
User.drop_collection()
org1 = Organization(name='Org 1')
org1.save()
org2 = Organization(name='Org 2')
org2.save()
user = User(name='Fred', org=org1)
user.save()
org1.reload()
org2.reload()
user.reload()
self.assertEqual(org1.name, 'Org 1')
self.assertEqual(org2.name, 'Org 2')
self.assertEqual(user.name, 'Fred')
user.name = 'Harold'
user.org = org2
org2.name = 'New Org 2'
self.assertEqual(org2.name, 'New Org 2')
user.save()
org2.save()
self.assertEqual(org2.name, 'New Org 2')
org2.reload()
self.assertEqual(org2.name, 'New Org 2')
if __name__ == '__main__':
unittest.main()

View File

@@ -292,6 +292,59 @@ class DynamicTest(unittest.TestCase):
person.save()
self.assertEqual(Person.objects.first().age, 35)
def test_dynamic_embedded_works_with_only(self):
"""Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""
class Address(DynamicEmbeddedDocument):
city = StringField()
class Person(DynamicDocument):
address = EmbeddedDocumentField(Address)
Person.drop_collection()
Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()
self.assertEqual(Person.objects.first().address.street_number, '1337')
self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')
def test_dynamic_and_embedded_dict_access(self):
"""Ensure embedded dynamic documents work with dict[] style access"""
class Address(EmbeddedDocument):
city = StringField()
class Person(DynamicDocument):
name = StringField()
Person.drop_collection()
Person(name="Ross", address=Address(city="London")).save()
person = Person.objects.first()
person.attrval = "This works"
person["phone"] = "555-1212" # but this should too
# Same thing two levels deep
person["address"]["city"] = "Lundenne"
person.save()
self.assertEqual(Person.objects.first().address.city, "Lundenne")
self.assertEqual(Person.objects.first().phone, "555-1212")
person = Person.objects.first()
person.address = Address(city="Londinium")
person.save()
self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first()
person["age"] = 35
person.save()
self.assertEqual(Person.objects.first().age, 35)
if __name__ == '__main__':
unittest.main()

View File

@@ -727,5 +727,32 @@ class IndexesTest(unittest.TestCase):
report.to_mongo())
self.assertEqual(report, Report.objects.get(pk=my_key))
def test_string_indexes(self):
class MyDoc(Document):
provider_ids = DictField()
meta = {
"indexes": ["provider_ids.foo", "provider_ids.bar"],
}
info = MyDoc.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('provider_ids.foo', 1)] in info)
self.assertTrue([('provider_ids.bar', 1)] in info)
def test_text_indexes(self):
class Book(Document):
title = DictField()
meta = {
"indexes": ["$title"],
}
indexes = Book.objects._collection.index_information()
self.assertTrue("title_text" in indexes)
key = indexes["title_text"]["key"]
self.assertTrue(('_fts', 'text') in key)
if __name__ == '__main__':
unittest.main()

View File

@@ -15,7 +15,7 @@ from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
from mongoengine import *
from mongoengine.errors import (NotRegistered, InvalidDocumentError,
InvalidQueryError)
InvalidQueryError, NotUniqueError)
from mongoengine.queryset import NULLIFY, Q
from mongoengine.connection import get_db
from mongoengine.base import get_document
@@ -57,7 +57,7 @@ class InstanceTest(unittest.TestCase):
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 10,
'max_size': 90000,
'max_size': 4096,
}
Log.drop_collection()
@@ -75,7 +75,7 @@ class InstanceTest(unittest.TestCase):
options = Log.objects._collection.options()
self.assertEqual(options['capped'], True)
self.assertEqual(options['max'], 10)
self.assertEqual(options['size'], 90000)
self.assertTrue(options['size'] >= 4096)
# Check that the document cannot be redefined with different options
def recreate_log_document():
@@ -353,6 +353,14 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(person.name, "Test User")
self.assertEqual(person.age, 20)
person.reload('age')
self.assertEqual(person.name, "Test User")
self.assertEqual(person.age, 21)
person.reload()
self.assertEqual(person.name, "Mr Test User")
self.assertEqual(person.age, 21)
person.reload()
self.assertEqual(person.name, "Mr Test User")
self.assertEqual(person.age, 21)
@@ -398,10 +406,11 @@ class InstanceTest(unittest.TestCase):
doc.embedded_field.dict_field['woot'] = "woot"
self.assertEqual(doc._get_changed_fields(), [
'list_field', 'dict_field', 'embedded_field.list_field',
'embedded_field.dict_field'])
'list_field', 'dict_field.woot', 'embedded_field.list_field',
'embedded_field.dict_field.woot'])
doc.save()
self.assertEqual(len(doc.list_field), 4)
doc = doc.reload(10)
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(len(doc.list_field), 4)
@@ -409,6 +418,16 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(len(doc.embedded_field.list_field), 4)
self.assertEqual(len(doc.embedded_field.dict_field), 2)
doc.list_field.append(1)
doc.save()
doc.dict_field['extra'] = 1
doc = doc.reload(10, 'list_field')
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(len(doc.list_field), 5)
self.assertEqual(len(doc.dict_field), 3)
self.assertEqual(len(doc.embedded_field.list_field), 4)
self.assertEqual(len(doc.embedded_field.dict_field), 2)
def test_reload_doesnt_exist(self):
class Foo(Document):
pass
@@ -515,9 +534,6 @@ class InstanceTest(unittest.TestCase):
class Email(EmbeddedDocument):
email = EmailField()
def clean(self):
print "instance:"
print self._instance
class Account(Document):
email = EmbeddedDocumentField(Email)
@@ -820,6 +836,80 @@ class InstanceTest(unittest.TestCase):
p1.reload()
self.assertEqual(p1.name, p.parent.name)
def test_save_atomicity_condition(self):
class Widget(Document):
toggle = BooleanField(default=False)
count = IntField(default=0)
save_id = UUIDField()
def flip(widget):
widget.toggle = not widget.toggle
widget.count += 1
def UUID(i):
return uuid.UUID(int=i)
Widget.drop_collection()
w1 = Widget(toggle=False, save_id=UUID(1))
# ignore save_condition on new record creation
w1.save(save_condition={'save_id':UUID(42)})
w1.reload()
self.assertFalse(w1.toggle)
self.assertEqual(w1.save_id, UUID(1))
self.assertEqual(w1.count, 0)
# mismatch in save_condition prevents save
flip(w1)
self.assertTrue(w1.toggle)
self.assertEqual(w1.count, 1)
w1.save(save_condition={'save_id':UUID(42)})
w1.reload()
self.assertFalse(w1.toggle)
self.assertEqual(w1.count, 0)
# matched save_condition allows save
flip(w1)
self.assertTrue(w1.toggle)
self.assertEqual(w1.count, 1)
w1.save(save_condition={'save_id':UUID(1)})
w1.reload()
self.assertTrue(w1.toggle)
self.assertEqual(w1.count, 1)
# save_condition can be used to ensure atomic read & updates
# i.e., prevent interleaved reads and writes from separate contexts
w2 = Widget.objects.get()
self.assertEqual(w1, w2)
old_id = w1.save_id
flip(w1)
w1.save_id = UUID(2)
w1.save(save_condition={'save_id':old_id})
w1.reload()
self.assertFalse(w1.toggle)
self.assertEqual(w1.count, 2)
flip(w2)
flip(w2)
w2.save(save_condition={'save_id':old_id})
w2.reload()
self.assertFalse(w2.toggle)
self.assertEqual(w2.count, 2)
# save_condition uses mongoengine-style operator syntax
flip(w1)
w1.save(save_condition={'count__lt':w1.count})
w1.reload()
self.assertTrue(w1.toggle)
self.assertEqual(w1.count, 3)
flip(w1)
w1.save(save_condition={'count__gte':w1.count})
w1.reload()
self.assertTrue(w1.toggle)
self.assertEqual(w1.count, 3)
def test_update(self):
"""Ensure that an existing document is updated instead of be
overwritten."""
@@ -984,11 +1074,23 @@ class InstanceTest(unittest.TestCase):
self.assertRaises(OperationError, update_no_value_raises)
def update_no_op_raises():
def update_no_op_should_default_to_set():
person = self.Person.objects.first()
person.update(name="Dan")
person.reload()
return person.name
self.assertRaises(InvalidQueryError, update_no_op_raises)
self.assertEqual("Dan", update_no_op_should_default_to_set())
def test_update_unique_field(self):
class Doc(Document):
name = StringField(unique=True)
doc1 = Doc(name="first").save()
doc2 = Doc(name="second").save()
self.assertRaises(NotUniqueError, lambda:
doc2.update(set__name=doc1.name))
def test_embedded_update(self):
"""
@@ -2281,6 +2383,8 @@ class InstanceTest(unittest.TestCase):
log.machine = "Localhost"
log.save()
self.assertTrue(log.id is not None)
log.log = "Saving"
log.save()
@@ -2304,6 +2408,8 @@ class InstanceTest(unittest.TestCase):
log.machine = "Localhost"
log.save()
self.assertTrue(log.id is not None)
log.log = "Saving"
log.save()
@@ -2411,7 +2517,7 @@ class InstanceTest(unittest.TestCase):
for parameter_name, parameter in self.parameters.iteritems():
parameter.expand()
class System(Document):
class NodesSystem(Document):
name = StringField(required=True)
nodes = MapField(ReferenceField(Node, dbref=False))
@@ -2419,18 +2525,18 @@ class InstanceTest(unittest.TestCase):
for node_name, node in self.nodes.iteritems():
node.expand()
node.save(*args, **kwargs)
super(System, self).save(*args, **kwargs)
super(NodesSystem, self).save(*args, **kwargs)
System.drop_collection()
NodesSystem.drop_collection()
Node.drop_collection()
system = System(name="system")
system = NodesSystem(name="system")
system.nodes["node"] = Node()
system.save()
system.nodes["node"].parameters["param"] = Parameter()
system.save()
system = System.objects.first()
system = NodesSystem.objects.first()
self.assertEqual("UNDEFINED", system.nodes["node"].parameters["param"].macros["test"].value)
def test_embedded_document_equality(self):
@@ -2452,5 +2558,90 @@ class InstanceTest(unittest.TestCase):
f1.ref # Dereferences lazily
self.assertEqual(f1, f2)
def test_dbref_equality(self):
class Test2(Document):
name = StringField()
class Test3(Document):
name = StringField()
class Test(Document):
name = StringField()
test2 = ReferenceField('Test2')
test3 = ReferenceField('Test3')
Test.drop_collection()
Test2.drop_collection()
Test3.drop_collection()
t2 = Test2(name='a')
t2.save()
t3 = Test3(name='x')
t3.id = t2.id
t3.save()
t = Test(name='b', test2=t2, test3=t3)
f = Test._from_son(t.to_mongo())
dbref2 = f._data['test2']
obj2 = f.test2
self.assertTrue(isinstance(dbref2, DBRef))
self.assertTrue(isinstance(obj2, Test2))
self.assertTrue(obj2.id == dbref2.id)
self.assertTrue(obj2 == dbref2)
self.assertTrue(dbref2 == obj2)
dbref3 = f._data['test3']
obj3 = f.test3
self.assertTrue(isinstance(dbref3, DBRef))
self.assertTrue(isinstance(obj3, Test3))
self.assertTrue(obj3.id == dbref3.id)
self.assertTrue(obj3 == dbref3)
self.assertTrue(dbref3 == obj3)
self.assertTrue(obj2.id == obj3.id)
self.assertTrue(dbref2.id == dbref3.id)
self.assertFalse(dbref2 == dbref3)
self.assertFalse(dbref3 == dbref2)
self.assertTrue(dbref2 != dbref3)
self.assertTrue(dbref3 != dbref2)
self.assertFalse(obj2 == dbref3)
self.assertFalse(dbref3 == obj2)
self.assertTrue(obj2 != dbref3)
self.assertTrue(dbref3 != obj2)
self.assertFalse(obj3 == dbref2)
self.assertFalse(dbref2 == obj3)
self.assertTrue(obj3 != dbref2)
self.assertTrue(dbref2 != obj3)
def test_default_values(self):
class Person(Document):
created_on = DateTimeField(default=lambda: datetime.utcnow())
name = StringField()
p = Person(name='alon')
p.save()
orig_created_on = Person.objects().only('created_on')[0].created_on
p2 = Person.objects().only('name')[0]
p2.name = 'alon2'
p2.save()
p3 = Person.objects().only('created_on')[0]
self.assertEquals(orig_created_on, p3.created_on)
class Person(Document):
created_on = DateTimeField(default=lambda: datetime.utcnow())
name = StringField()
height = IntField(default=189)
p4 = Person.objects()[0]
p4.save()
self.assertEquals(p4.height, 189)
self.assertEquals(Person.objects(height=189).count(), 1)
if __name__ == '__main__':
unittest.main()

View File

@@ -20,6 +20,28 @@ class TestJson(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
def test_json_names(self):
"""
Test for the reported issue:
https://github.com/MongoEngine/mongoengine/issues/654
where the reporter asks for the ability to perform
to_json with the original field names rather than the abbreviated
MongoDB document keys.
"""
class Embedded(EmbeddedDocument):
string = StringField(db_field='s')
class Doc(Document):
string = StringField(db_field='s')
embedded = EmbeddedDocumentField(Embedded, db_field='e')
doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':'))
expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""
self.assertEqual(doc_json, expected_json)
def test_json_simple(self):
class Embedded(EmbeddedDocument):

View File

@@ -141,6 +141,30 @@ class ValidatorErrorTest(unittest.TestCase):
self.assertEqual(e.to_dict(), {
"e": {'val': 'OK could not be converted to int'}})
def test_embedded_weakref(self):
class SubDoc(EmbeddedDocument):
val = IntField(required=True)
class Doc(Document):
e = EmbeddedDocumentField(SubDoc, db_field='eb')
Doc.drop_collection()
d1 = Doc()
d2 = Doc()
s = SubDoc()
self.assertRaises(ValidationError, lambda: s.validate())
d1.e = s
d2.e = s
del d1
self.assertRaises(ValidationError, lambda: d2.validate())
if __name__ == '__main__':
unittest.main()

View File

@@ -47,7 +47,8 @@ class FieldTest(unittest.TestCase):
# Confirm saving now would store values
data_to_be_saved = sorted(person.to_mongo().keys())
self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid'])
self.assertEqual(
data_to_be_saved, ['age', 'created', 'name', 'userid'])
self.assertTrue(person.validate() is None)
@@ -63,7 +64,8 @@ class FieldTest(unittest.TestCase):
# Confirm introspection changes nothing
data_to_be_saved = sorted(person.to_mongo().keys())
self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid'])
self.assertEqual(
data_to_be_saved, ['age', 'created', 'name', 'userid'])
def test_default_values_set_to_None(self):
"""Ensure that default field values are used when creating a document.
@@ -587,7 +589,8 @@ class FieldTest(unittest.TestCase):
LogEntry.drop_collection()
# Post UTC - microseconds are rounded (down) nearest millisecond and dropped
# Post UTC - microseconds are rounded (down) nearest millisecond and
# dropped
d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999)
d2 = datetime.datetime(1970, 01, 01, 00, 00, 01)
log = LogEntry()
@@ -688,7 +691,8 @@ class FieldTest(unittest.TestCase):
LogEntry.drop_collection()
# Post UTC - microseconds are rounded (down) nearest millisecond and dropped - with default datetimefields
# Post UTC - microseconds are rounded (down) nearest millisecond and
# dropped - with default datetimefields
d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999)
log = LogEntry()
log.date = d1
@@ -696,14 +700,16 @@ class FieldTest(unittest.TestCase):
log.reload()
self.assertEqual(log.date, d1)
# Post UTC - microseconds are rounded (down) nearest millisecond - with default datetimefields
# Post UTC - microseconds are rounded (down) nearest millisecond - with
# default datetimefields
d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999)
log.date = d1
log.save()
log.reload()
self.assertEqual(log.date, d1)
# Pre UTC dates microseconds below 1000 are dropped - with default datetimefields
# Pre UTC dates microseconds below 1000 are dropped - with default
# datetimefields
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
log.date = d1
log.save()
@@ -929,12 +935,16 @@ class FieldTest(unittest.TestCase):
post.save()
self.assertEqual(BlogPost.objects.count(), 3)
self.assertEqual(BlogPost.objects.filter(info__exact='test').count(), 1)
self.assertEqual(BlogPost.objects.filter(info__0__test='test').count(), 1)
self.assertEqual(
BlogPost.objects.filter(info__exact='test').count(), 1)
self.assertEqual(
BlogPost.objects.filter(info__0__test='test').count(), 1)
# Confirm handles non strings or non existing keys
self.assertEqual(BlogPost.objects.filter(info__0__test__exact='5').count(), 0)
self.assertEqual(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
self.assertEqual(
BlogPost.objects.filter(info__0__test__exact='5').count(), 0)
self.assertEqual(
BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
BlogPost.drop_collection()
def test_list_field_passed_in_value(self):
@@ -951,7 +961,6 @@ class FieldTest(unittest.TestCase):
foo.bars.append(bar)
self.assertEqual(repr(foo.bars), '[<Bar: Bar object>]')
def test_list_field_strict(self):
"""Ensure that list field handles validation if provided a strict field type."""
@@ -1082,20 +1091,28 @@ class FieldTest(unittest.TestCase):
self.assertTrue(isinstance(e2.mapping[1], IntegerSetting))
# Test querying
self.assertEqual(Simple.objects.filter(mapping__1__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__number=1).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__complex__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__1__value=42).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__2__number=1).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__2__complex__value=42).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__2__list__0__value=42).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1)
# Confirm can update
Simple.objects().update(set__mapping__1=IntegerSetting(value=10))
self.assertEqual(Simple.objects.filter(mapping__1__value=10).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__1__value=10).count(), 1)
Simple.objects().update(
set__mapping__2__list__1=StringSetting(value='Boo'))
self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0)
self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0)
self.assertEqual(
Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1)
Simple.drop_collection()
@@ -1141,12 +1158,16 @@ class FieldTest(unittest.TestCase):
post.save()
self.assertEqual(BlogPost.objects.count(), 3)
self.assertEqual(BlogPost.objects.filter(info__title__exact='test').count(), 1)
self.assertEqual(BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
self.assertEqual(
BlogPost.objects.filter(info__title__exact='test').count(), 1)
self.assertEqual(
BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
# Confirm handles non strings or non existing keys
self.assertEqual(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
self.assertEqual(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
self.assertEqual(
BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
self.assertEqual(
BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
post = BlogPost.objects.create(info={'title': 'original'})
post.info.update({'title': 'updated'})
@@ -1207,19 +1228,26 @@ class FieldTest(unittest.TestCase):
self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting))
# Test querying
self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__someint__value=42).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
# Confirm can update
Simple.objects().update(
set__mapping={"someint": IntegerSetting(value=10)})
Simple.objects().update(
set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
self.assertEqual(
Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
Simple.drop_collection()
@@ -1290,7 +1318,7 @@ class FieldTest(unittest.TestCase):
class Test(Document):
my_map = MapField(field=EmbeddedDocumentField(Embedded),
db_field='x')
db_field='x')
Test.drop_collection()
@@ -1334,7 +1362,7 @@ class FieldTest(unittest.TestCase):
Log(name="wilson", visited={'friends': datetime.datetime.now()}).save()
self.assertEqual(1, Log.objects(
visited__friends__exists=True).count())
visited__friends__exists=True).count())
def test_embedded_db_field(self):
@@ -1477,6 +1505,375 @@ class FieldTest(unittest.TestCase):
mongoed = p1.to_mongo()
self.assertTrue(isinstance(mongoed['parent'], ObjectId))
def test_cached_reference_fields(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocorrence(Document):
person = StringField()
animal = CachedReferenceField(
Animal, fields=['tag'])
Animal.drop_collection()
Ocorrence.drop_collection()
a = Animal(name="Leopard", tag="heavy")
a.save()
self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal])
o = Ocorrence(person="teste", animal=a)
o.save()
p = Ocorrence(person="Wilson")
p.save()
self.assertEqual(Ocorrence.objects(animal=None).count(), 1)
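# to_mongo(fields=[...]) serialises only the requested subset plus _id,
# which is exactly the payload cached on the referencing document: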
self.assertEqual(
a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk})
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
# counts
Ocorrence(person="teste 2").save()
Ocorrence(person="teste 3").save()
count = Ocorrence.objects(animal__tag='heavy').count()
self.assertEqual(count, 1)
ocorrence = Ocorrence.objects(animal__tag='heavy').first()
self.assertEqual(ocorrence.person, "teste")
self.assertTrue(isinstance(ocorrence.animal, Animal))
def test_cached_reference_field_decimal(self):
class PersonAuto(Document):
name = StringField()
salary = DecimalField()
class SocialTest(Document):
group = StringField()
person = CachedReferenceField(
PersonAuto,
fields=('salary',))
PersonAuto.drop_collection()
SocialTest.drop_collection()
p = PersonAuto(name="Alberto", salary=Decimal('7000.00'))
p.save()
s = SocialTest(group="dev", person=p)
s.save()
self.assertEqual(
SocialTest.objects._collection.find_one({'person.salary': 7000.00}), {
'_id': s.pk,
'group': s.group,
'person': {
'_id': p.pk,
'salary': 7000.00
}
})
def test_cached_reference_field_reference(self):
class Group(Document):
name = StringField()
class Person(Document):
name = StringField()
group = ReferenceField(Group)
class SocialData(Document):
obs = StringField()
tags = ListField(
StringField())
person = CachedReferenceField(
Person,
fields=('group',))
Group.drop_collection()
Person.drop_collection()
SocialData.drop_collection()
g1 = Group(name='dev')
g1.save()
g2 = Group(name="designers")
g2.save()
p1 = Person(name="Alberto", group=g1)
p1.save()
p2 = Person(name="Andre", group=g1)
p2.save()
p3 = Person(name="Afro design", group=g2)
p3.save()
s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2'])
s1.save()
s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4'])
s2.save()
self.assertEqual(SocialData.objects._collection.find_one(
{'tags': 'tag2'}), {
'_id': s1.pk,
'obs': 'testing 123',
'tags': ['tag1', 'tag2'],
'person': {
'_id': p1.pk,
'group': g1.pk
}
})
self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
self.assertEqual(SocialData.objects(person__group=g2).first(), s2)
def test_cached_reference_field_update_all(self):
class Person(Document):
TYPES = (
('pf', "PF"),
('pj', "PJ")
)
name = StringField()
tp = StringField(
choices=TYPES
)
father = CachedReferenceField('self', fields=('tp',))
Person.drop_collection()
a1 = Person(name="Wilson Father", tp="pj")
a1.save()
a2 = Person(name='Wilson Junior', tp='pf', father=a1)
a2.save()
self.assertEqual(dict(a2.to_mongo()), {
"_id": a2.pk,
"name": u"Wilson Junior",
"tp": u"pf",
"father": {
"_id": a1.pk,
"tp": u"pj"
}
})
self.assertEqual(Person.objects(father=a1)._query, {
'father._id': a1.pk
})
self.assertEqual(Person.objects(father=a1).count(), 1)
Person.objects.update(set__tp="pf")
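# A raw queryset update bypasses document save(), so the cached 'father'
# snapshots go stale; sync_all() below rewrites them across the collection.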
Person.father.sync_all()
a2.reload()
self.assertEqual(dict(a2.to_mongo()), {
"_id": a2.pk,
"name": u"Wilson Junior",
"tp": u"pf",
"father": {
"_id": a1.pk,
"tp": u"pf"
}
})
def test_cached_reference_fields_on_embedded_documents(self):
def build():
class Test(Document):
name = StringField()
type('WrongEmbeddedDocument', (
EmbeddedDocument,), {
'test': CachedReferenceField(Test)
})
self.assertRaises(InvalidDocumentError, build)
def test_cached_reference_auto_sync(self):
class Person(Document):
TYPES = (
('pf', "PF"),
('pj', "PJ")
)
name = StringField()
tp = StringField(
choices=TYPES
)
father = CachedReferenceField('self', fields=('tp',))
Person.drop_collection()
a1 = Person(name="Wilson Father", tp="pj")
a1.save()
a2 = Person(name='Wilson Junior', tp='pf', father=a1)
a2.save()
a1.tp = 'pf'
a1.save()
a2.reload()
self.assertEqual(dict(a2.to_mongo()), {
'_id': a2.pk,
'name': 'Wilson Junior',
'tp': 'pf',
'father': {
'_id': a1.pk,
'tp': 'pf'
}
})
def test_cached_reference_auto_sync_disabled(self):
class Persone(Document):
TYPES = (
('pf', "PF"),
('pj', "PJ")
)
name = StringField()
tp = StringField(
choices=TYPES
)
father = CachedReferenceField(
'self', fields=('tp',), auto_sync=False)
Persone.drop_collection()
a1 = Persone(name="Wilson Father", tp="pj")
a1.save()
a2 = Persone(name='Wilson Junior', tp='pf', father=a1)
a2.save()
a1.tp = 'pf'
a1.save()
self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), {
'_id': a2.pk,
'name': 'Wilson Junior',
'tp': 'pf',
'father': {
'_id': a1.pk,
'tp': 'pj'
}
})
def test_cached_reference_embedded_fields(self):
class Owner(EmbeddedDocument):
TPS = (
('n', "Normal"),
('u', "Urgent")
)
name = StringField()
tp = StringField(
verbose_name="Type",
db_field="t",
choices=TPS)
class Animal(Document):
name = StringField()
tag = StringField()
owner = EmbeddedDocumentField(Owner)
class Ocorrence(Document):
person = StringField()
animal = CachedReferenceField(
Animal, fields=['tag', 'owner.tp'])
Animal.drop_collection()
Ocorrence.drop_collection()
a = Animal(name="Leopard", tag="heavy",
owner=Owner(tp='u', name="Wilson Júnior")
)
a.save()
o = Ocorrence(person="teste", animal=a)
o.save()
self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), {
'_id': a.pk,
'tag': 'heavy',
'owner': {
't': 'u'
}
})
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u')
# counts
Ocorrence(person="teste 2").save()
Ocorrence(person="teste 3").save()
count = Ocorrence.objects(
animal__tag='heavy', animal__owner__tp='u').count()
self.assertEqual(count, 1)
ocorrence = Ocorrence.objects(
animal__tag='heavy',
animal__owner__tp='u').first()
self.assertEqual(ocorrence.person, "teste")
self.assertTrue(isinstance(ocorrence.animal, Animal))
def test_cached_reference_embedded_list_fields(self):
class Owner(EmbeddedDocument):
name = StringField()
tags = ListField(StringField())
class Animal(Document):
name = StringField()
tag = StringField()
owner = EmbeddedDocumentField(Owner)
class Ocorrence(Document):
person = StringField()
animal = CachedReferenceField(
Animal, fields=['tag', 'owner.tags'])
Animal.drop_collection()
Ocorrence.drop_collection()
a = Animal(name="Leopard", tag="heavy",
owner=Owner(tags=['cool', 'funny'],
name="Wilson Júnior")
)
a.save()
o = Ocorrence(person="teste 2", animal=a)
o.save()
self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), {
'_id': a.pk,
'tag': 'heavy',
'owner': {
'tags': ['cool', 'funny']
}
})
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
self.assertEqual(o.to_mongo()['animal']['owner']['tags'],
['cool', 'funny'])
# counts
Ocorrence(person="teste 2").save()
Ocorrence(person="teste 3").save()
query = Ocorrence.objects(
animal__tag='heavy', animal__owner__tags='cool')._query
self.assertEqual(
query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'})
ocorrence = Ocorrence.objects(
animal__tag='heavy',
animal__owner__tags='cool').first()
self.assertEqual(ocorrence.person, "teste 2")
self.assertTrue(isinstance(ocorrence.animal, Animal))
def test_objectid_reference_fields(self):
class Person(Document):
@@ -1834,8 +2231,7 @@ class FieldTest(unittest.TestCase):
Person(name="Wilson Jr").save()
self.assertEqual(repr(Person.objects(city=None)),
"[<Person: Person object>]")
"[<Person: Person object>]")
def test_generic_reference_choices(self):
"""Ensure that a GenericReferenceField can handle choices
@@ -1982,7 +2378,8 @@ class FieldTest(unittest.TestCase):
attachment_required.blob = Binary(b('\xe6\x00\xc4\xff\x07'))
attachment_required.validate()
attachment_size_limit = AttachmentSizeLimit(blob=b('\xe6\x00\xc4\xff\x07'))
attachment_size_limit = AttachmentSizeLimit(
blob=b('\xe6\x00\xc4\xff\x07'))
self.assertRaises(ValidationError, attachment_size_limit.validate)
attachment_size_limit.blob = b('\xe6\x00\xc4\xff')
attachment_size_limit.validate()
@@ -2030,8 +2427,8 @@ class FieldTest(unittest.TestCase):
"""
class Shirt(Document):
size = StringField(max_length=3, choices=(
('S', 'Small'), ('M', 'Medium'), ('L', 'Large'),
('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')))
('S', 'Small'), ('M', 'Medium'), ('L', 'Large'),
('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')))
style = StringField(max_length=3, choices=(
('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S')
@@ -2061,7 +2458,7 @@ class FieldTest(unittest.TestCase):
"""
class Shirt(Document):
size = StringField(max_length=3,
choices=('S', 'M', 'L', 'XL', 'XXL'))
choices=('S', 'M', 'L', 'XL', 'XXL'))
Shirt.drop_collection()
@@ -2179,7 +2576,6 @@ class FieldTest(unittest.TestCase):
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 1000)
def test_sequence_field_get_next_value(self):
class Person(Document):
id = SequenceField(primary_key=True)
@@ -2368,7 +2764,6 @@ class FieldTest(unittest.TestCase):
self.assertEqual(1, post.comments[0].id)
self.assertEqual(2, post.comments[1].id)
def test_generic_embedded_document(self):
class Car(EmbeddedDocument):
name = StringField()
@@ -2478,7 +2873,7 @@ class FieldTest(unittest.TestCase):
self.assertTrue('comments' in error.errors)
self.assertTrue(1 in error.errors['comments'])
self.assertTrue(isinstance(error.errors['comments'][1]['content'],
ValidationError))
ValidationError))
# ValidationError.schema property
error_dict = error.to_dict()
@@ -2604,11 +2999,11 @@ class FieldTest(unittest.TestCase):
DictFieldTest.drop_collection()
test = DictFieldTest(dictionary=None)
test.dictionary # Just access to test getter
test.dictionary # Just access to test getter
self.assertRaises(ValidationError, test.validate)
test = DictFieldTest(dictionary=False)
test.dictionary # Just access to test getter
test.dictionary # Just access to test getter
self.assertRaises(ValidationError, test.validate)
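Taken together, the CachedReferenceField tests above pin down the feature's surface: the fields argument selects which attributes of the referenced document are denormalised into the owner, saving the referenced document re-syncs the cached copies unless auto_sync=False, and sync_all() rebuilds them on demand. A minimal usage sketch, with database and model names that are illustrative only:

from mongoengine import (Document, StringField, CachedReferenceField,
                         connect)

connect('example_db')  # hypothetical database name

class Author(Document):
    name = StringField()
    country = StringField()

class Book(Document):
    title = StringField()
    # Only 'country' (plus the id) is copied into each Book document.
    author = CachedReferenceField(Author, fields=['country'])

a = Author(name='Wilson', country='BR').save()
b = Book(title='notes', author=a).save()

# The cached copy is embedded in the Book document itself, so the query
# below runs against 'author.country' without dereferencing Author:
assert Book.objects(author__country='BR').count() == 1

# Saving an Author re-syncs its cached copies by default; with
# auto_sync=False, Book.author.sync_all() refreshes them on demand.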


@@ -279,7 +279,7 @@ class FileTest(unittest.TestCase):
t.image.put(f)
self.fail("Should have raised an invalidation error")
except ValidationError, e:
self.assertEqual("%s" % e, "Invalid image: cannot identify image file")
self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'rb'))


@@ -3,3 +3,4 @@ from field_list import *
from queryset import *
from visitor import *
from geo import *
from modify import *


@@ -5,6 +5,8 @@ import unittest
from datetime import datetime, timedelta
from mongoengine import *
from nose.plugins.skip import SkipTest
__all__ = ("GeoQueriesTest",)
@@ -139,6 +141,7 @@ class GeoQueriesTest(unittest.TestCase):
def test_spherical_geospatial_operators(self):
"""Ensure that spherical geospatial queries are working
"""
raise SkipTest("https://jira.mongodb.org/browse/SERVER-14039")
class Point(Document):
location = GeoPointField()

tests/queryset/modify.py (new file, 102 lines)

@@ -0,0 +1,102 @@
import sys
sys.path[0:0] = [""]
import unittest
from mongoengine import connect, Document, IntField
__all__ = ("FindAndModifyTest",)
class Doc(Document):
id = IntField(primary_key=True)
value = IntField()
class FindAndModifyTest(unittest.TestCase):
def setUp(self):
connect(db="mongoenginetest")
Doc.drop_collection()
def assertDbEqual(self, docs):
self.assertEqual(list(Doc._collection.find().sort("id")), docs)
def test_modify(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(set__value=-1)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_with_new(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
new_doc = Doc.objects(id=1).modify(set__value=-1, new=True)
doc.value = -1
self.assertEqual(new_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_not_existing(self):
Doc(id=0, value=0).save()
self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None)
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_modify_with_upsert(self):
Doc(id=0, value=0).save()
old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True)
self.assertEqual(old_doc, None)
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
def test_modify_with_upsert_existing(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
def test_modify_with_upsert_with_new(self):
Doc(id=0, value=0).save()
new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1)
self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
def test_modify_with_remove(self):
Doc(id=0, value=0).save()
doc = Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).modify(remove=True)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_find_and_modify_with_remove_not_existing(self):
Doc(id=0, value=0).save()
self.assertEqual(Doc.objects(id=1).modify(remove=True), None)
self.assertDbEqual([{"_id": 0, "value": 0}])
def test_modify_with_order_by(self):
Doc(id=0, value=3).save()
Doc(id=1, value=2).save()
Doc(id=2, value=1).save()
doc = Doc(id=3, value=0).save()
old_doc = Doc.objects().order_by("-id").modify(set__value=-1)
self.assertEqual(old_doc.to_json(), doc.to_json())
self.assertDbEqual([
{"_id": 0, "value": 3}, {"_id": 1, "value": 2},
{"_id": 2, "value": 1}, {"_id": 3, "value": -1}])
def test_modify_with_fields(self):
Doc(id=0, value=0).save()
Doc(id=1, value=1).save()
old_doc = Doc.objects(id=1).only("id").modify(set__value=-1)
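# only("id") limits the projection, so the returned pre-modification
# document carries nothing but its _id: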
self.assertEqual(old_doc.to_mongo(), {"_id": 1})
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
if __name__ == '__main__':
unittest.main()
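The new tests/queryset/modify.py suite above specifies the QuerySet.modify() wrapper around MongoDB's find-and-modify. A minimal sketch of how the call composes, restating the suite's Doc model so the sketch is self-contained (the database name is illustrative only):

from mongoengine import connect, Document, IntField

connect('example_db')  # hypothetical database name

class Doc(Document):
    id = IntField(primary_key=True)
    value = IntField()

Doc.drop_collection()
Doc(id=1, value=1).save()

# One atomic round trip: update and fetch. new=True returns the
# post-update state; the default returns the pre-update document.
fresh = Doc.objects(id=1).modify(set__value=2, new=True)
assert fresh.value == 2

# upsert=True inserts when nothing matches; remove=True deletes the
# matched document and returns it (or None when nothing matched).
gone = Doc.objects(id=1).modify(remove=True)
assert gone.value == 2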

File diff suppressed because it is too large


@@ -1,6 +1,11 @@
import sys
sys.path[0:0] = [""]
import unittest
try:
import unittest2 as unittest
except ImportError:
import unittest
import datetime
import pymongo
@@ -34,6 +39,17 @@ class ConnectionTest(unittest.TestCase):
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
def test_sharing_connections(self):
"""Ensure that connections are shared when the connection settings are exactly the same
"""
connect('mongoenginetest', alias='testdb1')
expected_connection = get_connection('testdb1')
connect('mongoenginetest', alias='testdb2')
actual_connection = get_connection('testdb2')
self.assertEqual(expected_connection, actual_connection)
def test_connect_uri(self):
"""Ensure that the connect() method works properly with uri's
"""


@@ -0,0 +1,107 @@
import unittest
from mongoengine.base.datastructures import StrictDict, SemiStrictDict
class TestStrictDict(unittest.TestCase):
def strict_dict_class(self, *args, **kwargs):
return StrictDict.create(*args, **kwargs)
def setUp(self):
self.dtype = self.strict_dict_class(("a", "b", "c"))
def test_init(self):
d = self.dtype(a=1, b=1, c=1)
self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
def test_init_fails_on_nonexisting_attrs(self):
self.assertRaises(AttributeError, lambda: self.dtype(a=1, b=2, d=3))
def test_eq(self):
d = self.dtype(a=1, b=1, c=1)
dd = self.dtype(a=1, b=1, c=1)
e = self.dtype(a=1, b=1, c=3)
f = self.dtype(a=1, b=1)
g = self.strict_dict_class(("a", "b", "c", "d"))(a=1, b=1, c=1, d=1)
h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1)
i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2)
self.assertEqual(d, dd)
self.assertNotEqual(d, e)
self.assertNotEqual(d, f)
self.assertNotEqual(d, g)
self.assertNotEqual(f, d)
self.assertEqual(d, h)
self.assertNotEqual(d, i)
def test_setattr_getattr(self):
d = self.dtype()
d.a = 1
self.assertEqual(d.a, 1)
self.assertRaises(AttributeError, lambda: d.b)
def test_setattr_raises_on_nonexisting_attr(self):
d = self.dtype()
def _f():
d.x = 1
self.assertRaises(AttributeError, _f)
def test_setattr_getattr_special(self):
d = self.strict_dict_class(["items"])
d.items = 1
self.assertEqual(d.items, 1)
def test_get(self):
d = self.dtype(a=1)
self.assertEqual(d.get('a'), 1)
self.assertEqual(d.get('b', 'bla'), 'bla')
def test_items(self):
d = self.dtype(a=1)
self.assertEqual(d.items(), [('a', 1)])
d = self.dtype(a=1, b=2)
self.assertEqual(d.items(), [('a', 1), ('b', 2)])
def test_mappings_protocol(self):
d = self.dtype(a=1, b=2)
assert dict(d) == {'a': 1, 'b': 2}
assert dict(**d) == {'a': 1, 'b': 2}
class TestSemiSrictDict(TestStrictDict):
def strict_dict_class(self, *args, **kwargs):
return SemiStrictDict.create(*args, **kwargs)
def test_init_fails_on_nonexisting_attrs(self):
# disable irrelevant test
pass
def test_setattr_raises_on_nonexisting_attr(self):
# disable irrelevant test
pass
def test_setattr_getattr_nonexisting_attr_succeeds(self):
d = self.dtype()
d.x = 1
self.assertEqual(d.x, 1)
def test_init_succeeds_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2))
def test_iter_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d), ['a', 'b', 'c', 'x'])
def test_iteritems_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)])
def test_cmp_with_strict_dicts(self):
d = self.dtype(a=1, b=1, c=1)
dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1)
self.assertEqual(d, dd)
def test_cmp_with_strict_dict_with_nonexisting_attrs(self):
d = self.dtype(a=1, b=1, c=1, x=2)
dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2)
self.assertEqual(d, dd)
if __name__ == '__main__':
unittest.main()
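The suite above fixes the contract of the new StrictDict and SemiStrictDict containers in mongoengine.base.datastructures: create() builds a dict-like class bound to a fixed key set, which the semi-strict variant relaxes to tolerate extra attributes. A short sketch under those assumptions:

from mongoengine.base.datastructures import StrictDict, SemiStrictDict

Point = StrictDict.create(("x", "y"))
p = Point(x=1, y=2)
assert dict(p) == {'x': 1, 'y': 2}

try:
    p.z = 3  # keys outside the declared set are rejected
except AttributeError:
    pass

Loose = SemiStrictDict.create(("x", "y"))
q = Loose(x=1, y=2, z=3)  # the semi-strict variant accepts extras
assert q.z == 3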


@@ -291,6 +291,30 @@ class FieldTest(unittest.TestCase):
self.assertEqual(employee.friends, friends)
self.assertEqual(q, 2)
def test_list_of_lists_of_references(self):
class User(Document):
name = StringField()
class Post(Document):
user_lists = ListField(ListField(ReferenceField(User)))
class SimpleList(Document):
users = ListField(ReferenceField(User))
User.drop_collection()
Post.drop_collection()
u1 = User.objects.create(name='u1')
u2 = User.objects.create(name='u2')
u3 = User.objects.create(name='u3')
SimpleList.objects.create(users=[u1, u2, u3])
self.assertEqual(SimpleList.objects.all()[0].users, [u1, u2, u3])
Post.objects.create(user_lists=[[u1, u2], [u3]])
self.assertEqual(Post.objects.all()[0].user_lists, [[u1, u2], [u3]])
def test_circular_reference(self):
"""Ensure you can handle circular references
"""
@@ -1195,6 +1219,30 @@ class FieldTest(unittest.TestCase):
page = Page.objects.first()
self.assertEqual(page.tags[0], page.posts[0].tags[0])
def test_select_related_follows_embedded_referencefields(self):
class Playlist(Document):
items = ListField(EmbeddedDocumentField("PlaylistItem"))
class PlaylistItem(EmbeddedDocument):
song = ReferenceField("Song")
class Song(Document):
title = StringField()
Playlist.drop_collection()
Song.drop_collection()
songs = [Song.objects.create(title="song %d" % i) for i in range(3)]
items = [PlaylistItem(song=song) for song in songs]
playlist = Playlist.objects.create(items=items)
with query_counter() as q:
self.assertEqual(q, 0)
playlist = Playlist.objects.first().select_related()
songs = [item.song for item in playlist.items]
self.assertEqual(q, 2)
if __name__ == '__main__':
unittest.main()


@@ -2,11 +2,11 @@ import sys
sys.path[0:0] = [""]
import unittest
from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404
import django
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
@@ -19,6 +19,10 @@ settings.configure(
AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',)
)
# For Django >= 1.7
if hasattr(django, 'setup'):
django.setup()
try:
from django.contrib.auth import authenticate, get_user_model
from mongoengine.django.auth import User
@@ -32,6 +36,7 @@ except Exception:
DJ15 = False
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
from mongoengine.django.tests import MongoTestCase
from datetime import tzinfo, timedelta
ZERO = timedelta(0)
@@ -293,5 +298,11 @@ class MongoAuthTest(unittest.TestCase):
db_user = User.objects.get(username='user')
self.assertEqual(user.id, db_user.id)
class MongoTestCaseTest(MongoTestCase):
def test_mongo_test_case(self):
self.db.dummy_collection.insert({'collection': 'will be dropped'})
if __name__ == '__main__':
unittest.main()


@@ -37,7 +37,8 @@ class SignalTests(unittest.TestCase):
@classmethod
def post_init(cls, sender, document, **kwargs):
signal_output.append('post_init signal, %s' % document)
signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created))
@classmethod
def pre_save(cls, sender, document, **kwargs):
@@ -54,7 +55,9 @@ class SignalTests(unittest.TestCase):
@classmethod
def post_save(cls, sender, document, **kwargs):
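# _delta() returns a pair of (set, unset) dicts; their combined keys
# list every field touched by this save.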
dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
signal_output.append('post_save signal, %s' % document)
signal_output.append('post_save dirty keys, %s' % dirty_keys)
if 'created' in kwargs:
if kwargs['created']:
signal_output.append('Is created')
@@ -191,10 +194,16 @@ class SignalTests(unittest.TestCase):
a1 = self.Author(name='Bill Shakespeare')
self.Author.objects.insert([a1], load_bulk=False)
def load_existing_author():
a = self.Author(name='Bill Shakespeare')
a.save()
self.get_signal_output(lambda: None) # eliminate signal output
a1 = self.Author.objects(name='Bill Shakespeare')[0]
self.assertEqual(self.get_signal_output(create_author), [
"pre_init signal, Author",
"{'name': 'Bill Shakespeare'}",
"post_init signal, Bill Shakespeare",
"post_init signal, Bill Shakespeare, document._created = True",
])
a1 = self.Author(name='Bill Shakespeare')
@@ -203,6 +212,7 @@ class SignalTests(unittest.TestCase):
"pre_save_post_validation signal, Bill Shakespeare",
"Is created",
"post_save signal, Bill Shakespeare",
"post_save dirty keys, ['name']",
"Is created"
])
@@ -213,6 +223,7 @@ class SignalTests(unittest.TestCase):
"pre_save_post_validation signal, William Shakespeare",
"Is updated",
"post_save signal, William Shakespeare",
"post_save dirty keys, ['name']",
"Is updated"
])
@@ -221,12 +232,22 @@ class SignalTests(unittest.TestCase):
'post_delete signal, William Shakespeare',
])
signal_output = self.get_signal_output(load_existing_author)
# check the signal_output lines separately, because the reloaded object carries a random ObjectId
self.assertEqual(signal_output[0],
"pre_init signal, Author",
)
self.assertEqual(signal_output[2],
"post_init signal, Bill Shakespeare, document._created = False",
)
signal_output = self.get_signal_output(bulk_create_author_with_load)
# The output of this signal is not entirely deterministic. The reloaded
# object will have an object ID. Hence, we only check part of the output
self.assertEqual(signal_output[3],
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
self.assertEqual(signal_output[3], "pre_bulk_insert signal, [<Author: Bill Shakespeare>]"
)
self.assertEqual(signal_output[-2:],
["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Is loaded",])
@@ -234,7 +255,7 @@ class SignalTests(unittest.TestCase):
self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
"pre_init signal, Author",
"{'name': 'Bill Shakespeare'}",
"post_init signal, Bill Shakespeare",
"post_init signal, Bill Shakespeare, document._created = True",
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
"post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Not loaded",