Compare commits

...

261 Commits

Author SHA1 Message Date
Ross Lawley
aa2add39ad Version bump 2012-04-25 12:24:08 +01:00
Ross Lawley
a928047147 Fixing sessions for django 1.3 and django 1.4 2012-04-24 21:00:30 +01:00
Ross Lawley
88dc64653e Fix Django 1.3 auth 2012-04-18 16:41:09 +01:00
Ross Lawley
5f4b70f3a9 Version bump 2012-04-18 10:30:14 +01:00
Ross Lawley
51b429e5b0 Updated changelog 2012-04-18 10:28:54 +01:00
Ross Lawley
360624eb6e Merge branch 'dev' of github.com:hmarr/mongoengine into dev 2012-04-18 10:28:06 +01:00
Ross Lawley
d9d2291837 Merge branch 'master' into dev 2012-04-18 10:27:57 +01:00
Ross Lawley
cbdf816232 Merge branch 'master' of github.com:hmarr/mongoengine 2012-04-18 10:23:02 +01:00
Ross Lawley
2d71eb8a18 Added support back for Django 1.3 as well as 1.4 2012-04-18 10:22:26 +01:00
Ross Lawley
64d2532ce9 Merge pull request #484 from dcrosta/replica-set-connection
refactor get_connection

Thanks @dcrosta
2012-04-18 00:51:30 -07:00
Dan Crosta
0376910f33 refactor get_connection
In the previous version, the requested ReadPreference was ignored in the
case that the user specified a MongoDB URI. This rearranges the code to
ensure that only those values which we explicitly parse out of the URI
override values set as keyword arguments.

This leaves open the possibility of conflicts between the URI and the
kwargs -- we should consider whether to raise an exception if, e.g.,
username is specified as a kwarg *and* in the URI.
2012-04-17 19:50:22 -04:00
Ross Lawley
6d503119a1 Merge pull request #482 from wpjunior/patch-7
Small fixes for ReferenceField
2012-04-16 05:25:42 -07:00
Wilson Júnior
bfae93e57e small fixes for ReferenceField 2012-04-13 04:56:20 -03:00
Ross Lawley
d0e42a4798 Merge branch 'master' into dev 2012-03-27 01:47:48 +01:00
Ross Lawley
2a34358abc Updated connection refs #474 2012-03-27 01:47:17 +01:00
Ross Lawley
fd2bb8ea45 Merge pull request #474 from samuelclay/patch-1
The port is defaulted in to conn_settings, so discard the port since hos...
2012-03-26 17:17:26 -07:00
Ross Lawley
98e5daa0e0 Added mostlystatic to the AUTHORS 2012-03-27 00:49:34 +01:00
Samuel Clay
ad2e119282 The port is defaulted in to conn_settings, so discard the port since hosts_or_uri must be used. 2012-03-26 16:48:37 -07:00
Ross Lawley
c20c30d8d1 Merge pull request #471 from mostlystatic/master
Simple fix to unknown connection alias error message
2012-03-26 16:47:46 -07:00
mostlystatic
66d215c9c1 Fix for unknown connection alias error message. 2012-03-24 20:01:40 +00:00
Ross Lawley
46e088d379 Merge branch 'master' into dev 2012-03-24 19:07:17 +00:00
Ross Lawley
bbdd15161a 0.6.3 release 2012-03-24 19:06:08 +00:00
Ross Lawley
ea9dc8cfb8 Merge branch 'master' of github.com:hmarr/mongoengine 2012-03-24 19:03:44 +00:00
Ross Lawley
6bd2ccc9bf Updated authors 2012-03-24 19:03:24 +00:00
Ross Lawley
56327c6b58 Merge pull request #470 from arbaal/master
Patches for Django 1.4 compatibility
2012-03-24 12:02:44 -07:00
Nils Hasenbanck
712e8a51e4 Merge branch 'master' of github.com:arbaal/mongoengine
Conflicts:
	mongoengine/django/sessions.py

Signed-off-by: Nils Hasenbanck <nils@hasenbanck.de>
2012-03-24 19:49:02 +01:00
Nils Hasenbanck
421f324f9e Fixed the exception when saving a new session
The session was not created for some reason. Now it is.

Signed-off-by: Nils Hasenbanck <nils@hasenbanck.de>
2012-03-24 19:43:01 +01:00
Nils Hasenbanck
8fe4a70299 Fixed the exception when saving a new session
Signed-off-by: Nils Hasenbanck <nils@hasenbanck.de>
2012-03-24 19:24:42 +01:00
Nils Hasenbanck
3af6d0dbfd Replaces deprecated hasher with new django 1.4 hasher
This way we can even use the new hasher configuration
django 1.4 provides.

Signed-off-by: Nils Hasenbanck <nils@hasenbanck.de>
2012-03-24 11:08:00 +01:00
Nils Hasenbanck
e2bef076d3 Fixed the session backend for django 1.4
Signed-off-by: Nils Hasenbanck <nils@hasenbanck.de>
2012-03-24 11:07:37 +01:00
Ross Lawley
1bf9f28f4b Merge branch 'master' into dev 2012-03-22 15:49:57 +00:00
Ross Lawley
f1e7b97a93 Updated changelog 2012-03-22 15:48:54 +00:00
Ross Lawley
8cfe13ad90 Merge branch 'master' into dev 2012-03-22 15:46:32 +00:00
Ross Lawley
0f420abc8e Added test for listfields containing embedded documents
Added Adam to the authors - thanks for the patch
fixes #466
2012-03-22 15:44:22 +00:00
Ross Lawley
3b5b715567 Merge branch 'dev' of https://github.com/aparrish/mongoengine into 466 2012-03-22 15:32:45 +00:00
Adam Parrish
520051af25 preparing values in a ListField won't mangle embedded documents any more 2012-03-21 11:03:49 -07:00
Ross Lawley
fd18a48608 Rst fix 2012-03-15 16:36:04 +00:00
Ross Lawley
64860c6287 Fix signals documentation 2012-03-15 16:27:31 +00:00
Ross Lawley
58635b24ba Updated changelog 2012-03-12 10:35:02 +00:00
Ross Lawley
3ec9dfc108 Merge branch 'master' into dev 2012-03-12 10:33:08 +00:00
Ross Lawley
bd1572f11a Fixed upgrade docs and instructions 2012-03-12 10:31:51 +00:00
Ross Lawley
95c58bd793 Merge branch 'master' into dev 2012-03-08 12:40:20 +00:00
Ross Lawley
65591c7727 Version Bump 2012-03-08 12:40:07 +00:00
Ross Lawley
737cbf5f60 Updated docs and added fix for _types and positional operator
Bumped version to 0.6.2
2012-03-08 12:39:25 +00:00
Ross Lawley
4c67cbb4b7 Updated upgrade docs. Reported issues with older pymongo and sharding 2012-03-06 12:31:14 +00:00
Ross Lawley
ed2cc2a60b Adding Jacob Peddicord to Authors 2012-03-05 16:20:37 +00:00
Ross Lawley
859e9b3cc4 Version bump 2012-03-05 16:16:37 +00:00
Ross Lawley
c34e79fad9 Fix replicaset connection 2012-03-05 16:15:06 +00:00
Ross Lawley
82446d641e Updated the spec 2012-03-05 14:28:33 +00:00
Ross Lawley
9451c9f331 Updated Readme - points to readthedocs 2012-03-05 13:14:36 +00:00
Ross Lawley
61411bb259 Doc updates 2012-03-05 12:26:44 +00:00
Ross Lawley
fcdb0eff8f Added for read the docs 2012-03-05 12:21:53 +00:00
Ross Lawley
30d9347272 Updated readme 2012-03-05 12:20:59 +00:00
Ross Lawley
7564bbdee8 Fix docs 2012-03-05 11:43:54 +00:00
Ross Lawley
69251e5000 Updated docs 2012-03-05 11:35:12 +00:00
Ross Lawley
6ecdc7b59d Added FutureWarning for inherited classes not declaring allow_inheritance
Refs #437
2012-03-05 11:25:13 +00:00
Ross Lawley
b7d0d8f0cc Added warning to SortedListField 2012-03-05 11:20:22 +00:00
Ross Lawley
df52ed1162 Merge pull request #449 from kajic/uri-fix
Uri connection fix
2012-03-05 00:23:18 -08:00
Ross Lawley
aa6370dd5d Merge pull request #448 from kajic/delta-fix
Thanks Kajic
2012-03-05 00:23:04 -08:00
Robert Kajic
c272b7901f Fix for bug where changes to an embedded document field are not recorded if the root document was just created+saved. 2012-03-02 15:35:15 +01:00
Robert Kajic
c61de6540a Don't ignore kwargs for uri style connections 2012-03-02 15:33:54 +01:00
Robert Kajic
3c7bf50089 Make uri style connections use parameters not specified in the uri, as well as other keyword arguments 2012-03-02 15:33:38 +01:00
Ross Lawley
32fc4152a7 Enable covered indexes for simple documents.
Refs #444
2012-03-02 13:42:24 +00:00
Ross Lawley
bdf7187d5c Added a limit to .get 2012-02-29 14:57:24 +00:00
Ross Lawley
1639576203 Bumped version 2012-02-29 12:05:47 +00:00
Ross Lawley
ae20c785ea Updated docs 2012-02-29 12:04:43 +00:00
Ross Lawley
a2eb876f8c No longer always upsert on save
closes #407
2012-02-29 11:39:10 +00:00
Ross Lawley
5a1eaa0a98 Updated new deref test 2012-02-29 11:23:43 +00:00
Ross Lawley
398fd4a548 Merge branch 'cleaned_dev' of https://github.com/Ankhbayar/mongoengine into test 2012-02-29 11:13:48 +00:00
Ross Lawley
44b9fb66e1 Updates must have an operation
Closes #387
2012-02-29 11:04:09 +00:00
Ross Lawley
2afa2171f9 Updated Changelog 2012-02-29 10:32:58 +00:00
Ross Lawley
1d7ea71c0d DeReference is now used in a thread safe manner
No global / module instance is needed
Fixes #399
2012-02-29 10:31:33 +00:00
Ross Lawley
2a391f0f16 Raise an error if trying to perform a join
You can't join across reference fields, so raise an error
if someone tries to.
2012-02-29 10:10:51 +00:00
Ross Lawley
e9b8093dac Updated changelog 2012-02-29 10:09:16 +00:00
Ross Lawley
6a229cfbc5 Updates can now take raw queries 2012-02-24 15:48:32 +00:00
Ross Lawley
3300f409ba Update Authors and changelist 2012-02-24 10:34:51 +00:00
Ross Lawley
4466005363 Merge branch 'dev' of github.com:hmarr/mongoengine into dev 2012-02-24 10:29:06 +00:00
Ross Lawley
296ef5bddf Merge branch 'geo2d' into dev 2012-02-24 10:28:52 +00:00
Ross Lawley
1f2a432e82 Merge pull request #439 from kajic/dev
register_connection bugfix
2012-02-24 02:12:55 -08:00
Robert Kajic
855933ab2a uri_dict will have the 'database' key even if the database wasn't present in the uri. We must check it's value as well. 2012-02-24 00:07:57 +01:00
Ross Lawley
ece8d25187 Added replicaset connection support
Provide replicaSet=NAME in your connection.
fixes #423
2012-02-17 17:09:48 +00:00
Ross Lawley
589a720162 Updated changelog 2012-02-17 17:09:14 +00:00
Ross Lawley
a59b518cf2 Updates to imports for future pymongo 2.2 2012-02-17 11:18:25 +00:00
Ross Lawley
a15352a4f8 Merge branch 'dev' of github.com:hmarr/mongoengine into dev 2012-02-17 09:41:41 +00:00
Ross Lawley
df65f3fc3f base no longer expects meta to have allow_inheritance
Closes #430 #431
2012-02-17 09:41:01 +00:00
Robert Kajic
734986c1b5 Documentation on geospatial indexes and how to create them explicitly 2012-02-16 10:41:47 +01:00
Анхбаяр Лхагвадорж
4a9ed5f2f2 Fix dereference failure in some cases. 2012-02-02 18:33:12 +08:00
Ross Lawley
088f229865 Merge pull request #421 from linuxnow/integration/dev
Integration/dev
2012-02-01 04:05:06 -08:00
Pau Aliagas
cb2cb851e2 Fix formatting typo in changelog entry 2012-02-01 12:29:05 +01:00
Robert Kajic
d3962c4f7d Added support for creating a geo2d index by prefixing the field name with a * 2012-01-31 22:31:24 +01:00
Ross Lawley
0301135f96 Added uri style connection handling 2012-01-30 10:24:45 +00:00
Ross Lawley
f59aa922ea Added more .scalar tests 2012-01-27 12:20:47 +00:00
Ross Lawley
f60a49d6f6 Added .scalar to Queryset
More efficient than the previous .values_list implementation Ref #393
Reverted some of the .values_list code thats no longer needed.

Closes #415
2012-01-27 11:45:12 +00:00
Alice Bevan-McGregor
9a190eb00d Added ability to have scalar return values instead of partially-populated Document instances. 2012-01-27 11:45:11 +00:00
Ross Lawley
6bad4bd415 Merge pull request #412 from faulkner/dict-update
Add dict.update() support to BaseDict.

Thx Faulkner :)
2012-01-16 03:32:45 -08:00
Chris Faulkner
50d9b0b796 Add dict.update() support to BaseDict. 2012-01-16 19:13:03 +08:00
Ross Lawley
12f884e3ac Fixes typo in documents - thanks Shalabh
Closes #406
2012-01-16 09:11:00 +00:00
Ross Lawley
02b1aa7355 Added Ashwin Purohit to authors
Refs #410
2012-01-16 09:06:18 +00:00
Ross Lawley
90bfa608dd Merge branch 'master' into dev
Conflicts:
	docs/guide/signals.rst
	python-mongoengine.spec
2012-01-16 09:05:38 +00:00
Ross Lawley
13f38b1c1d Merge pull request #410 from purohit/master
typo fix
2012-01-16 01:04:29 -08:00
Ross Lawley
1afe7240f4 Fixed pagination limit / skip bug
fixes #398
2012-01-16 09:03:12 +00:00
Ashwin Purohit
7a41155178 typo in signals guide 2012-01-14 23:21:43 -08:00
Ross Lawley
39a20ea471 Merge pull request #403 from wpjunior/patch-5
Fixes for None values in QuerySet.values_list
2012-01-06 01:17:27 -08:00
Wilson Júnior
d8855a4a0f fixes for None values in QuerySet.values_list 2012-01-05 13:35:32 -02:00
Ross Lawley
de8da78042 Update docs/changelog.rst 2012-01-03 20:42:24 +00:00
Ross Lawley
318b42dff2 Merge pull request #393 from wpjunior/queryset_select
select method in Queryset
2012-01-03 12:40:49 -08:00
Ross Lawley
0018674b62 Update docs/changelog.rst 2012-01-03 20:37:33 +00:00
Ross Lawley
82913e8d69 Merge pull request #396 from wpjunior/mixin_inheritance
Mixin inheritance
2012-01-03 12:36:45 -08:00
Wilson Júnior
0d867a108d mixin inheritance 2011-12-19 11:31:42 -02:00
Wilson Júnior
5ee4b4a5ac added count/len for ListResult 2011-12-16 11:49:20 -02:00
Wilson Júnior
62219d9648 changed name 2011-12-16 11:07:38 -02:00
Ross Lawley
6d9bfff19c Started work on performance
Added an initial benchmark.py
Much more performant than 0.5.2 but still work todo.
2011-12-16 12:41:47 +00:00
Ross Lawley
7614b92197 Fixes super in BaseDict
Closes #395
2011-12-15 09:16:35 +00:00
Wilson Júnior
7c1afd0031 tests for db_field 2011-12-13 11:56:35 -02:00
Wilson Júnior
ca7b2371fb added support for dereferences 2011-12-13 11:54:19 -02:00
Wilson Júnior
ed5fba6b0f support for embedded fields 2011-12-13 07:46:49 -02:00
Ross Lawley
2b3b3bf652 Prelim PyPy support
Refs: #392
2011-12-12 16:26:10 +00:00
Ross Lawley
11daf706df Added Sharding support
Added shard_key meta, so save() and update() passes shard keys
to the pymongo query.  Also made shard key fields immutable.

Closes #388 and #389
2011-12-12 16:13:19 +00:00
Wilson Júnior
4a269eb2c4 added .select method 2011-12-12 13:39:37 -02:00
Ross Lawley
9b3899476c Allow arbitrary kwargs to be passed to pymongo
Fix pymongo 2.1+ check
Closes #390 closes #378
2011-12-12 09:14:50 +00:00
Ross Lawley
febb3d7e3d Updated connection - so handles < pymongo 2.1
Updated docs

Refs #378
2011-12-09 08:39:50 -08:00
Ross Lawley
83e3c5c7d8 Updated connection for pymongo 2.1 support
closes #378
2011-12-09 08:26:39 -08:00
Ross Lawley
3c271845c9 Merge pull request #386 from wpjunior/patch-4
Custom db_alias support for MongoEngine DjangoSession
2011-12-07 07:56:43 -08:00
Wilson Júnior
56c4292164 added custom db_alias support for MongoEngine DjangoSession 2011-12-07 13:36:35 -02:00
Ross Lawley
2531ade3bb Added David to Authors
refs #380
2011-12-07 03:01:56 -08:00
Ross Lawley
3e2f035400 Abstract documents can now declare indexes
fixes #380
2011-12-07 02:15:50 -08:00
Ross Lawley
e7bcb5e366 Updated docs and Authors list re: db_alias 2011-12-07 01:46:11 -08:00
Ross Lawley
112e921ce2 Syntax cleaning 2011-12-07 01:34:36 -08:00
Ross Lawley
216f15602b Fixing test 2011-12-07 01:17:35 -08:00
Ross Lawley
fbe1901e65 Added some tests #384 2011-12-07 01:16:45 -08:00
Ross Lawley
8d2bc444bb db_alias used in model, queryset, reference fields, dereference. 2011-12-07 01:16:36 -08:00
Ross Lawley
cf4a45da11 Dynamic Documents now support string query lookups 2011-12-06 06:38:25 -08:00
Ross Lawley
be78209f94 Added test showing you can add index for dynamic docs 2011-12-05 04:44:40 -08:00
Ross Lawley
45b5bf73fe Added Jan to the contributors list
Refs #135 #381
2011-12-05 04:17:51 -08:00
Ross Lawley
84f9e44b6c Fixed GridFS documents can now be pickled
Refs #135 #381
2011-12-05 04:16:57 -08:00
Ross Lawley
700bc1b4bb Multiple fields with the same db_field now raises Exception
Closes #329
2011-12-02 08:44:15 -08:00
Ross Lawley
beef2ede25 Merge pull request #313 from linuxnow/integration/master
add .gitignore and spec file to master
2011-12-02 08:15:24 -08:00
Ross Lawley
9bfc838029 Updated Docs and bumped version
Hopefully nearer 0.6
closes #368
2011-12-02 08:14:25 -08:00
Ross Lawley
e9d7353294 Updated with_id to raise Error if used with a filter.
Closes #365
2011-12-02 07:11:06 -08:00
Ross Lawley
a6948771d8 Added ReferenceField handling with .distinct()
Closes #356
2011-12-02 06:47:58 -08:00
Ross Lawley
403977cd49 Added test for saving references unnecessarily.
Refs #359
2011-12-02 06:40:57 -08:00
Ross Lawley
153538cef9 Added test for saving false on dynamic documents
Refs #282
Closes #311
2011-12-02 06:34:51 -08:00
Ross Lawley
9f1196e982 Merge branch 'dev' of github.com:hmarr/mongoengine into dev 2011-12-02 06:04:12 -08:00
Ross Lawley
6419a8d09a Fixed False BooleanField marked as unset by _delta()
Closes #282
2011-12-02 06:03:15 -08:00
Ross Lawley
769cee3d64 Merge pull request #379 from wpjunior/patch-3
Small improvements for item_frequencies
2011-12-02 05:19:49 -08:00
Wilson Júnior
fc460b775e Small improvements for item_frequencies 2011-12-02 09:46:51 -02:00
Ross Lawley
ba59e498de Custom __instancecheck__ no longer needed
Would be needed if calling a classmethod in __new__
but as we dont support reverse_delete_rules on embedded
documents there is no longer the need for it.

Refs #227
2011-12-02 02:52:06 -08:00
Ross Lawley
939bd2bb1f Updated Documentation 2011-12-02 02:49:16 -08:00
Ross Lawley
e231f71b4a EmbeddedDocuments dont support Reverse Delete Rules
Now throws an InvalidDocumentError

Refs #227
2011-12-02 02:46:55 -08:00
Ross Lawley
d06c5f036b Cleaned up _transform_query
Refs #354 #376
2011-12-02 00:37:32 -08:00
Ross Lawley
071562d755 Fixed issue with dynamic documents deltas
Closes #377
2011-12-02 00:11:25 -08:00
Ross Lawley
391f659af1 Updated docs re: reverse delete rules
refs #254
2011-12-01 08:16:13 -08:00
Ross Lawley
8a44232bfc Added Reverse Delete Rule support to ListFields
DictFields and MapFields aren't supported and raise an
InvalidDocument Error

Closes #254
2011-12-01 07:57:24 -08:00
Ross Lawley
9188f9bf62 Added custom cascade kwarg options
Allows the user to overwrite any default kwargs

Closes #295
2011-11-30 08:54:33 -08:00
Ross Lawley
0187a0e113 Handle updating and getting None values
Fixes updating a field to None, so it works in a similar
fashion as unsetting it via save()

Updated to handle null data from the database

Fixes #362
2011-11-30 08:12:44 -08:00
Ross Lawley
beacfae400 Removed use of _get_subclasses favouring get_document
_get_subclasses not actually required and causes issues
where Base Classes aren't imported but dont actually
need to be.

Fixes #271
2011-11-30 07:55:33 -08:00
Ross Lawley
fdc385ea33 Allow dynamic data to be deleted
Fixes #374
2011-11-30 03:06:46 -08:00
Ross Lawley
8b97808931 Added docs for elemMatch 2011-11-30 02:30:29 -08:00
Ross Lawley
179c4a10c8 Merge branch 'elemmatch' of https://github.com/wpjunior/mongoengine into elemmatch 2011-11-30 02:26:54 -08:00
Ross Lawley
6cef571bfb Added Reverse option to SortedLists
Thanks Stephen Young for the patch
closes #364
2011-11-30 02:15:47 -08:00
Ross Lawley
fbe8b28b2e Merge branch 'bug/318' into dev
Conflicts:
	AUTHORS
2011-11-29 03:46:11 -08:00
Ross Lawley
a8d91a56bf Fixes circular list references
The depth deduction for _fields was overzealous;
now max_depth is honoured.

Fixes #373
2011-11-29 03:43:49 -08:00
Ross Lawley
8d7291506e Updated Authors 2011-11-29 01:44:23 -08:00
Wilson Júnior
d9005ac2fc added elemMatch support 2011-11-28 14:45:57 -02:00
Ross Lawley
c775c0a80c Circular references with EmbeddedDocumentField fix
Fixes #345
2011-11-28 08:23:28 -08:00
Ross Lawley
700e2cd93d Updated changelog 2011-11-28 08:16:36 -08:00
Ross Lawley
083f00be84 Fixes passed in Constructor data for complexfields
Fixes #355
2011-11-28 08:09:17 -08:00
Ross Lawley
d00859ecfd Updated changelog - DictField fix 2011-11-28 07:07:26 -08:00
Ross Lawley
4e73566c11 Updated changelog - optional cascade saves 2011-11-28 07:06:56 -08:00
Ross Lawley
208a467b24 Added dictfield check for Int keys
Fixes #371
2011-11-28 07:05:54 -08:00
Ross Lawley
e1bb453f32 Configurable cascading saves
Updated cascading save logic - can now add meta or pass
cascade to save().  Also Cleaned up reset changed fields logic
as well, so less looping

Refs: #370 #349
2011-11-28 06:51:17 -08:00
Ross Lawley
4607b08be5 Making BaseDict / List more robust 2011-11-28 06:35:19 -08:00
Ross Lawley
aa5c776f3d Copy and paste == brainless 2011-11-28 06:21:45 -08:00
Ross Lawley
0075c0a1e8 Gracefully handle when self.observer is absent
After pickles / deepcopying etc..
2011-11-28 05:54:03 -08:00
Ross Lawley
83fff80b0f Cleaned up dereferencing
Dereferencing now respects max_depth, so should be more performant.
Reload is chainable and can be passed a max_depth for dereferencing
Added an Observer for ComplexBaseFields.

Refs #324 #323 #289
Closes #320
2011-11-25 08:36:47 -08:00
Ross Lawley
5e553ffaf7 Added reconnect back into the syntax
forces a disconnect.
2011-11-24 00:59:43 -08:00
Ross Lawley
6d185b7f7a Merge pull request #366 from wpjunior/filemultidb
db_alias support for FileFields
2011-11-22 12:17:35 -08:00
Ross Lawley
e80144e9f2 Added multidb support
No change required to upgrade to multiple databases. Aliases are used
to describe the database and these can be manually registered or fall
through to a default alias using connect.

Made get_connection and get_db first class members of the connection class.
Old style _get_connection and _get_db still supported.

Refs: #84 #87 #93 #215
2011-11-22 08:01:14 -08:00
Wilson Júnior
fa4b820931 added support for db_alias in FileFields 2011-11-22 13:40:01 -02:00
Ross Lawley
63c5a4dd65 Fixes saving document schemas that have changed
Ensures that form defaults which are documents are
automatically marked as changed, so schemas can evolve
without migration issues.

[#360]
2011-11-22 07:34:08 -08:00
Adam Parrish
34646a414c Fixes bug using positional operator to update embedded documents.
append_field wasn't getting reset to True in the loop, so fields wouldn't
get appended to clean_fields after str was encountered

[#354]
2011-11-11 01:10:00 -08:00
Ross Lawley
5aeee9deb2 Added PIL to spec file
[#314]
2011-11-04 01:55:46 -07:00
Ross Lawley
4c1509a62a Updated docs re choices
[#284] [#314]
2011-11-04 01:54:30 -07:00
Ross Lawley
bfdaae944d Merge branch 'dev' of github.com:hmarr/mongoengine into dev 2011-11-04 01:46:45 -07:00
Ross Lawley
4e44198bbd Clean up of choices code and added tests
[#284] [#314]
2011-11-04 01:45:44 -07:00
Ross Lawley
a4e8177b76 Merge branch 'master' of https://github.com/KarimAllah/mongoengine into choices
Conflicts:
	mongoengine/base.py
2011-11-04 01:34:58 -07:00
Ross Lawley
81bf5cb78b Merge pull request #348 from wpjunior/patch-2
fixes for __repr__ documents
2011-11-02 04:54:08 -07:00
Wilson Júnior
a9fc476fb8 fixed errors in repr if unicode string is found 2011-11-02 09:38:26 -02:00
Ross Lawley
26f0c06624 Merge branch 'get_or_create_optizations' of https://github.com/wpjunior/mongoengine into get_or_create_optimizations 2011-11-01 02:21:37 -07:00
Ross Lawley
59bd72a888 Added tests for __repr__ fix 2011-11-01 02:15:31 -07:00
Ross Lawley
7d808b483e Merge branch 'master' of https://github.com/grubberr/mongoengine into slice 2011-11-01 02:14:44 -07:00
Ross Lawley
3ee60affa9 Renamed schema for errors
Now is `to_dict()` as is more explicit
[refs #344 #328]
2011-11-01 01:51:58 -07:00
Ross Lawley
558b8123b5 Merge branch 'validation-schema' of https://github.com/n1k0/mongoengine into validation-schema
Conflicts:
	mongoengine/base.py
	mongoengine/fields.py
2011-11-01 01:45:32 -07:00
Ross Lawley
ecdf2ae5c7 Updated docs and Authors 2011-11-01 01:20:47 -07:00
Ross Lawley
aa9ed614ad Merge branch 'master' of https://github.com/mjtamlyn/mongoengine into transform_update 2011-11-01 01:19:30 -07:00
Sergey Chvalyuk
1acdb880fc fixing #336 2011-10-28 00:23:13 +03:00
Ross Lawley
7cd22aaf83 Removed debug print 2011-10-27 01:18:32 -07:00
Ross Lawley
5eb63cfa30 Updated changelog 2011-10-27 01:14:51 -07:00
Ross Lawley
5dc998ed52 Merge branch 'dev-bulkinsertsignal' of https://github.com/colinhowe/mongoengine into bulk 2011-10-27 01:13:59 -07:00
Wilson Júnior
8074094568 optimizations for get_or_create 2011-10-27 06:02:59 -02:00
Ross Lawley
56d1139d71 Added ImageField Support
Thanks to @wpjunior for the patch
Closes [#298]
2011-10-27 00:58:47 -07:00
Ross Lawley
165cdc8840 Updated changelog 2011-10-27 00:35:34 -07:00
Ross Lawley
c42aef74de Merge pull request #326 from wpjunior/fixes-325
Fixes for #325 issue - Thanks @wpjunior
2011-10-27 00:33:36 -07:00
Ross Lawley
634e1f661f Updated docs / upgrade notes 2011-10-27 00:31:48 -07:00
Nicolas Perriault
a1db437c42 got rid of assert for validation; ValidationError now extends AssertionError for BC purpose 2011-10-25 22:38:43 +02:00
Nicolas Perriault
b8e2bdc99f simpler raising of ValidationError 2011-10-25 20:04:39 +02:00
Ross Lawley
52d4ea7d78 Merge pull request #316 from wpjunior/fixes-315
Fixes for #315 issue
2011-10-25 03:47:19 -07:00
Nicolas Perriault
7db5335420 fixed URLField.validate() wasn't using BaseField.error() to raise a ValidationError 2011-10-25 10:53:58 +02:00
Nicolas Perriault
62480fe940 added a ValidatorError.schema properties which contains a dict representation of the whole validation error schema 2011-10-24 17:15:34 +02:00
Nicolas Perriault
3d7b30da77 first version of BC validation schema 2011-10-24 01:02:31 +02:00
Wilson Júnior
8e87648d53 added tests for get_or_create 2011-10-19 09:44:49 -02:00
Wilson Júnior
f842c90007 Merge branches 'master' and 'fixes-325' into fixes-325 2011-10-19 06:57:39 -02:00
Wilson Júnior
7f2b686ab5 added drop_collection for test 2011-10-19 06:55:05 -02:00
Wilson Júnior
b09c52fc7e fixes for #325 issue 2011-10-19 06:30:41 -02:00
Pau Aliagas
202d6e414f Update spec file 2011-10-13 20:29:36 +02:00
Wilson Júnior
3d817f145c fixes for #315 issue 2011-10-12 18:28:40 -03:00
Pau Aliagas
181e191fee Add some more files to ignore in .gitignore 2011-10-12 11:00:40 +02:00
Pau Aliagas
79ecf027dd Add dependencies to spec file
Add spec file for rpm-based systems
2011-10-12 11:00:02 +02:00
Ross Lawley
76d771d20f Merge branch 'master' into dev
Conflicts:
	AUTHORS
	docs/changelog.rst
	mongoengine/base.py
2011-10-12 00:35:01 -07:00
Ross Lawley
24b8650026 Merge remote branch 'origin/dev' into dev 2011-10-12 00:18:37 -07:00
Ross Lawley
269e6e29d6 Updated Authors 2011-10-12 00:18:12 -07:00
Ross Lawley
c4b0002ddb Fixed typo 2011-10-11 14:59:58 +02:00
Ross Lawley
53598781b8 Facepalm - mutable default argument in method.. 2011-10-11 12:44:41 +02:00
Ross Lawley
0624cdd6e4 Fixes collection creation post drop_collection
Thanks to Julien Rebetez for the original patch
closes [#285]
2011-10-11 02:26:33 -07:00
Ross Lawley
5fb9d61d28 Merge remote branch 'origin/dev' into dev 2011-10-11 00:15:23 -07:00
Ross Lawley
7b1860d17b Fixes tree based circular references
Thanks to jpfarias for the fix.
Also normalised the other circular checks.
2011-10-10 09:16:32 -07:00
Ross Lawley
8797565606 Updated changelog 2011-10-08 08:13:53 -07:00
Ross Lawley
3d97c41fe9 Merge pull request #305 from linuxnow/integration/error_msgs
Add field name to validation exception messages
2011-10-08 08:13:40 -07:00
Ross Lawley
5edfeb2e29 Merge pull request #303 from linuxnow/integration/uuid
Add UUIDField
2011-10-08 08:08:45 -07:00
Ross Lawley
268908b3b2 Improvements to .get() efficiency
Closes #307 and #290
2011-10-08 08:06:23 -07:00
Ross Lawley
fb70b47acb Merge branch 'master' into dev
Conflicts:
	docs/changelog.rst
2011-10-08 07:33:52 -07:00
Marc Tamlyn
219d316b49 Fix iteration on querysets.
If iteration of a queryset was interrupted (by a break, or a caught
error), the next iterator would start from the second element as the
cursor had already moved to the first. This is fixed by adding a rewind
into the __iter__ method.
2011-10-05 13:26:57 +01:00
Pau Aliagas
3aa2233b5d Add field name to exception messages 2011-10-04 18:35:32 +02:00
Pau Aliagas
d59862ae6e Merge remote-tracking branch 'upstream/dev' into integration/uuid 2011-10-04 16:02:58 +02:00
Pau Aliagas
0a03f9a31a Add unit tests for UUIDField 2011-10-04 15:59:56 +02:00
Ross Lawley
dca135190a Fixed changelog 2011-10-04 04:28:30 -07:00
Ross Lawley
aedcf3dc81 Merge branch 'listfield' into dev
Conflicts:
	docs/changelog.rst
2011-10-04 04:28:04 -07:00
Ross Lawley
6961a9494f Updates to ComplexFields
Required now means they cannot be empty [#302]
2011-10-04 04:26:56 -07:00
Ross Lawley
6d70ef1a08 Updated changelog [#304] 2011-10-04 03:18:39 -07:00
Ross Lawley
e1fc15875d Merge remote branch 'origin/dev' into dev 2011-10-04 03:10:10 -07:00
Ross Lawley
94ae1388b1 Updated .gitignore 2011-10-04 02:59:00 -07:00
Ross Lawley
17728d4e74 Added tests for empty lists 2011-10-04 02:57:50 -07:00
Ross Lawley
417aa743ca Merge pull request #304 from linuxnow/integration/specfile
Add spec file for rpm-based distributions
2011-10-04 02:27:05 -07:00
Pau Aliagas
2f26f7a827 Add dependencies to spec file
Add spec file for rpm-based systems
2011-10-04 10:33:26 +02:00
Pau Aliagas
09f9c59b3d Add some more files to ignore in .gitignore 2011-10-04 10:24:44 +02:00
Pau Aliagas
bec6805296 Add UUIDField 2011-10-04 10:20:41 +02:00
Pau Aliagas
d99c7c20cc Don't allow empty lists when they are required
When using ListField, an empty list is added as the default value.
But when you mark this field as required, you expect it not to be empty,
so this patch makes sure that this is duly checked.
2011-10-04 10:12:21 +02:00
Ross Lawley
60b6ad3fcf Added test for listfield fix
Added Pau Aliagas to authors
[closes #299]
2011-10-03 05:30:23 -07:00
Pau Aliagas
9b4d0f6450 Make sure that ListFields are not strings 2011-10-03 05:29:24 -07:00
Ross Lawley
1a2c74391c Added grubberr to AUTHORS
[Refs #296]
2011-10-03 05:18:25 -07:00
Sergey Chvalyuk
08288e591c small optimizing fix 2011-10-03 05:16:17 -07:00
Ross Lawley
823cf421fa Fixes to circular references.
Removes infinite looping
refs #294
2011-09-29 14:07:30 -07:00
Ross Lawley
3799f27734 Merge branch 'master' of https://github.com/kuno/mongoengine into kuno 2011-09-28 01:51:06 -07:00
Ross Lawley
a7edd8602c Added support for expando style dynamic documents.
Added two new classes: DynamicDocument and DynamicEmbeddedDocument
for handling expando style setting of attributes.

[closes #112]
2011-09-28 01:39:39 -07:00
Karim Allah
c081aca794 Fixing dereferencing when the dereferenced-document wasn't found. 2011-09-25 18:58:40 +02:00
kuno
2ca6648227 fixed indentation error in signal docs 2011-09-20 21:30:20 +08:00
Ross Lawley
1af54f93f5 Merge pull request #287 from wpjunior/fixitem_frequencies
Fix item_frequencies
2011-09-20 06:04:30 -07:00
Wilson Júnior
a9cacd2e06 fixed embedded null item_frequencies 2011-09-20 08:56:30 -03:00
Ross Lawley
f7fbb3d2f6 Relaxed field name checking on embedded documents 2011-09-20 03:45:11 -07:00
Karim Allah
adb7bbeea0 Being compatible with non-django style choices 2011-09-18 19:48:33 +02:00
Colin Howe
b91db87ae0 Pre and post bulk-insert signals 2011-09-09 19:17:40 +01:00
40 changed files with 4255 additions and 723 deletions

5
.gitignore vendored
View File

@@ -1,7 +1,8 @@
.* .*
!.gitignore !.gitignore
*.pyc *~
.*.swp *.py[co]
.*.sw[po]
*.egg *.egg
docs/.build docs/.build
docs/_build docs/_build

35
AUTHORS
View File

@@ -1,11 +1,11 @@
The PRIMARY AUTHORS are (and/or have been): The PRIMARY AUTHORS are (and/or have been):
Ross Lawley <ross.lawley@gmail.com>
Harry Marr <harry@hmarr.com> Harry Marr <harry@hmarr.com>
Matt Dennewitz <mattdennewitz@gmail.com> Matt Dennewitz <mattdennewitz@gmail.com>
Deepak Thukral <iapain@yahoo.com> Deepak Thukral <iapain@yahoo.com>
Florian Schlachter <flori@n-schlachter.de> Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com> Steve Challis <steve@stevechallis.com>
Ross Lawley <ross.lawley@gmail.com>
Wilson Júnior <wilsonpjunior@gmail.com> Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta Dan Crosta https://github.com/dcrosta
@@ -67,5 +67,36 @@ that much better:
* Gareth Lloyd * Gareth Lloyd
* Albert Choi * Albert Choi
* John Arnfield * John Arnfield
* grubberr
* Paul Aliagas
* Paul Cunnane
* Julien Rebetez * Julien Rebetez
* Marc Tamlyn
* Karim Allah
* Adam Parrish
* jpfarias
* jonrscott
* Alice Zoë Bevan-McGregor
* Stephen Young
* tkloc
* aid
* yamaneko1212
* dave mankoff
* Alexander G. Morano
* jwilder
* Joe Shaw
* Adam Flynn
* Ankhbayar
* Jan Schrewe
* David Koblas
* Crittercism
* Alvin Liang
* andrewmlevy
* Chris Faulkner
* Ashwin Purohit
* Shalabh Aggarwal
* Chris Williams
* Robert Kajic
* Jacob Peddicord
* Nils Hasenbanck
* mostlystatic

View File

@@ -3,20 +3,21 @@ MongoEngine
=========== ===========
:Info: MongoEngine is an ORM-like layer on top of PyMongo. :Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Author: Harry Marr (http://github.com/hmarr) :Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza)
About About
===== =====
MongoEngine is a Python Object-Document Mapper for working with MongoDB. MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation available at http://hmarr.com/mongoengine/ - there is currently Documentation available at http://mongoengine-odm.rtfd.org - there is currently
a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
<http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference <http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
<http://hmarr.com/mongoengine/apireference.html>`_. <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
Installation Installation
============ ============
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
you can use ``easy_install -U mongoengine``. Otherwise, you can download the you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
setup.py install``. setup.py install``.
Dependencies Dependencies
@@ -92,6 +93,6 @@ Community
Contributing Contributing
============ ============
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
contribute to the project, fork it on GitHub and send a pull request, all contribute to the project, fork it on GitHub and send a pull request, all
contributions and suggestions are welcome! contributions and suggestions are welcome!

182
benchmark.py Normal file
View File

@@ -0,0 +1,182 @@
#!/usr/bin/env python
import timeit
def cprofile_main():
    """Profiling entry point: create and save one 20-key document.

    Drops any stale ``timeit_test`` database first so every profiling
    run starts from a clean collection, then saves a single ``Noddy``
    document through MongoEngine (run under cProfile to inspect the
    save path).
    """
    from pymongo import Connection
    conn = Connection()
    conn.drop_database('timeit_test')
    conn.disconnect()

    from mongoengine import Document, DictField, connect
    connect("timeit_test")

    class Noddy(Document):
        fields = DictField()

    for _ in xrange(1):
        doc = Noddy()
        for key_index in range(20):
            doc.fields["key" + str(key_index)] = "value " + str(key_index)
        doc.save()
def main():
"""
0.4 Performance Figures ...
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.1141769886
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
2.37724113464
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
1.92479610443
0.5.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.10552310944
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
16.5169169903
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
14.9446101189
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
14.912801981
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
14.9617750645
Performance
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.10072994232
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
5.27341103554
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
4.49365401268
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
4.43459296227
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
4.40114378929
"""
setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
"""
stmt = """
from pymongo import Connection
connection = Connection()
db = connection.timeit_test
noddy = db.noddy
for i in xrange(10000):
example = {'fields': {}}
for j in range(20):
example['fields']["key"+str(j)] = "value "+str(j)
noddy.insert(example)
myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - Pymongo"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
connection.disconnect()
from mongoengine import Document, DictField, connect
connect("timeit_test")
class Noddy(Document):
fields = DictField()
"""
stmt = """
for i in xrange(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save()
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
stmt = """
for i in xrange(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save(safe=False, validate=False)
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
stmt = """
for i in xrange(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save(safe=False, validate=False, cascade=False)
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
stmt = """
for i in xrange(10000):
noddy = Noddy()
for j in range(20):
noddy.fields["key"+str(j)] = "value "+str(j)
noddy.save(force_insert=True, safe=False, validate=False, cascade=False)
myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""
print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, force=True"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
if __name__ == "__main__":
main()

View File

@@ -6,6 +6,7 @@ Connecting
========== ==========
.. autofunction:: mongoengine.connect .. autofunction:: mongoengine.connect
.. autofunction:: mongoengine.register_connection
Documents Documents
========= =========
@@ -21,6 +22,12 @@ Documents
.. autoclass:: mongoengine.EmbeddedDocument .. autoclass:: mongoengine.EmbeddedDocument
:members: :members:
.. autoclass:: mongoengine.DynamicDocument
:members:
.. autoclass:: mongoengine.DynamicEmbeddedDocument
:members:
.. autoclass:: mongoengine.document.MapReduceDocument .. autoclass:: mongoengine.document.MapReduceDocument
:members: :members:

View File

@@ -2,15 +2,100 @@
Changelog Changelog
========= =========
Changes in 0.6.4
================
- Refactored connection / fixed replicasetconnection
- Bug fix for unknown connection alias error message
- Sessions support Django 1.3 and Django 1.4
- Minor fix for ReferenceField
Changes in 0.6.3
================
- Updated sessions for Django 1.4
- Bug fix for updates where listfields contain embedded documents
- Bug fix for collection naming and mixins
Changes in 0.6.2
================
- Updated documentation for ReplicaSet connections
- Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems.
Changes in 0.6.1
================
- Fix for replicaSet connections
Changes in 0.6
================
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'
- Error raised if update doesn't have an operation
- DeReferencing is now thread safe
- Errors raised if trying to perform a join in a query
- Updates can now take __raw__ queries
- Added custom 2D index declarations
- Added replicaSet connection support
- Updated deprecated imports from pymongo (safe for pymongo 2.2)
- Added uri support for connections
- Added scalar for efficiently returning partial data values (aliased to values_list)
- Fixed limit skip bug
- Improved Inheritance / Mixin
- Added sharding support
- Added pymongo 2.1 support
- Fixed Abstract documents can now declare indexes
- Added db_alias support to individual documents
- Fixed GridFS documents can now be pickled
- Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field
- Added InvalidQueryError when calling with_id with a filter
- Added support for DBRefs in distinct()
- Fixed issue saving False booleans
- Fixed issue with dynamic documents deltas
- Added Reverse Delete Rule support to ListFields - MapFields aren't supported
- Added customisable cascade kwarg options
- Fixed Handle None values for non-required fields
- Removed Document._get_subclasses() - no longer required
- Fixed bug requiring subclasses when not actually needed
- Fixed deletion of dynamic data
- Added support for the $elementMatch operator
- Added reverse option to SortedListFields
- Fixed dereferencing - multi directional list dereferencing
- Fixed issue creating indexes with recursive embedded documents
- Fixed recursive lookup in _unique_with_indexes
- Fixed passing ComplexField defaults to constructor for ReferenceFields
- Fixed validation of DictField Int keys
- Added optional cascade saving
- Fixed dereferencing - max_depth now taken into account
- Fixed document mutation saving issue
- Fixed positional operator when replacing embedded documents
- Added Non-Django Style choices back (you can have either)
- Fixed __repr__ of a sliced queryset
- Added recursive validation error of documents / complex fields
- Fixed breaking during queryset iteration
- Added pre and post bulk-insert signals
- Added ImageField - requires PIL
- Fixed Reference Fields can be None in get_or_create / queries
- Fixed accessing pk on an embedded document
- Fixed calling a queryset after drop_collection now recreates the collection
- Add field name to validation exception messages
- Added UUID field
- Improved efficiency of .get()
- Updated ComplexFields so if required they won't accept empty lists / dicts
- Added spec file for rpm-based distributions
- Fixed ListField so it doesn't accept strings
- Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas
Changes in v0.5.2 Changes in v0.5.2
================= =================
- A Robust Circular reference bugfix - A Robust Circular reference bugfix
Changes in v0.5.1 Changes in v0.5.1
================= =================
- Circular reference bugfix - Fixed simple circular reference bug
Changes in v0.5 Changes in v0.5
=============== ===============

View File

@@ -38,7 +38,7 @@ master_doc = 'index'
# General information about the project. # General information about the project.
project = u'MongoEngine' project = u'MongoEngine'
copyright = u'2009-2011, Harry Marr' copyright = u'2009-2012, MongoEngine Authors'
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
@@ -121,7 +121,7 @@ html_theme_path = ['_themes']
# Add any paths that contain custom static files (such as style sheets) here, # Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files, # relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css". # so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static'] #html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format. # using the given strftime format.

View File

@@ -2,6 +2,8 @@
Using MongoEngine with Django Using MongoEngine with Django
============================= =============================
.. note :: Updated to support Django 1.4
Connecting Connecting
========== ==========
In your **settings.py** file, ignore the standard database settings (unless you In your **settings.py** file, ignore the standard database settings (unless you

View File

@@ -3,6 +3,7 @@
===================== =====================
Connecting to MongoDB Connecting to MongoDB
===================== =====================
To connect to a running instance of :program:`mongod`, use the To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the :func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to. If the database does not exist, it will be created. If database to connect to. If the database does not exist, it will be created. If
@@ -18,3 +19,47 @@ provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`:: :func:`~mongoengine.connect`::
connect('project1', host='192.168.1.35', port=12345) connect('project1', host='192.168.1.35', port=12345)
Uri style connections are also supported as long as you include the database
name - just supply the uri as the :attr:`host` to
:func:`~mongoengine.connect`::
connect('project1', host='mongodb://localhost/database_name')
ReplicaSets
===========
MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`
to use them please use a URI style connection and provide the `replicaSet` name in the
connection kwargs.
Multiple Databases
==================
Multiple database support was added in MongoEngine 0.6. To use multiple
databases you can use :func:`~mongoengine.connect` and provide an `alias` name
for the connection - if no `alias` is provided then "default" is used.
In the background this uses :func:`~mongoengine.register_connection` to
store the data and you can register all aliases up front if required.
Individual documents can also support multiple databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects
to point across databases and collections. Below is an example schema, using
3 different databases to store data::
class User(Document):
name = StringField()
meta = {"db_alias": "user-db"}
class Book(Document):
name = StringField()
meta = {"db_alias": "book-db"}
class AuthorBooks(Document):
author = ReferenceField(User)
book = ReferenceField(Book)
meta = {"db_alias": "users-books-db"}

View File

@@ -24,6 +24,34 @@ objects** as class attributes to the document class::
title = StringField(max_length=200, required=True) title = StringField(max_length=200, required=True)
date_modified = DateTimeField(default=datetime.datetime.now) date_modified = DateTimeField(default=datetime.datetime.now)
Dynamic document schemas
========================
One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
should be planned and organised (after all explicit is better than implicit!)
there are scenarios where having dynamic / expando style documents is desirable.
:class:`~mongoengine.DynamicDocument` documents work in the same way as
:class:`~mongoengine.Document` but any data / attributes set to them will also
be saved ::
from mongoengine import *
class Page(DynamicDocument):
title = StringField(max_length=200, required=True)
# Create a new page and add tags
>>> page = Page(title='Using MongoEngine')
>>> page.tags = ['mongodb', 'mongoengine']
>>> page.save()
>>> Page.objects(tags='mongoengine').count()
>>> 1
.. note::
There is one caveat on Dynamic Documents: fields cannot start with `_`
Fields Fields
====== ======
By default, fields are not required. To make a field mandatory, set the By default, fields are not required. To make a field mandatory, set the
@@ -107,12 +135,33 @@ arguments can be set on all fields:
When True, use this field as a primary key for the collection. When True, use this field as a primary key for the collection.
:attr:`choices` (Default: None) :attr:`choices` (Default: None)
An iterable of choices to which the value of this field should be limited. An iterable (e.g. a list or tuple) of choices to which the value of this
field should be limited.
Can be either be a nested tuples of value (stored in mongo) and a
human readable key ::
SIZE = (('S', 'Small'),
('M', 'Medium'),
('L', 'Large'),
('XL', 'Extra Large'),
('XXL', 'Extra Extra Large'))
class Shirt(Document):
size = StringField(max_length=3, choices=SIZE)
Or a flat iterable just containing values ::
SIZE = ('S', 'M', 'L', 'XL', 'XXL')
class Shirt(Document):
size = StringField(max_length=3, choices=SIZE)
:attr:`help_text` (Default: None) :attr:`help_text` (Default: None)
Optional help text to output with the field - used by form libraries Optional help text to output with the field - used by form libraries
:attr:`verbose` (Default: None) :attr:`verbose_name` (Default: None)
Optional human-readable name for the field - used by form libraries Optional human-readable name for the field - used by form libraries
@@ -382,11 +431,32 @@ If a dictionary is passed then the following options are available:
:attr:`unique` (Default: False) :attr:`unique` (Default: False)
Whether the index should be sparse. Whether the index should be sparse.
.. note:: .. warning::
Inheritance adds extra indices.
If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.
Geospatial indexes
---------------------------
Geospatial indexes will be automatically created for all Geospatial indexes will be automatically created for all
:class:`~mongoengine.GeoPointField`\ s :class:`~mongoengine.GeoPointField`\ s
It is also possible to explicitly define geospatial indexes. This is
useful if you need to define a geospatial index on a subfield of a
:class:`~mongoengine.DictField` or a custom field that contains a
point. To create a geospatial index you must prefix the field with the
``*`` sign. ::
class Place(Document):
location = DictField()
meta = {
'indexes': [
'*location.point',
],
}
Ordering Ordering
======== ========
A default ordering can be specified for your A default ordering can be specified for your
@@ -427,8 +497,31 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
first_post = BlogPost.objects.order_by("+published_date").first() first_post = BlogPost.objects.order_by("+published_date").first()
assert first_post.title == "Blog Post #1" assert first_post.title == "Blog Post #1"
Shard keys
==========
If your collection is sharded, then you need to specify the shard key as a tuple,
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
This ensures that the shard key is sent with the query when calling the
:meth:`~mongoengine.document.Document.save` or
:meth:`~mongoengine.document.Document.update` method on an existing
:class:`~mongoengine.Document` instance::
class LogEntry(Document):
machine = StringField()
app = StringField()
timestamp = DateTimeField()
data = StringField()
meta = {
'shard_key': ('machine', 'timestamp',)
}
.. _document-inheritance:
Document inheritance Document inheritance
==================== ====================
To create a specialised type of a :class:`~mongoengine.Document` you have To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need. defined, you may subclass it and add any extra fields or methods you may need.
As this is new class is not a direct subclass of As this is new class is not a direct subclass of
@@ -440,10 +533,15 @@ convenient and efficient retrieval of related documents::
class Page(Document): class Page(Document):
title = StringField(max_length=200, required=True) title = StringField(max_length=200, required=True)
meta = {'allow_inheritance': True}
# Also stored in the collection named 'page' # Also stored in the collection named 'page'
class DatedPage(Page): class DatedPage(Page):
date = DateTimeField() date = DateTimeField()
.. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta.
Working with existing data Working with existing data
-------------------------- --------------------------
To enable correct retrieval of documents involved in this kind of heirarchy, To enable correct retrieval of documents involved in this kind of heirarchy,

View File

@@ -35,13 +35,23 @@ already exist, then any changes will be updated atomically. For example::
* ``list_field.pop(0)`` - *sets* the resulting list * ``list_field.pop(0)`` - *sets* the resulting list
* ``del(list_field)`` - *unsets* whole list * ``del(list_field)`` - *unsets* whole list
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valide :attr:`id`.
.. seealso:: .. seealso::
:ref:`guide-atomic-updates` :ref:`guide-atomic-updates`
Cascading Saves
---------------
If your document contains :class:`~mongoengine.ReferenceField` or
:class:`~mongoengine.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will automatically save any changes to
those objects as well. If this is not desired passing :attr:`cascade` as False
to the save method turns this feature off.
Deleting documents
------------------
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.
Document IDs Document IDs
============ ============
Each document in the database has a unique id. This may be accessed through the Each document in the database has a unique id. This may be accessed through the

View File

@@ -76,6 +76,7 @@ expressions:
* ``istartswith`` -- string field starts with value (case insensitive) * ``istartswith`` -- string field starts with value (case insensitive)
* ``endswith`` -- string field ends with value * ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive) * ``iendswith`` -- string field ends with value (case insensitive)
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
There are a few special operators for performing geographical queries, that There are a few special operators for performing geographical queries, that
may used with :class:`~mongoengine.GeoPointField`\ s: may used with :class:`~mongoengine.GeoPointField`\ s:
@@ -194,22 +195,6 @@ to be created::
>>> a.name == b.name and a.age == b.age >>> a.name == b.name and a.age == b.age
True True
Dereferencing results
---------------------
When iterating the results of :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number the queries to mongo.
There are times when that efficiency is not enough, documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.
To limit the number of queries use
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
QuerySet to a list and dereferences as efficiently as possible.
Default Document queries Default Document queries
======================== ========================
By default, the objects :attr:`~mongoengine.Document.objects` attribute on a By default, the objects :attr:`~mongoengine.Document.objects` attribute on a
@@ -312,8 +297,16 @@ would be generating "tag-clouds"::
from operator import itemgetter from operator import itemgetter
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
Query efficiency and performance
================================
There are a couple of methods to improve efficiency when querying, reducing the
information returned by the query or efficient dereferencing .
Retrieving a subset of fields Retrieving a subset of fields
============================= -----------------------------
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large is especially important for MongoDB, as fields may often be extremely large
@@ -346,6 +339,27 @@ will be given::
If you later need the missing fields, just call If you later need the missing fields, just call
:meth:`~mongoengine.Document.reload` on your document. :meth:`~mongoengine.Document.reload` on your document.
Getting related data
--------------------
When iterating the results of :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number the queries to mongo.
There are times when that efficiency is not enough, documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.
To limit the number of queries use
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
QuerySet to a list and dereferences as efficiently as possible. By default
:func:`~mongoengine.queryset.QuerySet.select_related` only dereferences any
references to the depth of 1 level. If you have more complicated documents and
want to dereference more of the object at once then increasing the :attr:`max_depth`
will dereference more levels of the document.
Advanced queries Advanced queries
================ ================
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword

View File

@@ -5,11 +5,13 @@ Signals
.. versionadded:: 0.5 .. versionadded:: 0.5
.. note::
Signal support is provided by the excellent `blinker`_ library and Signal support is provided by the excellent `blinker`_ library and
will gracefully fall back if it is not available. will gracefully fall back if it is not available.
The following document signals exist in MongoEngine and are pretty self explaintary: The following document signals exist in MongoEngine and are pretty self-explanatory:
* `mongoengine.signals.pre_init` * `mongoengine.signals.pre_init`
* `mongoengine.signals.post_init` * `mongoengine.signals.post_init`
@@ -17,6 +19,8 @@ The following document signals exist in MongoEngine and are pretty self explaint
* `mongoengine.signals.post_save` * `mongoengine.signals.post_save`
* `mongoengine.signals.pre_delete` * `mongoengine.signals.pre_delete`
* `mongoengine.signals.post_delete` * `mongoengine.signals.post_delete`
* `mongoengine.signals.pre_bulk_insert`
* `mongoengine.signals.post_bulk_insert`
Example usage:: Example usage::

View File

@@ -18,6 +18,9 @@ MongoDB. To install it, simply run
:doc:`apireference` :doc:`apireference`
The complete API documentation. The complete API documentation.
:doc:`upgrade`
How to upgrade MongoEngine.
:doc:`django` :doc:`django`
Using MongoEngine and Django Using MongoEngine and Django
@@ -42,7 +45,8 @@ Also, you can join the developers' `mailing list
Changes Changes
------- -------
See the :doc:`changelog` for a full list of changes to MongoEngine. See the :doc:`changelog` for a full list of changes to MongoEngine and
:doc:`upgrade` for upgrade information.
.. toctree:: .. toctree::
:hidden: :hidden:

View File

@@ -167,6 +167,11 @@ To delete all the posts if a user is deleted set the rule::
See :class:`~mongoengine.ReferenceField` for more information. See :class:`~mongoengine.ReferenceField` for more information.
.. note::
MapFields and DictFields currently don't support automatic handling of
deleted references
Adding data to our Tumblelog Adding data to our Tumblelog
============================ ============================
Now that we've defined how our documents will be structured, let's start adding Now that we've defined how our documents will be structured, let's start adding

View File

@@ -2,6 +2,24 @@
Upgrading Upgrading
========= =========
0.5 to 0.6
==========
Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
to `pk` as pk is no longer a property of Embedded Documents.
Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.
Document._get_subclasses - Is no longer used and the class method has been removed.
Document.objects.with_id - now raises an InvalidQueryError if used with a filter.
FutureWarning - A future warning has been added to all inherited classes that
don't define `allow_inheritance` in their meta.
You may need to update pyMongo to 2.0 for use with Sharding.
0.4 to 0.5 0.4 to 0.5
=========== ===========
@@ -9,7 +27,7 @@ There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of changed are: choices in fields, map_reduce and collection names. main areas of changed are: choices in fields, map_reduce and collection names.
Choice options: Choice options:
-------------- ---------------
Are now expected to be an iterable of tuples, with the first element in each Are now expected to be an iterable of tuples, with the first element in each
tuple being the actual value to be stored. The second element is the tuple being the actual value to be stored. The second element is the
@@ -58,7 +76,7 @@ To upgrade use a Mixin class to set meta like so ::
class MyAceDocument(Document, BaseMixin): class MyAceDocument(Document, BaseMixin):
pass pass
MyAceDocument._get_collection_name() == myacedocument MyAceDocument._get_collection_name() == "myacedocument"
Alternatively, you can rename your collections eg :: Alternatively, you can rename your collections eg ::

View File

@@ -12,9 +12,7 @@ from signals import *
__all__ = (document.__all__ + fields.__all__ + connection.__all__ + __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
queryset.__all__ + signals.__all__) queryset.__all__ + signals.__all__)
__author__ = 'Harry Marr' VERSION = (0, 6, 6)
VERSION = (0, 5, 2)
def get_version(): def get_version():

View File

@@ -1,15 +1,18 @@
import warnings
from queryset import QuerySet, QuerySetManager from queryset import QuerySet, QuerySetManager
from queryset import DoesNotExist, MultipleObjectsReturned from queryset import DoesNotExist, MultipleObjectsReturned
from queryset import DO_NOTHING from queryset import DO_NOTHING
from mongoengine import signals from mongoengine import signals
import weakref
import sys import sys
import pymongo import pymongo
import pymongo.objectid from bson import ObjectId
import operator import operator
from functools import partial from functools import partial
from bson.dbref import DBRef
class NotRegistered(Exception): class NotRegistered(Exception):
@@ -19,8 +22,56 @@ class NotRegistered(Exception):
class InvalidDocumentError(Exception): class InvalidDocumentError(Exception):
pass pass
class ValidationError(Exception):
pass class ValidationError(AssertionError):
"""Validation exception.
"""
errors = {}
field_name = None
_message = None
def __init__(self, message="", **kwargs):
self.errors = kwargs.get('errors', {})
self.field_name = kwargs.get('field_name')
self.message = message
def __str__(self):
return self.message
def __repr__(self):
return '%s(%s,)' % (self.__class__.__name__, self.message)
def __getattribute__(self, name):
message = super(ValidationError, self).__getattribute__(name)
if name == 'message' and self.field_name:
return message + ' ("%s")' % self.field_name
else:
return message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def to_dict(self):
def build_dict(source):
errors_dict = {}
if not source:
return errors_dict
if isinstance(source, dict):
for field_name, error in source.iteritems():
errors_dict[field_name] = build_dict(error)
elif isinstance(source, ValidationError) and source.errors:
return build_dict(source.errors)
else:
return unicode(source)
return errors_dict
if not self.errors:
return {}
return build_dict(self.errors)
_document_registry = {} _document_registry = {}
@@ -50,6 +101,8 @@ class BaseField(object):
.. versionchanged:: 0.5 - added verbose and help text .. versionchanged:: 0.5 - added verbose and help text
""" """
name = None
# Fields may have _types inserted into indexes by default # Fields may have _types inserted into indexes by default
_index_with_types = True _index_with_types = True
_geo_index = False _geo_index = False
@@ -65,7 +118,6 @@ class BaseField(object):
validation=None, choices=None, verbose_name=None, help_text=None): validation=None, choices=None, verbose_name=None, help_text=None):
self.db_field = (db_field or name) if not primary_key else '_id' self.db_field = (db_field or name) if not primary_key else '_id'
if name: if name:
import warnings
msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
warnings.warn(msg, DeprecationWarning) warnings.warn(msg, DeprecationWarning)
self.name = None self.name = None
@@ -97,17 +149,13 @@ class BaseField(object):
# Get value from document instance if available, if not use default # Get value from document instance if available, if not use default
value = instance._data.get(self.name) value = instance._data.get(self.name)
if value is None: if value is None:
value = self.default value = self.default
# Allow callable default values # Allow callable default values
if callable(value): if callable(value):
value = value() value = value()
# Convert lists / values so we can watch for any changes on them
if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
value = BaseList(value, instance=instance, name=self.name)
elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, instance=instance, name=self.name)
return value return value
def __set__(self, instance, value): def __set__(self, instance, value):
@@ -116,6 +164,12 @@ class BaseField(object):
instance._data[self.name] = value instance._data[self.name] = value
instance._mark_as_changed(self.name) instance._mark_as_changed(self.name)
def error(self, message="", errors=None, field_name=None):
"""Raises a ValidationError.
"""
field_name = field_name if field_name else self.name
raise ValidationError(message, errors=errors, field_name=field_name)
def to_python(self, value): def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type. """Convert a MongoDB-compatible type to a Python type.
""" """
@@ -137,20 +191,25 @@ class BaseField(object):
pass pass
def _validate(self, value): def _validate(self, value):
# check choices # check choices
if self.choices is not None: if self.choices:
if isinstance(self.choices[0], (list, tuple)):
option_keys = [option_key for option_key, option_value in self.choices] option_keys = [option_key for option_key, option_value in self.choices]
if value not in option_keys: if value not in option_keys:
raise ValidationError("Value must be one of %s." % unicode(option_keys)) self.error('Value must be one of %s' % unicode(option_keys))
else:
if value not in self.choices:
self.error('Value must be one of %s' % unicode(self.choices))
# check validation argument # check validation argument
if self.validation is not None: if self.validation is not None:
if callable(self.validation): if callable(self.validation):
if not self.validation(value): if not self.validation(value):
raise ValidationError('Value does not match custom' \ self.error('Value does not match custom validation method')
'validation method.')
else: else:
raise ValueError('validation argument must be a callable.') raise ValueError('validation argument for "%s" must be a '
'callable.' % self.name)
self.validate(value) self.validate(value)
@@ -166,6 +225,7 @@ class ComplexBaseField(BaseField):
""" """
field = None field = None
_dereference = False
def __get__(self, instance, owner): def __get__(self, instance, owner):
"""Descriptor to automatically dereference references. """Descriptor to automatically dereference references.
@@ -174,11 +234,39 @@ class ComplexBaseField(BaseField):
# Document class being used rather than a document object # Document class being used rather than a document object
return self return self
from dereference import dereference if not self._dereference and instance._initialised:
instance._data[self.name] = dereference( from dereference import DeReference
instance._data.get(self.name), max_depth=1, instance=instance, name=self.name, get=True self._dereference = DeReference() # Cached
instance._data[self.name] = self._dereference(
instance._data.get(self.name), max_depth=1, instance=instance,
name=self.name
) )
return super(ComplexBaseField, self).__get__(instance, owner)
value = super(ComplexBaseField, self).__get__(instance, owner)
# Convert lists / values so we can watch for any changes on them
if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
value = BaseList(value, instance, self.name)
instance._data[self.name] = value
elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, instance, self.name)
instance._data[self.name] = value
if self._dereference and instance._initialised and \
isinstance(value, (BaseList, BaseDict)) and not value._dereferenced:
value = self._dereference(
value, max_depth=1, instance=instance, name=self.name
)
value._dereferenced = True
instance._data[self.name] = value
return value
def __set__(self, instance, value):
"""Descriptor for assigning a value to a field in a document.
"""
instance._data[self.name] = value
instance._mark_as_changed(self.name)
def to_python(self, value): def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type. """Convert a MongoDB-compatible type to a Python type.
@@ -207,10 +295,10 @@ class ComplexBaseField(BaseField):
if isinstance(v, Document): if isinstance(v, Document):
# We need the id from the saved object to create the DBRef # We need the id from the saved object to create the DBRef
if v.pk is None: if v.pk is None:
raise ValidationError('You can only reference documents once ' self.error('You can only reference documents once they'
'they have been saved to the database') ' have been saved to the database')
collection = v._get_collection_name() collection = v._get_collection_name()
value_dict[k] = pymongo.dbref.DBRef(collection, v.pk) value_dict[k] = DBRef(collection, v.pk)
elif hasattr(v, 'to_python'): elif hasattr(v, 'to_python'):
value_dict[k] = v.to_python() value_dict[k] = v.to_python()
else: else:
@@ -247,19 +335,19 @@ class ComplexBaseField(BaseField):
if isinstance(v, Document): if isinstance(v, Document):
# We need the id from the saved object to create the DBRef # We need the id from the saved object to create the DBRef
if v.pk is None: if v.pk is None:
raise ValidationError('You can only reference documents once ' self.error('You can only reference documents once they'
'they have been saved to the database') ' have been saved to the database')
# If its a document that is not inheritable it won't have # If its a document that is not inheritable it won't have
# _types / _cls data so make it a generic reference allows # _types / _cls data so make it a generic reference allows
# us to dereference # us to dereference
meta = getattr(v, 'meta', getattr(v, '_meta', {})) meta = getattr(v, 'meta', getattr(v, '_meta', {}))
if meta and not meta['allow_inheritance'] and not self.field: if meta and not meta.get('allow_inheritance', True) and not self.field:
from fields import GenericReferenceField from fields import GenericReferenceField
value_dict[k] = GenericReferenceField().to_mongo(v) value_dict[k] = GenericReferenceField().to_mongo(v)
else: else:
collection = v._get_collection_name() collection = v._get_collection_name()
value_dict[k] = pymongo.dbref.DBRef(collection, v.pk) value_dict[k] = DBRef(collection, v.pk)
elif hasattr(v, 'to_mongo'): elif hasattr(v, 'to_mongo'):
value_dict[k] = v.to_mongo() value_dict[k] = v.to_mongo()
else: else:
@@ -270,17 +358,29 @@ class ComplexBaseField(BaseField):
return value_dict return value_dict
def validate(self, value): def validate(self, value):
"""If field provided ensure the value is valid. """If field is provided ensure the value is valid.
""" """
errors = {}
if self.field: if self.field:
try:
if hasattr(value, 'iteritems'): if hasattr(value, 'iteritems'):
[self.field.validate(v) for k,v in value.iteritems()] sequence = value.iteritems()
else: else:
[self.field.validate(v) for v in value] sequence = enumerate(value)
except Exception, err: for k, v in sequence:
raise ValidationError('Invalid %s item (%s)' % ( try:
self.field.__class__.__name__, str(v))) self.field.validate(v)
except (ValidationError, AssertionError), error:
if hasattr(error, 'errors'):
errors[k] = error.errors
else:
errors[k] = error
if errors:
field_class = self.field.__class__.__name__
self.error('Invalid %s item (%s)' % (field_class, value),
errors=errors)
# Don't allow empty values if required
if self.required and not value:
self.error('Field is required and cannot be empty')
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
return self.to_mongo(value) return self.to_mongo(value)
@@ -301,6 +401,47 @@ class ComplexBaseField(BaseField):
owner_document = property(_get_owner_document, _set_owner_document) owner_document = property(_get_owner_document, _set_owner_document)
class BaseDynamicField(BaseField):
"""Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def to_mongo(self, value):
"""Convert a Python type to a MongoDBcompatible type.
"""
if isinstance(value, basestring):
return value
if hasattr(value, 'to_mongo'):
return value.to_mongo()
if not isinstance(value, (dict, list, tuple)):
return value
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
data = {}
for k, v in value.items():
data[k] = self.to_mongo(v)
if is_list: # Convert back to a list
value = [v for k, v in sorted(data.items(), key=operator.itemgetter(0))]
else:
value = data
return value
def lookup_member(self, member_name):
return member_name
def prepare_query_value(self, op, value):
if isinstance(value, basestring):
from mongoengine.fields import StringField
return StringField().prepare_query_value(op, value)
return self.to_mongo(value)
class ObjectIdField(BaseField): class ObjectIdField(BaseField):
"""An field wrapper around MongoDB's ObjectIds. """An field wrapper around MongoDB's ObjectIds.
""" """
@@ -309,12 +450,12 @@ class ObjectIdField(BaseField):
return value return value
def to_mongo(self, value): def to_mongo(self, value):
if not isinstance(value, pymongo.objectid.ObjectId): if not isinstance(value, ObjectId):
try: try:
return pymongo.objectid.ObjectId(unicode(value)) return ObjectId(unicode(value))
except Exception, e: except Exception, e:
# e.message attribute has been deprecated since Python 2.6 # e.message attribute has been deprecated since Python 2.6
raise ValidationError(unicode(e)) self.error(unicode(e))
return value return value
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
@@ -322,9 +463,9 @@ class ObjectIdField(BaseField):
def validate(self, value): def validate(self, value):
try: try:
pymongo.objectid.ObjectId(unicode(value)) ObjectId(unicode(value))
except: except:
raise ValidationError('Invalid Object ID') self.error('Invalid Object ID')
class DocumentMetaclass(type): class DocumentMetaclass(type):
@@ -332,6 +473,25 @@ class DocumentMetaclass(type):
""" """
def __new__(cls, name, bases, attrs): def __new__(cls, name, bases, attrs):
def _get_mixin_fields(base):
attrs = {}
attrs.update(dict([(k, v) for k, v in base.__dict__.items()
if issubclass(v.__class__, BaseField)]))
# Handle simple mixin's with meta
if hasattr(base, 'meta') and not isinstance(base, DocumentMetaclass):
meta = attrs.get('meta', {})
meta.update(base.meta)
attrs['meta'] = meta
for p_base in base.__bases__:
#optimize :-)
if p_base in (object, BaseDocument):
continue
attrs.update(_get_mixin_fields(p_base))
return attrs
metaclass = attrs.get('__metaclass__') metaclass = attrs.get('__metaclass__')
super_new = super(DocumentMetaclass, cls).__new__ super_new = super(DocumentMetaclass, cls).__new__
if metaclass and issubclass(metaclass, DocumentMetaclass): if metaclass and issubclass(metaclass, DocumentMetaclass):
@@ -343,6 +503,7 @@ class DocumentMetaclass(type):
simple_class = True simple_class = True
for base in bases: for base in bases:
# Include all fields present in superclasses # Include all fields present in superclasses
if hasattr(base, '_fields'): if hasattr(base, '_fields'):
doc_fields.update(base._fields) doc_fields.update(base._fields)
@@ -350,14 +511,20 @@ class DocumentMetaclass(type):
superclasses[base._class_name] = base superclasses[base._class_name] = base
superclasses.update(base._superclasses) superclasses.update(base._superclasses)
else: # Add any mixin fields else: # Add any mixin fields
attrs.update(dict([(k,v) for k,v in base.__dict__.items() attrs.update(_get_mixin_fields(base))
if issubclass(v.__class__, BaseField)]))
if hasattr(base, '_meta') and not base._meta.get('abstract'): if hasattr(base, '_meta') and not base._meta.get('abstract'):
# Ensure that the Document class may be subclassed - # Ensure that the Document class may be subclassed -
# inheritance may be disabled to remove dependency on # inheritance may be disabled to remove dependency on
# additional fields _cls and _types # additional fields _cls and _types
class_name.append(base._class_name) class_name.append(base._class_name)
if not base._meta.get('allow_inheritance_defined', True):
warnings.warn(
"%s uses inheritance, the default for allow_inheritance "
"is changing to off by default. Please add it to the "
"document meta." % name,
FutureWarning
)
if base._meta.get('allow_inheritance', True) == False: if base._meta.get('allow_inheritance', True) == False:
raise ValueError('Document %s may not be subclassed' % raise ValueError('Document %s may not be subclassed' %
base.__name__) base.__name__)
@@ -365,7 +532,8 @@ class DocumentMetaclass(type):
simple_class = False simple_class = False
doc_class_name = '.'.join(reversed(class_name)) doc_class_name = '.'.join(reversed(class_name))
meta = attrs.get('_meta', attrs.get('meta', {})) meta = attrs.get('_meta', {})
meta.update(attrs.get('meta', {}))
if 'allow_inheritance' not in meta: if 'allow_inheritance' not in meta:
meta['allow_inheritance'] = True meta['allow_inheritance'] = True
@@ -380,6 +548,7 @@ class DocumentMetaclass(type):
attrs['_superclasses'] = superclasses attrs['_superclasses'] = superclasses
# Add the document's fields to the _fields attribute # Add the document's fields to the _fields attribute
field_names = {}
for attr_name, attr_value in attrs.items(): for attr_name, attr_value in attrs.items():
if hasattr(attr_value, "__class__") and \ if hasattr(attr_value, "__class__") and \
issubclass(attr_value.__class__, BaseField): issubclass(attr_value.__class__, BaseField):
@@ -387,21 +556,35 @@ class DocumentMetaclass(type):
if not attr_value.db_field: if not attr_value.db_field:
attr_value.db_field = attr_name attr_value.db_field = attr_name
doc_fields[attr_name] = attr_value doc_fields[attr_name] = attr_value
field_names[attr_value.db_field] = field_names.get(attr_value.db_field, 0) + 1
duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
if duplicate_db_fields:
raise InvalidDocumentError("Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields))
attrs['_fields'] = doc_fields attrs['_fields'] = doc_fields
attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k != v.db_field]) attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k != v.db_field])
attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()]) attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()])
from mongoengine import Document from mongoengine import Document, EmbeddedDocument, DictField
new_class = super_new(cls, name, bases, attrs) new_class = super_new(cls, name, bases, attrs)
for field in new_class._fields.values(): for field in new_class._fields.values():
field.owner_document = new_class field.owner_document = new_class
delete_rule = getattr(field, 'reverse_delete_rule', DO_NOTHING)
if delete_rule != DO_NOTHING:
field.document_type.register_delete_rule(new_class, field.name,
delete_rule)
if field.name and hasattr(Document, field.name): delete_rule = getattr(field, 'reverse_delete_rule', DO_NOTHING)
f = field
if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
delete_rule = getattr(f.field, 'reverse_delete_rule', DO_NOTHING)
if isinstance(f, DictField) and delete_rule != DO_NOTHING:
raise InvalidDocumentError("Reverse delete rules are not supported for %s (field: %s)" % (field.__class__.__name__, field.name))
f = field.field
if delete_rule != DO_NOTHING:
if issubclass(new_class, EmbeddedDocument):
raise InvalidDocumentError("Reverse delete rules are not supported for EmbeddedDocuments (field: %s)" % field.name)
f.document_type.register_delete_rule(new_class, field.name, delete_rule)
if field.name and hasattr(Document, field.name) and EmbeddedDocument not in new_class.mro():
raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name) raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name)
module = attrs.get('__module__') module = attrs.get('__module__')
@@ -453,6 +636,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower() collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower()
id_field = None id_field = None
abstract_base_indexes = []
base_indexes = [] base_indexes = []
base_meta = {} base_meta = {}
@@ -472,6 +656,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
base_meta[key] = base._meta[key] base_meta[key] = base._meta[key]
id_field = id_field or base._meta.get('id_field') id_field = id_field or base._meta.get('id_field')
if base._meta.get('abstract', False):
abstract_base_indexes += base._meta.get('indexes', [])
else:
base_indexes += base._meta.get('indexes', []) base_indexes += base._meta.get('indexes', [])
# Propagate 'allow_inheritance' # Propagate 'allow_inheritance'
if 'allow_inheritance' in base._meta: if 'allow_inheritance' in base._meta:
@@ -480,6 +667,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
base_meta['queryset_class'] = base._meta['queryset_class'] base_meta['queryset_class'] = base._meta['queryset_class']
try: try:
base_meta['objects'] = base.__getattribute__(base, 'objects') base_meta['objects'] = base.__getattribute__(base, 'objects')
except TypeError:
pass
except AttributeError: except AttributeError:
pass pass
@@ -498,6 +687,10 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
'delete_rules': {}, 'delete_rules': {},
'allow_inheritance': True 'allow_inheritance': True
} }
allow_inheritance_defined = ('allow_inheritance' in base_meta or
'allow_inheritance'in attrs.get('meta', {}))
meta['allow_inheritance_defined'] = allow_inheritance_defined
meta.update(base_meta) meta.update(base_meta)
# Apply document-defined meta options # Apply document-defined meta options
@@ -518,8 +711,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
meta['queryset_class'] = manager.queryset_class meta['queryset_class'] = manager.queryset_class
new_class.objects = manager new_class.objects = manager
indicies = meta['indexes'] + abstract_base_indexes
user_indexes = [QuerySet._build_index_spec(new_class, spec) user_indexes = [QuerySet._build_index_spec(new_class, spec)
for spec in meta['indexes']] + base_indexes for spec in indicies] + base_indexes
new_class._meta['indexes'] = user_indexes new_class._meta['indexes'] = user_indexes
unique_indexes = cls._unique_with_indexes(new_class) unique_indexes = cls._unique_with_indexes(new_class)
@@ -575,7 +769,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
unique_indexes.append(index) unique_indexes.append(index)
# Grab any embedded document field unique indexes # Grab any embedded document field unique indexes
if field.__class__.__name__ == "EmbeddedDocumentField": if field.__class__.__name__ == "EmbeddedDocumentField" and field.document_type != new_class:
field_namespace = "%s." % field_name field_namespace = "%s." % field_name
unique_indexes += cls._unique_with_indexes(field.document_type, unique_indexes += cls._unique_with_indexes(field.document_type,
field_namespace) field_namespace)
@@ -585,30 +779,114 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
class BaseDocument(object): class BaseDocument(object):
_dynamic = False
_created = True
_dynamic_lock = True
_initialised = False
def __init__(self, **values): def __init__(self, **values):
signals.pre_init.send(self.__class__, document=self, values=values) signals.pre_init.send(self.__class__, document=self, values=values)
self._data = {} self._data = {}
self._initialised = False
# Assign default values to instance # Assign default values to instance
for attr_name, field in self._fields.items(): for attr_name, field in self._fields.items():
value = getattr(self, attr_name, None) value = getattr(self, attr_name, None)
setattr(self, attr_name, value) setattr(self, attr_name, value)
# Assign initial values to instance # Set passed values after initialisation
for attr_name in values.keys(): if self._dynamic:
try: self._dynamic_fields = {}
value = values.pop(attr_name) dynamic_data = {}
setattr(self, attr_name, value) for key, value in values.items():
except AttributeError: if key in self._fields or key == '_id':
pass setattr(self, key, value)
elif self._dynamic:
dynamic_data[key] = value
else:
for key, value in values.items():
setattr(self, key, value)
# Set any get_fieldname_display methods # Set any get_fieldname_display methods
self.__set_field_display() self.__set_field_display()
if self._dynamic:
self._dynamic_lock = False
for key, value in dynamic_data.items():
setattr(self, key, value)
# Flag initialised # Flag initialised
self._initialised = True self._initialised = True
signals.post_init.send(self.__class__, document=self) signals.post_init.send(self.__class__, document=self)
def __setattr__(self, name, value):
# Handle dynamic data only if an initialised dynamic document
if self._dynamic and not self._dynamic_lock:
field = None
if not hasattr(self, name) and not name.startswith('_'):
field = BaseDynamicField(db_field=name)
field.name = name
self._dynamic_fields[name] = field
if not name.startswith('_'):
value = self.__expand_dynamic_values(name, value)
# Handle marking data as changed
if name in self._dynamic_fields:
self._data[name] = value
if hasattr(self, '_changed_fields'):
self._mark_as_changed(name)
# Handle None values for required fields
if value is None and name in getattr(self, '_fields', {}):
self._data[name] = value
if hasattr(self, '_changed_fields'):
self._mark_as_changed(name)
return
if not self._created and name in self._meta.get('shard_key', tuple()):
from queryset import OperationError
raise OperationError("Shard Keys are immutable. Tried to update %s" % name)
super(BaseDocument, self).__setattr__(name, value)
def __expand_dynamic_values(self, name, value):
"""expand any dynamic values to their correct types / values"""
if not isinstance(value, (dict, list, tuple)):
return value
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
if not is_list and '_cls' in value:
cls = get_document(value['_cls'])
value = cls(**value)
value._dynamic = True
value._changed_fields = []
return value
data = {}
for k, v in value.items():
key = name if is_list else k
data[k] = self.__expand_dynamic_values(key, v)
if is_list: # Convert back to a list
data_items = sorted(data.items(), key=operator.itemgetter(0))
value = [v for k, v in data_items]
else:
value = data
# Convert lists / values so we can watch for any changes on them
if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
value = BaseList(value, self, name)
elif isinstance(value, dict) and not isinstance(value, BaseDict):
value = BaseDict(value, self, name)
return value
def validate(self): def validate(self):
"""Ensure that all fields' values are valid and that required fields """Ensure that all fields' values are valid and that required fields
are present. are present.
@@ -618,25 +896,21 @@ class BaseDocument(object):
for name, field in self._fields.items()] for name, field in self._fields.items()]
# Ensure that each field is matched to a valid value # Ensure that each field is matched to a valid value
errors = {}
for field, value in fields: for field, value in fields:
if value is not None: if value is not None:
try: try:
field._validate(value) field._validate(value)
except (ValueError, AttributeError, AssertionError), e: except ValidationError, error:
raise ValidationError('Invalid value for field named "%s" of type "%s": %s' errors[field.name] = error.errors or error
% (field.name, field.__class__.__name__, value)) except (ValueError, AttributeError, AssertionError), error:
errors[field.name] = error
elif field.required: elif field.required:
raise ValidationError('Field "%s" is required' % field.name) errors[field.name] = ValidationError('Field is required',
field_name=field.name)
@apply if errors:
def pk(): raise ValidationError('Errors encountered validating document',
"""Primary key alias errors=errors)
"""
def fget(self):
return getattr(self, self._meta['id_field'])
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)
def to_mongo(self): def to_mongo(self):
"""Return data dictionary ready for use with MongoDB. """Return data dictionary ready for use with MongoDB.
@@ -653,6 +927,12 @@ class BaseDocument(object):
data['_types'] = self._superclasses.keys() + [self._class_name] data['_types'] = self._superclasses.keys() + [self._class_name]
if '_id' in data and data['_id'] is None: if '_id' in data and data['_id'] is None:
del data['_id'] del data['_id']
if not self._dynamic:
return data
for name, field in self._dynamic_fields.items():
data[name] = field.to_mongo(self._data.get(name, None))
return data return data
@classmethod @classmethod
@@ -661,21 +941,6 @@ class BaseDocument(object):
""" """
return cls._meta.get('collection', None) return cls._meta.get('collection', None)
@classmethod
def _get_subclasses(cls):
"""Return a dictionary of all subclasses (found recursively).
"""
try:
subclasses = cls.__subclasses__()
except:
subclasses = cls.__subclasses__(cls)
all_subclasses = {}
for subclass in subclasses:
all_subclasses[subclass._class_name] = subclass
all_subclasses.update(subclass._get_subclasses())
return all_subclasses
@classmethod @classmethod
def _from_son(cls, son): def _from_son(cls, son):
"""Create an instance of a Document (subclass) from a PyMongo SON. """Create an instance of a Document (subclass) from a PyMongo SON.
@@ -693,26 +958,24 @@ class BaseDocument(object):
# Return correct subclass for document type # Return correct subclass for document type
if class_name != cls._class_name: if class_name != cls._class_name:
subclasses = cls._get_subclasses() cls = get_document(class_name)
if class_name not in subclasses:
# Type of document is probably more generic than the class
# that has been queried to return this SON
raise NotRegistered("""
`%s` has not been registered in the document registry.
Importing the document class automatically registers it,
has it been imported?
""".strip() % class_name)
cls = subclasses[class_name]
present_fields = data.keys() changed_fields = []
for field_name, field in cls._fields.items(): for field_name, field in cls._fields.items():
if field.db_field in data: if field.db_field in data:
value = data[field.db_field] value = data[field.db_field]
data[field_name] = (value if value is None data[field_name] = (value if value is None
else field.to_python(value)) else field.to_python(value))
elif field.default:
default = field.default
if callable(default):
default = default()
if isinstance(default, BaseDocument):
changed_fields.append(field_name)
obj = cls(**data) obj = cls(**data)
obj._changed_fields = [] obj._changed_fields = changed_fields
obj._created = False
return obj return obj
def _mark_as_changed(self, key): def _mark_as_changed(self, key):
@@ -727,7 +990,7 @@ class BaseDocument(object):
def _get_changed_fields(self, key='', inspected=None): def _get_changed_fields(self, key='', inspected=None):
"""Returns a list of all fields that have explicitly been changed. """Returns a list of all fields that have explicitly been changed.
""" """
from mongoengine import EmbeddedDocument from mongoengine import EmbeddedDocument, DynamicEmbeddedDocument
_changed_fields = [] _changed_fields = []
_changed_fields += getattr(self, '_changed_fields', []) _changed_fields += getattr(self, '_changed_fields', [])
@@ -738,6 +1001,8 @@ class BaseDocument(object):
inspected.add(self.id) inspected.add(self.id)
field_list = self._fields.copy() field_list = self._fields.copy()
if self._dynamic:
field_list.update(self._dynamic_fields)
for field_name in field_list: for field_name in field_list:
db_field_name = self._db_field_map.get(field_name, field_name) db_field_name = self._db_field_map.get(field_name, field_name)
@@ -748,7 +1013,7 @@ class BaseDocument(object):
continue continue
inspected.add(field.id) inspected.add(field.id)
if isinstance(field, (EmbeddedDocument,)) and db_field_name not in _changed_fields: # Grab all embedded fields that have been changed if isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) and db_field_name not in _changed_fields: # Grab all embedded fields that have been changed
_changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key, inspected) if k] _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key, inspected) if k]
elif isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields: # Loop list / dict fields as they contain documents elif isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields: # Loop list / dict fields as they contain documents
# Determine the iterator to use # Determine the iterator to use
@@ -772,6 +1037,7 @@ class BaseDocument(object):
set_fields = self._get_changed_fields() set_fields = self._get_changed_fields()
set_data = {} set_data = {}
unset_data = {} unset_data = {}
parts = []
if hasattr(self, '_changed_fields'): if hasattr(self, '_changed_fields'):
set_data = {} set_data = {}
# Fetch each set item from its path # Fetch each set item from its path
@@ -793,13 +1059,16 @@ class BaseDocument(object):
# Determine if any changed items were actually unset. # Determine if any changed items were actually unset.
for path, value in set_data.items(): for path, value in set_data.items():
if value: if value or isinstance(value, bool):
continue continue
# If we've set a value that ain't the default value dont unset it. # If we've set a value that ain't the default value dont unset it.
default = None default = None
if self._dynamic and len(parts) and parts[0] in self._dynamic_fields:
if path in self._fields: del(set_data[path])
unset_data[path] = 1
continue
elif path in self._fields:
default = self._fields[path].default default = self._fields[path].default
else: # Perform a full lookup for lists / embedded lookups else: # Perform a full lookup for lists / embedded lookups
d = self d = self
@@ -818,7 +1087,10 @@ class BaseDocument(object):
field_name = d._reverse_db_field_map.get(db_field_name, field_name = d._reverse_db_field_map.get(db_field_name,
db_field_name) db_field_name)
default = d._fields[field_name].default if field_name in d._fields:
default = d._fields.get(field_name).default
else:
default = None
if default is not None: if default is not None:
if callable(default): if callable(default):
@@ -831,23 +1103,22 @@ class BaseDocument(object):
return set_data, unset_data return set_data, unset_data
@classmethod @classmethod
def _geo_indices(cls, inspected_classes=None): def _geo_indices(cls, inspected=None):
inspected_classes = inspected_classes or [] inspected = inspected or []
geo_indices = [] geo_indices = []
inspected_classes.append(cls) inspected.append(cls)
for field in cls._fields.values(): for field in cls._fields.values():
if hasattr(field, 'document_type'): if hasattr(field, 'document_type'):
field_cls = field.document_type field_cls = field.document_type
if field_cls in inspected_classes: if field_cls in inspected:
continue continue
if hasattr(field_cls, '_geo_indices'): if hasattr(field_cls, '_geo_indices'):
geo_indices += field_cls._geo_indices(inspected_classes) geo_indices += field_cls._geo_indices(inspected)
elif field._geo_index: elif field._geo_index:
geo_indices.append(field) geo_indices.append(field)
return geo_indices return geo_indices
def __getstate__(self): def __getstate__(self):
self_dict = self.__dict__
removals = ["get_%s_display" % k for k, v in self._fields.items() if v.choices] removals = ["get_%s_display" % k for k, v in self._fields.items() if v.choices]
for k in removals: for k in removals:
if hasattr(self, k): if hasattr(self, k):
@@ -866,7 +1137,9 @@ class BaseDocument(object):
def __get_field_display(self, field): def __get_field_display(self, field):
"""Returns the display value for a choice field""" """Returns the display value for a choice field"""
value = getattr(self, field.name) value = getattr(self, field.name)
if field.choices and isinstance(field.choices[0], (list, tuple)):
return dict(field.choices).get(value, value) return dict(field.choices).get(value, value)
return value
def __iter__(self): def __iter__(self):
return iter(self._fields) return iter(self._fields)
@@ -901,10 +1174,10 @@ class BaseDocument(object):
def __repr__(self): def __repr__(self):
try: try:
u = unicode(self) u = unicode(self).encode('utf-8')
except (UnicodeEncodeError, UnicodeDecodeError): except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]' u = '[Bad Unicode data]'
return u'<%s: %s>' % (self.__class__.__name__, u) return '<%s: %s>' % (self.__class__.__name__, u)
def __str__(self): def __str__(self):
if hasattr(self, '__unicode__'): if hasattr(self, '__unicode__'):
@@ -932,9 +1205,13 @@ class BaseList(list):
"""A special list so we can watch any changes """A special list so we can watch any changes
""" """
_dereferenced = False
_instance = None
_name = None
def __init__(self, list_items, instance, name): def __init__(self, list_items, instance, name):
self.instance = instance self._instance = instance
self.name = name self._name = name
super(BaseList, self).__init__(list_items) super(BaseList, self).__init__(list_items)
def __setitem__(self, *args, **kwargs): def __setitem__(self, *args, **kwargs):
@@ -945,6 +1222,14 @@ class BaseList(list):
self._mark_as_changed() self._mark_as_changed()
super(BaseList, self).__delitem__(*args, **kwargs) super(BaseList, self).__delitem__(*args, **kwargs)
def __getstate__(self):
self.observer = None
return self
def __setstate__(self, state):
self = state
return self
def append(self, *args, **kwargs): def append(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
return super(BaseList, self).append(*args, **kwargs) return super(BaseList, self).append(*args, **kwargs)
@@ -974,28 +1259,27 @@ class BaseList(list):
return super(BaseList, self).sort(*args, **kwargs) return super(BaseList, self).sort(*args, **kwargs)
def _mark_as_changed(self): def _mark_as_changed(self):
"""Marks a list as changed if has an instance and a name""" if hasattr(self._instance, '_mark_as_changed'):
if hasattr(self, 'instance') and hasattr(self, 'name'): self._instance._mark_as_changed(self._name)
self.instance._mark_as_changed(self.name)
class BaseDict(dict): class BaseDict(dict):
"""A special dict so we can watch any changes """A special dict so we can watch any changes
""" """
_dereferenced = False
_instance = None
_name = None
def __init__(self, dict_items, instance, name): def __init__(self, dict_items, instance, name):
self.instance = instance self._instance = instance
self.name = name self._name = name
super(BaseDict, self).__init__(dict_items) super(BaseDict, self).__init__(dict_items)
def __setitem__(self, *args, **kwargs): def __setitem__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).__setitem__(*args, **kwargs) super(BaseDict, self).__setitem__(*args, **kwargs)
def __setattr__(self, *args, **kwargs):
self._mark_as_changed()
super(BaseDict, self).__setattr__(*args, **kwargs)
def __delete__(self, *args, **kwargs): def __delete__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).__delete__(*args, **kwargs) super(BaseDict, self).__delete__(*args, **kwargs)
@@ -1008,22 +1292,34 @@ class BaseDict(dict):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).__delattr__(*args, **kwargs) super(BaseDict, self).__delattr__(*args, **kwargs)
def __getstate__(self):
self.instance = None
self._dereferenced = False
return self
def __setstate__(self, state):
self = state
return self
def clear(self, *args, **kwargs): def clear(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).clear(*args, **kwargs) super(BaseDict, self).clear(*args, **kwargs)
def pop(self, *args, **kwargs): def pop(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).clear(*args, **kwargs) super(BaseDict, self).pop(*args, **kwargs)
def popitem(self, *args, **kwargs): def popitem(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).clear(*args, **kwargs) super(BaseDict, self).popitem(*args, **kwargs)
def update(self, *args, **kwargs):
self._mark_as_changed()
super(BaseDict, self).update(*args, **kwargs)
def _mark_as_changed(self): def _mark_as_changed(self):
"""Marks a dict as changed if has an instance and a name""" if hasattr(self._instance, '_mark_as_changed'):
if hasattr(self, 'instance') and hasattr(self, 'name'): self._instance._mark_as_changed(self._name)
self.instance._mark_as_changed(self.name)
if sys.version_info < (2, 5): if sys.version_info < (2, 5):
# Prior to Python 2.5, Exception was an old-style class # Prior to Python 2.5, Exception was an old-style class

View File

@@ -1,82 +1,159 @@
from pymongo import Connection import pymongo
import multiprocessing from pymongo import Connection, ReplicaSetConnection, uri_parser
import threading
__all__ = ['ConnectionError', 'connect']
_connection_defaults = { __all__ = ['ConnectionError', 'connect', 'register_connection',
'host': 'localhost', 'DEFAULT_CONNECTION_NAME']
'port': 27017,
}
_connection = {}
_connection_settings = _connection_defaults.copy()
_db_name = None
_db_username = None DEFAULT_CONNECTION_NAME = 'default'
_db_password = None
_db = {}
class ConnectionError(Exception): class ConnectionError(Exception):
pass pass
def _get_connection(reconnect=False): _connection_settings = {}
"""Handles the connection to the database _connections = {}
_dbs = {}
def register_connection(alias, name, host='localhost', port=27017,
is_slave=False, read_preference=False, slaves=None,
username=None, password=None, **kwargs):
"""Add a connection.
:param alias: the name that will be used to refer to this connection
throughout MongoEngine
:param name: the name of the specific database to use
:param host: the host name of the :program:`mongod` instance to connect to
:param port: the port that the :program:`mongod` instance is running on
:param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+
:param read_preference: The read preference for the collection ** Added pymongo 2.1
:param slaves: a list of aliases of slave connections; each of these must
be a registered connection that has :attr:`is_slave` set to ``True``
:param username: username to authenticate with
:param password: password to authenticate with
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
""" """
global _connection global _connection_settings
identity = get_identity()
conn_settings = {
'name': name,
'host': host,
'port': port,
'is_slave': is_slave,
'slaves': slaves or [],
'username': username,
'password': password,
'read_preference': read_preference
}
# Handle uri style connections
if "://" in host:
uri_dict = uri_parser.parse_uri(host)
if uri_dict.get('database') is None:
raise ConnectionError("If using URI style connection include "\
"database name in string")
conn_settings.update({
'host': host,
'name': uri_dict.get('database'),
'username': uri_dict.get('username'),
'password': uri_dict.get('password'),
'read_preference': read_preference,
})
_connection_settings[alias] = conn_settings
def disconnect(alias=DEFAULT_CONNECTION_NAME):
global _connections
global _dbs
if alias in _connections:
get_connection(alias=alias).disconnect()
del _connections[alias]
if alias in _dbs:
del _dbs[alias]
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
global _connections
# Connect to the database if not already connected # Connect to the database if not already connected
if _connection.get(identity) is None or reconnect: if reconnect:
disconnect(alias)
if alias not in _connections:
if alias not in _connection_settings:
msg = 'Connection with alias "%s" has not been defined' % alias
if alias == DEFAULT_CONNECTION_NAME:
msg = 'You have not defined a default connection'
raise ConnectionError(msg)
conn_settings = _connection_settings[alias].copy()
if hasattr(pymongo, 'version_tuple'): # Support for 2.1+
conn_settings.pop('name', None)
conn_settings.pop('slaves', None)
conn_settings.pop('is_slave', None)
conn_settings.pop('username', None)
conn_settings.pop('password', None)
else:
# Get all the slave connections
if 'slaves' in conn_settings:
slaves = []
for slave_alias in conn_settings['slaves']:
slaves.append(get_connection(slave_alias))
conn_settings['slaves'] = slaves
conn_settings.pop('read_preference', None)
connection_class = Connection
if 'replicaSet' in conn_settings:
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# Discard port since it can't be used on ReplicaSetConnection
conn_settings.pop('port', None)
connection_class = ReplicaSetConnection
try: try:
_connection[identity] = Connection(**_connection_settings) _connections[alias] = connection_class(**conn_settings)
except Exception, e: except Exception, e:
raise ConnectionError("Cannot connect to the database:\n%s" % e) raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
return _connection[identity] return _connections[alias]
def _get_db(reconnect=False):
"""Handles database connections and authentication based on the current def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
identity global _dbs
if reconnect:
disconnect(alias)
if alias not in _dbs:
conn = get_connection(alias)
conn_settings = _connection_settings[alias]
_dbs[alias] = conn[conn_settings['name']]
# Authenticate if necessary
if conn_settings['username'] and conn_settings['password']:
_dbs[alias].authenticate(conn_settings['username'],
conn_settings['password'])
return _dbs[alias]
def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
"""Connect to the database specified by the 'db' argument.
Connection settings may be provided here as well if the database is not
running on the default port on localhost. If authentication is needed,
provide username and password arguments as well.
Multiple databases are supported by using aliases. Provide a separate
`alias` to connect to a different instance of :program:`mongod`.
.. versionchanged:: 0.6 - added multiple database support.
""" """
global _db, _connection global _connections
identity = get_identity() if alias not in _connections:
# Connect if not already connected register_connection(alias, db, **kwargs)
if _connection.get(identity) is None or reconnect:
_connection[identity] = _get_connection(reconnect=reconnect)
if _db.get(identity) is None or reconnect: return get_connection(alias)
# _db_name will be None if the user hasn't called connect()
if _db_name is None:
raise ConnectionError('Not connected to the database')
# Get DB from current connection and authenticate if necessary
_db[identity] = _connection[identity][_db_name]
if _db_username and _db_password:
_db[identity].authenticate(_db_username, _db_password)
return _db[identity]
def get_identity():
"""Creates an identity key based on the current process and thread
identity.
"""
identity = multiprocessing.current_process()._identity
identity = 0 if not identity else identity[0]
identity = (identity, threading.current_thread().ident)
return identity
def connect(db, username=None, password=None, **kwargs):
"""Connect to the database specified by the 'db' argument. Connection
settings may be provided here as well if the database is not running on
the default port on localhost. If authentication is needed, provide
username and password arguments as well.
"""
global _connection_settings, _db_name, _db_username, _db_password, _db
_connection_settings = dict(_connection_defaults, **kwargs)
_db_name = db
_db_username = username
_db_password = password
return _get_db(reconnect=True)
# Support old naming convention
_get_connection = get_connection
_get_db = get_db

View File

@@ -1,17 +1,15 @@
import operator from bson import DBRef, SON
import pymongo from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
from fields import (ReferenceField, ListField, DictField, MapField)
from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass from connection import get_db
from fields import ReferenceField
from connection import _get_db
from queryset import QuerySet from queryset import QuerySet
from document import Document from document import Document
class DeReference(object): class DeReference(object):
def __call__(self, items, max_depth=1, instance=None, name=None, get=False): def __call__(self, items, max_depth=1, instance=None, name=None):
""" """
Cheaply dereferences the items to a set depth. Cheaply dereferences the items to a set depth.
Also handles the convertion of complex data types. Also handles the convertion of complex data types.
@@ -45,7 +43,7 @@ class DeReference(object):
self.reference_map = self._find_references(items) self.reference_map = self._find_references(items)
self.object_map = self._fetch_objects(doc_type=doc_type) self.object_map = self._fetch_objects(doc_type=doc_type)
return self._attach_objects(items, 0, instance, name, get) return self._attach_objects(items, 0, instance, name)
def _find_references(self, items, depth=0): def _find_references(self, items, depth=0):
""" """
@@ -55,7 +53,7 @@ class DeReference(object):
:param depth: The current depth of recursion :param depth: The current depth of recursion
""" """
reference_map = {} reference_map = {}
if not items: if not items or depth >= self.max_depth:
return reference_map return reference_map
# Determine the iterator to use # Determine the iterator to use
@@ -65,13 +63,14 @@ class DeReference(object):
iterator = items.iteritems() iterator = items.iteritems()
# Recursively find dbreferences # Recursively find dbreferences
depth += 1
for k, item in iterator: for k, item in iterator:
if hasattr(item, '_fields'): if hasattr(item, '_fields'):
for field_name, field in item._fields.iteritems(): for field_name, field in item._fields.iteritems():
v = item._data.get(field_name, None) v = item._data.get(field_name, None)
if isinstance(v, (pymongo.dbref.DBRef)): if isinstance(v, (DBRef)):
reference_map.setdefault(field.document_type, []).append(v.id) reference_map.setdefault(field.document_type, []).append(v.id)
elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: elif isinstance(v, (dict, SON)) and '_ref' in v:
reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
field_cls = getattr(getattr(field, 'field', None), 'document_type', None) field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
@@ -80,15 +79,15 @@ class DeReference(object):
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
key = field_cls key = field_cls
reference_map.setdefault(key, []).extend(refs) reference_map.setdefault(key, []).extend(refs)
elif isinstance(item, (pymongo.dbref.DBRef)): elif isinstance(item, (DBRef)):
reference_map.setdefault(item.collection, []).append(item.id) reference_map.setdefault(item.collection, []).append(item.id)
elif isinstance(item, (dict, pymongo.son.SON)) and '_ref' in item: elif isinstance(item, (dict, SON)) and '_ref' in item:
reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
elif isinstance(item, (dict, list, tuple)) and depth <= self.max_depth: elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
references = self._find_references(item, depth) references = self._find_references(item, depth - 1)
for key, refs in references.iteritems(): for key, refs in references.iteritems():
reference_map.setdefault(key, []).extend(refs) reference_map.setdefault(key, []).extend(refs)
depth += 1
return reference_map return reference_map
def _fetch_objects(self, doc_type=None): def _fetch_objects(self, doc_type=None):
@@ -103,16 +102,22 @@ class DeReference(object):
for key, doc in references.iteritems(): for key, doc in references.iteritems():
object_map[key] = doc object_map[key] = doc
else: # Generic reference: use the refs data to convert to document else: # Generic reference: use the refs data to convert to document
references = _get_db()[col].find({'_id': {'$in': refs}}) if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ):
references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
for ref in references:
doc = doc_type._from_son(ref)
object_map[doc.id] = doc
else:
references = get_db()[col].find({'_id': {'$in': refs}})
for ref in references: for ref in references:
if '_cls' in ref: if '_cls' in ref:
doc = get_document(ref['_cls'])._from_son(ref) doc = get_document(ref["_cls"])._from_son(ref)
else: else:
doc = doc_type._from_son(ref) doc = doc_type._from_son(ref)
object_map[doc.id] = doc object_map[doc.id] = doc
return object_map return object_map
def _attach_objects(self, items, depth=0, instance=None, name=None, get=False): def _attach_objects(self, items, depth=0, instance=None, name=None):
""" """
Recursively finds all db references to be dereferenced Recursively finds all db references to be dereferenced
@@ -122,7 +127,6 @@ class DeReference(object):
:class:`~mongoengine.base.ComplexBaseField` :class:`~mongoengine.base.ComplexBaseField`
:param name: The name of the field, used for tracking changes by :param name: The name of the field, used for tracking changes by
:class:`~mongoengine.base.ComplexBaseField` :class:`~mongoengine.base.ComplexBaseField`
:param get: A boolean determining if being called by __get__
""" """
if not items: if not items:
if isinstance(items, (BaseDict, BaseList)): if isinstance(items, (BaseDict, BaseList)):
@@ -130,17 +134,16 @@ class DeReference(object):
if instance: if instance:
if isinstance(items, dict): if isinstance(items, dict):
return BaseDict(items, instance=instance, name=name) return BaseDict(items, instance, name)
else: else:
return BaseList(items, instance=instance, name=name) return BaseList(items, instance, name)
if isinstance(items, (dict, pymongo.son.SON)): if isinstance(items, (dict, SON)):
if '_ref' in items: if '_ref' in items:
return self.object_map.get(items['_ref'].id, items) return self.object_map.get(items['_ref'].id, items)
elif '_types' in items and '_cls' in items: elif '_types' in items and '_cls' in items:
doc = get_document(items['_cls'])._from_son(items) doc = get_document(items['_cls'])._from_son(items)
if not get: doc._data = self._attach_objects(doc._data, depth, doc, name)
doc._data = self._attach_objects(doc._data, depth, doc, name, get)
return doc return doc
if not hasattr(items, 'items'): if not hasattr(items, 'items'):
@@ -152,6 +155,7 @@ class DeReference(object):
iterator = items.iteritems() iterator = items.iteritems()
data = {} data = {}
depth += 1
for k, v in iterator: for k, v in iterator:
if is_list: if is_list:
data.append(v) data.append(v)
@@ -163,24 +167,22 @@ class DeReference(object):
elif hasattr(v, '_fields'): elif hasattr(v, '_fields'):
for field_name, field in v._fields.iteritems(): for field_name, field in v._fields.iteritems():
v = data[k]._data.get(field_name, None) v = data[k]._data.get(field_name, None)
if isinstance(v, (pymongo.dbref.DBRef)): if isinstance(v, (DBRef)):
data[k]._data[field_name] = self.object_map.get(v.id, v) data[k]._data[field_name] = self.object_map.get(v.id, v)
elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: elif isinstance(v, (dict, SON)) and '_ref' in v:
data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
elif isinstance(v, dict) and depth < self.max_depth: elif isinstance(v, dict) and depth <= self.max_depth:
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get) data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
elif isinstance(v, (list, tuple)): elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get) data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
elif isinstance(v, (dict, list, tuple)) and depth < self.max_depth: elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
data[k] = self._attach_objects(v, depth, instance=instance, name=name, get=get) data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
elif hasattr(v, 'id'): elif hasattr(v, 'id'):
data[k] = self.object_map.get(v.id, v) data[k] = self.object_map.get(v.id, v)
if instance and name: if instance and name:
if is_list: if is_list:
return BaseList(data, instance=instance, name=name) return BaseList(data, instance, name)
return BaseDict(data, instance=instance, name=name) return BaseDict(data, instance, name)
depth += 1 depth += 1
return data return data
dereference = DeReference()

View File

@@ -1,13 +1,16 @@
import datetime
from mongoengine import * from mongoengine import *
from django.utils.hashcompat import md5_constructor, sha_constructor
from django.utils.encoding import smart_str from django.utils.encoding import smart_str
from django.contrib.auth.models import AnonymousUser from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
import datetime try:
from django.contrib.auth.hashers import check_password, make_password
REDIRECT_FIELD_NAME = 'next' except ImportError:
"""Handle older versions of Django"""
from django.utils.hashcompat import md5_constructor, sha_constructor
def get_hexdigest(algorithm, salt, raw_password): def get_hexdigest(algorithm, salt, raw_password):
raw_password, salt = smart_str(raw_password), smart_str(salt) raw_password, salt = smart_str(raw_password), smart_str(salt)
@@ -17,6 +20,19 @@ def get_hexdigest(algorithm, salt, raw_password):
return sha_constructor(salt + raw_password).hexdigest() return sha_constructor(salt + raw_password).hexdigest()
raise ValueError('Got unknown password algorithm type in password') raise ValueError('Got unknown password algorithm type in password')
def check_password(raw_password, password):
algo, salt, hash = password.split('$')
return hash == get_hexdigest(algo, salt, raw_password)
def make_password(raw_password):
from random import random
algo = 'sha1'
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
hash = get_hexdigest(algo, salt, raw_password)
return '%s$%s$%s' % (algo, salt, hash)
REDIRECT_FIELD_NAME = 'next'
class User(Document): class User(Document):
"""A User document that aims to mirror most of the API specified by Django """A User document that aims to mirror most of the API specified by Django
@@ -34,7 +50,7 @@ class User(Document):
email = EmailField(verbose_name=_('e-mail address')) email = EmailField(verbose_name=_('e-mail address'))
password = StringField(max_length=128, password = StringField(max_length=128,
verbose_name=_('password'), verbose_name=_('password'),
help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>.")) help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
is_staff = BooleanField(default=False, is_staff = BooleanField(default=False,
verbose_name=_('staff status'), verbose_name=_('staff status'),
help_text=_("Designates whether the user can log into this admin site.")) help_text=_("Designates whether the user can log into this admin site."))
@@ -75,11 +91,7 @@ class User(Document):
assigning to :attr:`~mongoengine.django.auth.User.password` as the assigning to :attr:`~mongoengine.django.auth.User.password` as the
password is hashed before storage. password is hashed before storage.
""" """
from random import random self.password = make_password(raw_password)
algo = 'sha1'
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
hash = get_hexdigest(algo, salt, raw_password)
self.password = '%s$%s$%s' % (algo, salt, hash)
self.save() self.save()
return self return self
@@ -89,8 +101,7 @@ class User(Document):
:attr:`~mongoengine.django.auth.User.password` as the password is :attr:`~mongoengine.django.auth.User.password` as the password is
hashed before storage. hashed before storage.
""" """
algo, salt, hash = self.password.split('$') return check_password(raw_password, self.password)
return hash == get_hexdigest(algo, salt, raw_password)
@classmethod @classmethod
def create_user(cls, username, password, email=None): def create_user(cls, username, password, email=None):

View File

@@ -1,3 +1,6 @@
from datetime import datetime
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_unicode from django.utils.encoding import force_unicode
@@ -5,8 +8,12 @@ from django.utils.encoding import force_unicode
from mongoengine.document import Document from mongoengine.document import Document
from mongoengine import fields from mongoengine import fields
from mongoengine.queryset import OperationError from mongoengine.queryset import OperationError
from mongoengine.connection import DEFAULT_CONNECTION_NAME
from datetime import datetime
MONGOENGINE_SESSION_DB_ALIAS = getattr(
settings, 'MONGOENGINE_SESSION_DB_ALIAS',
DEFAULT_CONNECTION_NAME)
class MongoSession(Document): class MongoSession(Document):
@@ -14,7 +21,9 @@ class MongoSession(Document):
session_data = fields.StringField() session_data = fields.StringField()
expire_date = fields.DateTimeField() expire_date = fields.DateTimeField()
meta = {'collection': 'django_session', 'allow_inheritance': False} meta = {'collection': 'django_session',
'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
'allow_inheritance': False}
class SessionStore(SessionBase): class SessionStore(SessionBase):
@@ -35,7 +44,7 @@ class SessionStore(SessionBase):
def create(self): def create(self):
while True: while True:
self.session_key = self._get_new_session_key() self._session_key = self._get_new_session_key()
try: try:
self.save(must_create=True) self.save(must_create=True)
except CreateError: except CreateError:
@@ -45,6 +54,8 @@ class SessionStore(SessionBase):
return return
def save(self, must_create=False): def save(self, must_create=False):
if self.session_key is None:
self.create()
s = MongoSession(session_key=self.session_key) s = MongoSession(session_key=self.session_key)
s.session_data = self.encode(self._get_session(no_load=must_create)) s.session_data = self.encode(self._get_session(no_load=must_create))
s.expire_date = self.get_expiry_date() s.expire_date = self.get_expiry_date()

View File

@@ -1,13 +1,14 @@
import pymongo
from bson.dbref import DBRef
from mongoengine import signals from mongoengine import signals
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
ValidationError, BaseDict, BaseList) BaseDict, BaseList)
from queryset import OperationError from queryset import OperationError
from connection import _get_db from connection import get_db, DEFAULT_CONNECTION_NAME
import pymongo __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']
__all__ = ['Document', 'EmbeddedDocument', 'ValidationError',
'OperationError', 'InvalidCollectionError']
class InvalidCollectionError(Exception): class InvalidCollectionError(Exception):
@@ -23,6 +24,10 @@ class EmbeddedDocument(BaseDocument):
__metaclass__ = DocumentMetaclass __metaclass__ = DocumentMetaclass
def __init__(self, *args, **kwargs):
super(EmbeddedDocument, self).__init__(*args, **kwargs)
self._changed_fields = []
def __delattr__(self, *args, **kwargs): def __delattr__(self, *args, **kwargs):
"""Handle deletions of fields""" """Handle deletions of fields"""
field_name = args[0] field_name = args[0]
@@ -35,7 +40,6 @@ class EmbeddedDocument(BaseDocument):
super(EmbeddedDocument, self).__delattr__(*args, **kwargs) super(EmbeddedDocument, self).__delattr__(*args, **kwargs)
class Document(BaseDocument): class Document(BaseDocument):
"""The base class used for defining the structure and properties of """The base class used for defining the structure and properties of
collections of documents stored in MongoDB. Inherit from this class, and collections of documents stored in MongoDB. Inherit from this class, and
@@ -77,42 +81,57 @@ class Document(BaseDocument):
""" """
__metaclass__ = TopLevelDocumentMetaclass __metaclass__ = TopLevelDocumentMetaclass
@classmethod @apply
def _get_collection(self): def pk():
"""Returns the collection for the document.""" """Primary key alias
db = _get_db() """
collection_name = self._get_collection_name() def fget(self):
return getattr(self, self._meta['id_field'])
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)
if not hasattr(self, '_collection') or self._collection is None: @classmethod
def _get_db(cls):
"""Some Model using other db_alias"""
return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME ))
@classmethod
def _get_collection(cls):
"""Returns the collection for the document."""
if not hasattr(cls, '_collection') or cls._collection is None:
db = cls._get_db()
collection_name = cls._get_collection_name()
# Create collection as a capped collection if specified # Create collection as a capped collection if specified
if self._meta['max_size'] or self._meta['max_documents']: if cls._meta['max_size'] or cls._meta['max_documents']:
# Get max document limit and max byte size from meta # Get max document limit and max byte size from meta
max_size = self._meta['max_size'] or 10000000 # 10MB default max_size = cls._meta['max_size'] or 10000000 # 10MB default
max_documents = self._meta['max_documents'] max_documents = cls._meta['max_documents']
if collection_name in db.collection_names(): if collection_name in db.collection_names():
self._collection = db[collection_name] cls._collection = db[collection_name]
# The collection already exists, check if its capped # The collection already exists, check if its capped
# options match the specified capped options # options match the specified capped options
options = self._collection.options() options = cls._collection.options()
if options.get('max') != max_documents or \ if options.get('max') != max_documents or \
options.get('size') != max_size: options.get('size') != max_size:
msg = ('Cannot create collection "%s" as a capped ' msg = ('Cannot create collection "%s" as a capped '
'collection as it already exists') % self._collection 'collection as it already exists') % cls._collection
raise InvalidCollectionError(msg) raise InvalidCollectionError(msg)
else: else:
# Create the collection as a capped collection # Create the collection as a capped collection
opts = {'capped': True, 'size': max_size} opts = {'capped': True, 'size': max_size}
if max_documents: if max_documents:
opts['max'] = max_documents opts['max'] = max_documents
self._collection = db.create_collection( cls._collection = db.create_collection(
collection_name, **opts collection_name, **opts
) )
else: else:
self._collection = db[collection_name] cls._collection = db[collection_name]
return self._collection return cls._collection
def save(self, safe=True, force_insert=False, validate=True, write_options=None, _refs=None): def save(self, safe=True, force_insert=False, validate=True, write_options=None,
cascade=None, cascade_kwargs=None, _refs=None):
"""Save the :class:`~mongoengine.Document` to the database. If the """Save the :class:`~mongoengine.Document` to the database. If the
document already exists, it will be updated, otherwise it will be document already exists, it will be updated, otherwise it will be
created. created.
@@ -130,14 +149,22 @@ class Document(BaseDocument):
which will be used as options for the resultant ``getLastError`` command. which will be used as options for the resultant ``getLastError`` command.
For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
have recorded the write and will force an fsync on each server being written to. have recorded the write and will force an fsync on each server being written to.
:param cascade: Sets the flag for cascading saves. You can set a default by setting
"cascade" in the document __meta__
:param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
:param _refs: A list of processed references used in cascading saves
.. versionchanged:: 0.5 .. versionchanged:: 0.5
In existing documents it only saves changed fields using set / unset In existing documents it only saves changed fields using set / unset
Saves are cascaded and any :class:`~pymongo.dbref.DBRef` objects Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
that have changes are saved as well. that have changes are saved as well.
""" .. versionchanged:: 0.6
from fields import ReferenceField, GenericReferenceField Cascade saves are optional = defaults to True, if you want fine grain
control then you can turn off using document meta['cascade'] = False
Also you can pass different kwargs to the cascade save using cascade_kwargs
which overwrites the existing kwargs with custom values
"""
signals.pre_save.send(self.__class__, document=self) signals.pre_save.send(self.__class__, document=self)
if validate: if validate:
@@ -148,11 +175,11 @@ class Document(BaseDocument):
doc = self.to_mongo() doc = self.to_mongo()
created = '_id' in doc created = force_insert or '_id' not in doc
creation_mode = force_insert or not created
try: try:
collection = self.__class__.objects._collection collection = self.__class__.objects._collection
if creation_mode: if created:
if force_insert: if force_insert:
object_id = collection.insert(doc, safe=safe, **write_options) object_id = collection.insert(doc, safe=safe, **write_options)
else: else:
@@ -160,21 +187,33 @@ class Document(BaseDocument):
else: else:
object_id = doc['_id'] object_id = doc['_id']
updates, removals = self._delta() updates, removals = self._delta()
if updates:
collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options)
if removals:
collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options)
# Save any references / generic references # Need to add shard key to query, or you get an error
_refs = _refs or [] select_dict = {'_id': object_id}
for name, cls in self._fields.items(): shard_key = self.__class__._meta.get('shard_key', tuple())
if isinstance(cls, (ReferenceField, GenericReferenceField)): for k in shard_key:
ref = getattr(self, name) actual_key = self._db_field_map.get(k, k)
if ref and str(ref) not in _refs: select_dict[actual_key] = doc[actual_key]
_refs.append(str(ref))
ref.save(safe=safe, force_insert=force_insert, upsert = self._created
validate=validate, write_options=write_options, if updates:
_refs=_refs) collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
if removals:
collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)
cascade = self._meta.get('cascade', True) if cascade is None else cascade
if cascade:
kwargs = {
"safe": safe,
"force_insert": force_insert,
"validate": validate,
"write_options": write_options,
"cascade": cascade
}
if cascade_kwargs: # Allow granular control over cascades
kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs
self.cascade_save(**kwargs)
except pymongo.errors.OperationFailure, err: except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)' message = 'Could not save document (%s)'
@@ -184,21 +223,26 @@ class Document(BaseDocument):
id_field = self._meta['id_field'] id_field = self._meta['id_field']
self[id_field] = self._fields[id_field].to_python(object_id) self[id_field] = self._fields[id_field].to_python(object_id)
def reset_changed_fields(doc, inspected_docs=None): self._changed_fields = []
"""Loop through and reset changed fields lists""" self._created = False
signals.post_save.send(self.__class__, document=self, created=created)
inspected_docs = inspected_docs or [] def cascade_save(self, *args, **kwargs):
inspected_docs.append(doc) """Recursively saves any references / generic references on an object"""
if hasattr(doc, '_changed_fields'): from fields import ReferenceField, GenericReferenceField
doc._changed_fields = [] _refs = kwargs.get('_refs', []) or []
for name, cls in self._fields.items():
for field_name in doc._fields: if not isinstance(cls, (ReferenceField, GenericReferenceField)):
field = getattr(doc, field_name) continue
if field not in inspected_docs and hasattr(field, '_changed_fields'): ref = getattr(self, name)
reset_changed_fields(field, inspected_docs) if not ref:
continue
reset_changed_fields(self) ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
signals.post_save.send(self.__class__, document=self, created=creation_mode) if ref and ref_id not in _refs:
_refs.append(ref_id)
kwargs["_refs"] = _refs
ref.save(**kwargs)
ref._changed_fields = []
def update(self, **kwargs): def update(self, **kwargs):
"""Performs an update on the :class:`~mongoengine.Document` """Performs an update on the :class:`~mongoengine.Document`
@@ -210,7 +254,12 @@ class Document(BaseDocument):
if not self.pk: if not self.pk:
raise OperationError('attempt to update a document not yet saved') raise OperationError('attempt to update a document not yet saved')
return self.__class__.objects(pk=self.pk).update_one(**kwargs) # Need to add shard key to query, or you get an error
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
select_dict[k] = getattr(self, k)
return self.__class__.objects(**select_dict).update_one(**kwargs)
def delete(self, safe=False): def delete(self, safe=False):
"""Delete the :class:`~mongoengine.Document` from the database. This """Delete the :class:`~mongoengine.Document` from the database. This
@@ -220,10 +269,8 @@ class Document(BaseDocument):
""" """
signals.pre_delete.send(self.__class__, document=self) signals.pre_delete.send(self.__class__, document=self)
id_field = self._meta['id_field']
object_id = self._fields[id_field].to_mongo(self[id_field])
try: try:
self.__class__.objects(**{id_field: object_id}).delete(safe=safe) self.__class__.objects(pk=self.pk).delete(safe=safe)
except pymongo.errors.OperationFailure, err: except pymongo.errors.OperationFailure, err:
message = u'Could not delete document (%s)' % err.message message = u'Could not delete document (%s)' % err.message
raise OperationError(message) raise OperationError(message)
@@ -231,25 +278,32 @@ class Document(BaseDocument):
signals.post_delete.send(self.__class__, document=self) signals.post_delete.send(self.__class__, document=self)
def select_related(self, max_depth=1): def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
a maximum depth in order to cut down the number queries to mongodb. a maximum depth in order to cut down the number queries to mongodb.
.. versionadded:: 0.5 .. versionadded:: 0.5
""" """
from dereference import dereference from dereference import DeReference
self._data = dereference(self._data, max_depth) self._data = DeReference()(self._data, max_depth)
return self return self
def reload(self): def reload(self, max_depth=1):
"""Reloads all attributes from the database. """Reloads all attributes from the database.
.. versionadded:: 0.1.2 .. versionadded:: 0.1.2
.. versionchanged:: 0.6 Now chainable
""" """
id_field = self._meta['id_field'] id_field = self._meta['id_field']
obj = self.__class__.objects(**{id_field: self[id_field]}).first() obj = self.__class__.objects(
**{id_field: self[id_field]}
).first().select_related(max_depth=max_depth)
for field in self._fields: for field in self._fields:
setattr(self, field, self._reload(field, obj[field])) setattr(self, field, self._reload(field, obj[field]))
self._changed_fields = [] if self._dynamic:
for name in self._dynamic_fields.keys():
setattr(self, name, self._reload(name, obj._data[name]))
self._changed_fields = obj._changed_fields
return obj
def _reload(self, key, value): def _reload(self, key, value):
"""Used by :meth:`~mongoengine.Document.reload` to ensure the """Used by :meth:`~mongoengine.Document.reload` to ensure the
@@ -257,21 +311,21 @@ class Document(BaseDocument):
""" """
if isinstance(value, BaseDict): if isinstance(value, BaseDict):
value = [(k, self._reload(k, v)) for k, v in value.items()] value = [(k, self._reload(k, v)) for k, v in value.items()]
value = BaseDict(value, instance=self, name=key) value = BaseDict(value, self, key)
elif isinstance(value, BaseList): elif isinstance(value, BaseList):
value = [self._reload(key, v) for v in value] value = [self._reload(key, v) for v in value]
value = BaseList(value, instance=self, name=key) value = BaseList(value, self, key)
elif isinstance(value, EmbeddedDocument): elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
value._changed_fields = [] value._changed_fields = []
return value return value
def to_dbref(self): def to_dbref(self):
"""Returns an instance of :class:`~pymongo.dbref.DBRef` useful in """Returns an instance of :class:`~bson.dbref.DBRef` useful in
`__raw__` queries.""" `__raw__` queries."""
if not self.pk: if not self.pk:
msg = "Only saved documents can have a valid dbref" msg = "Only saved documents can have a valid dbref"
raise OperationError(msg) raise OperationError(msg)
return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk) return DBRef(self.__class__._get_collection_name(), self.pk)
@classmethod @classmethod
def register_delete_rule(cls, document_cls, field_name, rule): def register_delete_rule(cls, document_cls, field_name, rule):
@@ -285,8 +339,52 @@ class Document(BaseDocument):
"""Drops the entire collection associated with this """Drops the entire collection associated with this
:class:`~mongoengine.Document` type from the database. :class:`~mongoengine.Document` type from the database.
""" """
db = _get_db() from mongoengine.queryset import QuerySet
db = cls._get_db()
db.drop_collection(cls._get_collection_name()) db.drop_collection(cls._get_collection_name())
QuerySet._reset_already_indexed(cls)
class DynamicDocument(Document):
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
way as an ordinary document but has expando style properties. Any data
passed or set against the :class:`~mongoengine.DynamicDocument` that is
not a field is automatically converted into a
:class:`~mongoengine.BaseDynamicField` and data can be attributed to that
field.
..note::
There is one caveat on Dynamic Documents: fields cannot start with `_`
"""
__metaclass__ = TopLevelDocumentMetaclass
_dynamic = True
def __delattr__(self, *args, **kwargs):
"""Deletes the attribute by setting to None and allowing _delta to unset
it"""
field_name = args[0]
if field_name in self._dynamic_fields:
setattr(self, field_name, None)
else:
super(DynamicDocument, self).__delattr__(*args, **kwargs)
class DynamicEmbeddedDocument(EmbeddedDocument):
"""A Dynamic Embedded Document class allowing flexible, expandable and
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
information about dynamic documents.
"""
__metaclass__ = DocumentMetaclass
_dynamic = True
def __delattr__(self, *args, **kwargs):
"""Deletes the attribute by setting to None and allowing _delta to unset
it"""
field_name = args[0]
setattr(self, field_name, None)
class MapReduceDocument(object): class MapReduceDocument(object):
@@ -294,7 +392,7 @@ class MapReduceDocument(object):
:param collection: An instance of :class:`~pymongo.Collection` :param collection: An instance of :class:`~pymongo.Collection`
:param key: Document/result key, often an instance of :param key: Document/result key, often an instance of
:class:`~pymongo.objectid.ObjectId`. If supplied as :class:`~bson.objectid.ObjectId`. If supplied as
an ``ObjectId`` found in the given ``collection``, an ``ObjectId`` found in the given ``collection``,
the object can be accessed via the ``object`` property. the object can be accessed via the ``object`` property.
:param value: The result(s) for this key. :param value: The result(s) for this key.

View File

@@ -1,18 +1,30 @@
from base import (BaseField, ComplexBaseField, ObjectIdField, import datetime
ValidationError, get_document) import time
from queryset import DO_NOTHING
from document import Document, EmbeddedDocument
from connection import _get_db
from operator import itemgetter
import re
import pymongo
import pymongo.dbref
import pymongo.son
import pymongo.binary
import datetime, time
import decimal import decimal
import gridfs import gridfs
import re
import uuid
from bson import Binary, DBRef, SON, ObjectId
from base import (BaseField, ComplexBaseField, ObjectIdField,
ValidationError, get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
from connection import get_db, DEFAULT_CONNECTION_NAME
from operator import itemgetter
try:
from PIL import Image, ImageOps
except ImportError:
Image = None
ImageOps = None
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
@@ -20,8 +32,8 @@ __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
'DecimalField', 'ComplexDateTimeField', 'URLField', 'DecimalField', 'ComplexDateTimeField', 'URLField',
'GenericReferenceField', 'FileField', 'BinaryField', 'GenericReferenceField', 'FileField', 'BinaryField',
'SortedListField', 'EmailField', 'GeoPointField', 'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
'SequenceField', 'GenericEmbeddedDocumentField'] 'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']
RECURSIVE_REFERENCE_CONSTANT = 'self' RECURSIVE_REFERENCE_CONSTANT = 'self'
@@ -40,17 +52,17 @@ class StringField(BaseField):
return unicode(value) return unicode(value)
def validate(self, value): def validate(self, value):
assert isinstance(value, (str, unicode)) if not isinstance(value, (str, unicode)):
self.error('StringField only accepts string values')
if self.max_length is not None and len(value) > self.max_length: if self.max_length is not None and len(value) > self.max_length:
raise ValidationError('String value is too long') self.error('String value is too long')
if self.min_length is not None and len(value) < self.min_length: if self.min_length is not None and len(value) < self.min_length:
raise ValidationError('String value is too short') self.error('String value is too short')
if self.regex is not None and self.regex.match(value) is None: if self.regex is not None and self.regex.match(value) is None:
message = 'String value did not match validation regex' self.error('String value did not match validation regex')
raise ValidationError(message)
def lookup_member(self, member_name): def lookup_member(self, member_name):
return None return None
@@ -100,16 +112,15 @@ class URLField(StringField):
def validate(self, value): def validate(self, value):
if not URLField.URL_REGEX.match(value): if not URLField.URL_REGEX.match(value):
raise ValidationError('Invalid URL: %s' % value) self.error('Invalid URL: %s' % value)
if self.verify_exists: if self.verify_exists:
import urllib2 import urllib2
try: try:
request = urllib2.Request(value) request = urllib2.Request(value)
response = urllib2.urlopen(request) urllib2.urlopen(request)
except Exception, e: except Exception, e:
message = 'This URL appears to be a broken link: %s' % e self.error('This URL appears to be a broken link: %s' % e)
raise ValidationError(message)
class EmailField(StringField): class EmailField(StringField):
@@ -126,7 +137,7 @@ class EmailField(StringField):
def validate(self, value): def validate(self, value):
if not EmailField.EMAIL_REGEX.match(value): if not EmailField.EMAIL_REGEX.match(value):
raise ValidationError('Invalid Mail-address: %s' % value) self.error('Invalid Mail-address: %s' % value)
class IntField(BaseField): class IntField(BaseField):
@@ -144,13 +155,13 @@ class IntField(BaseField):
try: try:
value = int(value) value = int(value)
except: except:
raise ValidationError('%s could not be converted to int' % value) self.error('%s could not be converted to int' % value)
if self.min_value is not None and value < self.min_value: if self.min_value is not None and value < self.min_value:
raise ValidationError('Integer value is too small') self.error('Integer value is too small')
if self.max_value is not None and value > self.max_value: if self.max_value is not None and value > self.max_value:
raise ValidationError('Integer value is too large') self.error('Integer value is too large')
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
return int(value) return int(value)
@@ -170,13 +181,14 @@ class FloatField(BaseField):
def validate(self, value): def validate(self, value):
if isinstance(value, int): if isinstance(value, int):
value = float(value) value = float(value)
assert isinstance(value, float) if not isinstance(value, float):
self.error('FoatField only accepts float values')
if self.min_value is not None and value < self.min_value: if self.min_value is not None and value < self.min_value:
raise ValidationError('Float value is too small') self.error('Float value is too small')
if self.max_value is not None and value > self.max_value: if self.max_value is not None and value > self.max_value:
raise ValidationError('Float value is too large') self.error('Float value is too large')
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
return float(value) return float(value)
@@ -207,13 +219,13 @@ class DecimalField(BaseField):
try: try:
value = decimal.Decimal(value) value = decimal.Decimal(value)
except Exception, exc: except Exception, exc:
raise ValidationError('Could not convert to decimal: %s' % exc) self.error('Could not convert value to decimal: %s' % exc)
if self.min_value is not None and value < self.min_value: if self.min_value is not None and value < self.min_value:
raise ValidationError('Decimal value is too small') self.error('Decimal value is too small')
if self.max_value is not None and value > self.max_value: if self.max_value is not None and value > self.max_value:
raise ValidationError('Decimal value is too large') self.error('Decimal value is too large')
class BooleanField(BaseField): class BooleanField(BaseField):
@@ -226,7 +238,8 @@ class BooleanField(BaseField):
return bool(value) return bool(value)
def validate(self, value): def validate(self, value):
assert isinstance(value, bool) if not isinstance(value, bool):
self.error('BooleanField only accepts boolean values')
class DateTimeField(BaseField): class DateTimeField(BaseField):
@@ -239,7 +252,8 @@ class DateTimeField(BaseField):
""" """
def validate(self, value): def validate(self, value):
assert isinstance(value, (datetime.datetime, datetime.date)) if not isinstance(value, (datetime.datetime, datetime.date)):
self.error(u'cannot parse date "%s"' % value)
def to_mongo(self, value): def to_mongo(self, value):
return self.prepare_query_value(None, value) return self.prepare_query_value(None, value)
@@ -360,8 +374,8 @@ class ComplexDateTimeField(StringField):
def validate(self, value): def validate(self, value):
if not isinstance(value, datetime.datetime): if not isinstance(value, datetime.datetime):
raise ValidationError('Only datetime objects may used in a \ self.error('Only datetime objects may used in a '
ComplexDateTimeField') 'ComplexDateTimeField')
def to_python(self, value): def to_python(self, value):
return self._convert_from_string(value) return self._convert_from_string(value)
@@ -381,8 +395,8 @@ class EmbeddedDocumentField(BaseField):
def __init__(self, document_type, **kwargs): def __init__(self, document_type, **kwargs):
if not isinstance(document_type, basestring): if not isinstance(document_type, basestring):
if not issubclass(document_type, EmbeddedDocument): if not issubclass(document_type, EmbeddedDocument):
raise ValidationError('Invalid embedded document class ' self.error('Invalid embedded document class provided to an '
'provided to an EmbeddedDocumentField') 'EmbeddedDocumentField')
self.document_type_obj = document_type self.document_type_obj = document_type
super(EmbeddedDocumentField, self).__init__(**kwargs) super(EmbeddedDocumentField, self).__init__(**kwargs)
@@ -411,8 +425,8 @@ class EmbeddedDocumentField(BaseField):
""" """
# Using isinstance also works for subclasses of self.document # Using isinstance also works for subclasses of self.document
if not isinstance(value, self.document_type): if not isinstance(value, self.document_type):
raise ValidationError('Invalid embedded document instance ' self.error('Invalid embedded document instance provided to an '
'provided to an EmbeddedDocumentField') 'EmbeddedDocumentField')
self.document_type.validate(value) self.document_type.validate(value)
def lookup_member(self, member_name): def lookup_member(self, member_name):
@@ -441,8 +455,8 @@ class GenericEmbeddedDocumentField(BaseField):
def validate(self, value): def validate(self, value):
if not isinstance(value, EmbeddedDocument): if not isinstance(value, EmbeddedDocument):
raise ValidationError('Invalid embedded document instance ' self.error('Invalid embedded document instance provided to an '
'provided to an GenericEmbeddedDocumentField') 'GenericEmbeddedDocumentField')
value.validate() value.validate()
@@ -459,6 +473,9 @@ class GenericEmbeddedDocumentField(BaseField):
class ListField(ComplexBaseField): class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances """A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database. of the field to be used as a list in the database.
.. note::
Required means it cannot be empty - as the default for ListFields is []
""" """
# ListFields cannot be indexed with _types - MongoDB doesn't support this # ListFields cannot be indexed with _types - MongoDB doesn't support this
@@ -472,14 +489,15 @@ class ListField(ComplexBaseField):
def validate(self, value): def validate(self, value):
"""Make sure that a list of valid fields is being used. """Make sure that a list of valid fields is being used.
""" """
if not isinstance(value, (list, tuple)): if (not isinstance(value, (list, tuple, QuerySet)) or
raise ValidationError('Only lists and tuples may be used in a ' isinstance(value, basestring)):
'list field') self.error('Only lists and tuples may be used in a list field')
super(ListField, self).validate(value) super(ListField, self).validate(value)
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
if self.field: if self.field:
if op in ('set', 'unset') and (not isinstance(value, basestring) if op in ('set', 'unset') and (not isinstance(value, basestring)
and not isinstance(value, BaseDocument)
and hasattr(value, '__iter__')): and hasattr(value, '__iter__')):
return [self.field.prepare_query_value(op, v) for v in value] return [self.field.prepare_query_value(op, v) for v in value]
return self.field.prepare_query_value(op, value) return self.field.prepare_query_value(op, value)
@@ -491,27 +509,40 @@ class SortedListField(ListField):
the database in order to ensure that a sorted list is always the database in order to ensure that a sorted list is always
retrieved. retrieved.
.. warning::
There is a potential race condition when handling lists. If you set /
save the whole list then other processes trying to save the whole list
as well could overwrite changes. The safest way to append to a list is
to perform a push operation.
.. versionadded:: 0.4 .. versionadded:: 0.4
.. versionchanged:: 0.6 - added reverse keyword
""" """
_ordering = None _ordering = None
_order_reverse = False
def __init__(self, field, **kwargs): def __init__(self, field, **kwargs):
if 'ordering' in kwargs.keys(): if 'ordering' in kwargs.keys():
self._ordering = kwargs.pop('ordering') self._ordering = kwargs.pop('ordering')
if 'reverse' in kwargs.keys():
self._order_reverse = kwargs.pop('reverse')
super(SortedListField, self).__init__(field, **kwargs) super(SortedListField, self).__init__(field, **kwargs)
def to_mongo(self, value): def to_mongo(self, value):
value = super(SortedListField, self).to_mongo(value) value = super(SortedListField, self).to_mongo(value)
if self._ordering is not None: if self._ordering is not None:
return sorted(value, key=itemgetter(self._ordering)) return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse)
return sorted(value) return sorted(value, reverse=self._order_reverse)
class DictField(ComplexBaseField): class DictField(ComplexBaseField):
"""A dictionary field that wraps a standard Python dictionary. This is """A dictionary field that wraps a standard Python dictionary. This is
similar to an embedded document, but the structure is not defined. similar to an embedded document, but the structure is not defined.
.. note::
Required means it cannot be empty - as the default for ListFields is []
.. versionadded:: 0.3 .. versionadded:: 0.3
.. versionchanged:: 0.5 - Can now handle complex / varying types of data .. versionchanged:: 0.5 - Can now handle complex / varying types of data
""" """
@@ -519,7 +550,8 @@ class DictField(ComplexBaseField):
def __init__(self, basecls=None, field=None, *args, **kwargs): def __init__(self, basecls=None, field=None, *args, **kwargs):
self.field = field self.field = field
self.basecls = basecls or BaseField self.basecls = basecls or BaseField
assert issubclass(self.basecls, BaseField) if not issubclass(self.basecls, BaseField):
self.error('DictField only accepts dict values')
kwargs.setdefault('default', lambda: {}) kwargs.setdefault('default', lambda: {})
super(DictField, self).__init__(*args, **kwargs) super(DictField, self).__init__(*args, **kwargs)
@@ -527,12 +559,13 @@ class DictField(ComplexBaseField):
"""Make sure that a list of valid fields is being used. """Make sure that a list of valid fields is being used.
""" """
if not isinstance(value, dict): if not isinstance(value, dict):
raise ValidationError('Only dictionaries may be used in a ' self.error('Only dictionaries may be used in a DictField')
'DictField')
if any(('.' in k or '$' in k) for k in value): if any(k for k in value.keys() if not isinstance(k, basestring)):
raise ValidationError('Invalid dictionary key name - keys may not ' self.error('Invalid dictionary key - documents must have only string keys')
'contain "." or "$" characters') if any(('.' in k or '$' in k) for k in value.keys()):
self.error('Invalid dictionary key name - keys may not contain "."'
' or "$" characters')
super(DictField, self).validate(value) super(DictField, self).validate(value)
def lookup_member(self, member_name): def lookup_member(self, member_name):
@@ -559,18 +592,19 @@ class MapField(DictField):
def __init__(self, field=None, *args, **kwargs): def __init__(self, field=None, *args, **kwargs):
if not isinstance(field, BaseField): if not isinstance(field, BaseField):
raise ValidationError('Argument to MapField constructor must be ' self.error('Argument to MapField constructor must be a valid '
'a valid field') 'field')
super(MapField, self).__init__(field=field, *args, **kwargs) super(MapField, self).__init__(field=field, *args, **kwargs)
class ReferenceField(BaseField): class ReferenceField(BaseField):
"""A reference to a document that will be automatically dereferenced on """A reference to a document that will be automatically dereferenced on
access (lazily). access (lazily).
Use the `reverse_delete_rule` to handle what should happen if the document Use the `reverse_delete_rule` to handle what should happen if the document
the field is referencing is deleted. the field is referencing is deleted. EmbeddedDocuments, DictFields and
MapFields do not support reverse_delete_rules and an `InvalidDocumentError`
will be raised if trying to set on one of these Document / Field types.
The options are: The options are:
@@ -590,8 +624,8 @@ class ReferenceField(BaseField):
""" """
if not isinstance(document_type, basestring): if not isinstance(document_type, basestring):
if not issubclass(document_type, (Document, basestring)): if not issubclass(document_type, (Document, basestring)):
raise ValidationError('Argument to ReferenceField constructor ' self.error('Argument to ReferenceField constructor must be a '
'must be a document class or a string') 'document class or a string')
self.document_type_obj = document_type self.document_type_obj = document_type
self.reverse_delete_rule = reverse_delete_rule self.reverse_delete_rule = reverse_delete_rule
super(ReferenceField, self).__init__(**kwargs) super(ReferenceField, self).__init__(**kwargs)
@@ -615,14 +649,17 @@ class ReferenceField(BaseField):
# Get value from document instance if available # Get value from document instance if available
value = instance._data.get(self.name) value = instance._data.get(self.name)
# Dereference DBRefs # Dereference DBRefs
if isinstance(value, (pymongo.dbref.DBRef)): if isinstance(value, (DBRef)):
value = _get_db().dereference(value) value = self.document_type._get_db().dereference(value)
if value is not None: if value is not None:
instance._data[self.name] = self.document_type._from_son(value) instance._data[self.name] = self.document_type._from_son(value)
return super(ReferenceField, self).__get__(instance, owner) return super(ReferenceField, self).__get__(instance, owner)
def to_mongo(self, document): def to_mongo(self, document):
if isinstance(document, DBRef):
return document
id_field_name = self.document_type._meta['id_field'] id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name] id_field = self.document_type._fields[id_field_name]
@@ -630,25 +667,28 @@ class ReferenceField(BaseField):
# We need the id from the saved object to create the DBRef # We need the id from the saved object to create the DBRef
id_ = document.id id_ = document.id
if id_ is None: if id_ is None:
raise ValidationError('You can only reference documents once ' self.error('You can only reference documents once they have'
'they have been saved to the database') ' been saved to the database')
else: else:
id_ = document id_ = document
id_ = id_field.to_mongo(id_) id_ = id_field.to_mongo(id_)
collection = self.document_type._get_collection_name() collection = self.document_type._get_collection_name()
return pymongo.dbref.DBRef(collection, id_) return DBRef(collection, id_)
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
if value is None:
return None
return self.to_mongo(value) return self.to_mongo(value)
def validate(self, value): def validate(self, value):
assert isinstance(value, (self.document_type, pymongo.dbref.DBRef)) if not isinstance(value, (self.document_type, DBRef)):
self.error('A ReferenceField only accepts DBRef')
if isinstance(value, Document) and value.id is None: if isinstance(value, Document) and value.id is None:
raise ValidationError('You can only reference documents once ' self.error('You can only reference documents once they have been '
'they have been saved to the database') 'saved to the database')
def lookup_member(self, member_name): def lookup_member(self, member_name):
return self.document_type._fields.get(member_name) return self.document_type._fields.get(member_name)
@@ -669,24 +709,24 @@ class GenericReferenceField(BaseField):
return self return self
value = instance._data.get(self.name) value = instance._data.get(self.name)
if isinstance(value, (dict, pymongo.son.SON)): if isinstance(value, (dict, SON)):
instance._data[self.name] = self.dereference(value) instance._data[self.name] = self.dereference(value)
return super(GenericReferenceField, self).__get__(instance, owner) return super(GenericReferenceField, self).__get__(instance, owner)
def validate(self, value): def validate(self, value):
if not isinstance(value, (Document, pymongo.dbref.DBRef)): if not isinstance(value, (Document, DBRef)):
raise ValidationError('GenericReferences can only contain documents') self.error('GenericReferences can only contain documents')
# We need the id from the saved object to create the DBRef # We need the id from the saved object to create the DBRef
if isinstance(value, Document) and value.id is None: if isinstance(value, Document) and value.id is None:
raise ValidationError('You can only reference documents once ' self.error('You can only reference documents once they have been'
'they have been saved to the database') ' saved to the database')
def dereference(self, value): def dereference(self, value):
doc_cls = get_document(value['_cls']) doc_cls = get_document(value['_cls'])
reference = value['_ref'] reference = value['_ref']
doc = _get_db().dereference(reference) doc = doc_cls._get_db().dereference(reference)
if doc is not None: if doc is not None:
doc = doc_cls._from_son(doc) doc = doc_cls._from_son(doc)
return doc return doc
@@ -702,17 +742,20 @@ class GenericReferenceField(BaseField):
# We need the id from the saved object to create the DBRef # We need the id from the saved object to create the DBRef
id_ = document.id id_ = document.id
if id_ is None: if id_ is None:
raise ValidationError('You can only reference documents once ' self.error('You can only reference documents once they have'
'they have been saved to the database') ' been saved to the database')
else: else:
id_ = document id_ = document
id_ = id_field.to_mongo(id_) id_ = id_field.to_mongo(id_)
collection = document._get_collection_name() collection = document._get_collection_name()
ref = pymongo.dbref.DBRef(collection, id_) ref = DBRef(collection, id_)
return {'_cls': document._class_name, '_ref': ref} return {'_cls': document._class_name, '_ref': ref}
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
if value is None:
return None
return self.to_mongo(value) return self.to_mongo(value)
@@ -725,17 +768,18 @@ class BinaryField(BaseField):
super(BinaryField, self).__init__(**kwargs) super(BinaryField, self).__init__(**kwargs)
def to_mongo(self, value): def to_mongo(self, value):
return pymongo.binary.Binary(value) return Binary(value)
def to_python(self, value): def to_python(self, value):
# Returns str not unicode as this is binary data # Returns str not unicode as this is binary data
return str(value) return str(value)
def validate(self, value): def validate(self, value):
assert isinstance(value, str) if not isinstance(value, str):
self.error('BinaryField only accepts string values')
if self.max_bytes is not None and len(value) > self.max_bytes: if self.max_bytes is not None and len(value) > self.max_bytes:
raise ValidationError('Binary value is too long') self.error('Binary value is too long')
class GridFSError(Exception): class GridFSError(Exception):
@@ -747,17 +791,28 @@ class GridFSProxy(object):
.. versionadded:: 0.4 .. versionadded:: 0.4
.. versionchanged:: 0.5 - added optional size param to read .. versionchanged:: 0.5 - added optional size param to read
.. versionchanged:: 0.6 - added collection name param
""" """
def __init__(self, grid_id=None, key=None, instance=None): _fs = None
self.fs = gridfs.GridFS(_get_db()) # Filesystem instance
self.newfile = None # Used for partial writes def __init__(self, grid_id=None, key=None,
instance=None,
db_alias=DEFAULT_CONNECTION_NAME,
collection_name='fs'):
self.grid_id = grid_id # Store GridFS id for file self.grid_id = grid_id # Store GridFS id for file
self.gridout = None
self.key = key self.key = key
self.instance = instance self.instance = instance
self.db_alias = db_alias
self.collection_name = collection_name
self.newfile = None # Used for partial writes
self.gridout = None
def __getattr__(self, name): def __getattr__(self, name):
attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias',
'collection_name', 'newfile', 'gridout')
if name in attrs:
return self.__getattribute__(name)
obj = self.get() obj = self.get()
if name in dir(obj): if name in dir(obj):
return getattr(obj, name) return getattr(obj, name)
@@ -769,6 +824,17 @@ class GridFSProxy(object):
def __nonzero__(self): def __nonzero__(self):
return bool(self.grid_id) return bool(self.grid_id)
def __getstate__(self):
self_dict = self.__dict__
self_dict['_fs'] = None
return self_dict
@property
def fs(self):
if not self._fs:
self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name)
return self._fs
def get(self, id=None): def get(self, id=None):
if id: if id:
self.grid_id = id self.grid_id = id
@@ -840,10 +906,16 @@ class FileField(BaseField):
.. versionadded:: 0.4 .. versionadded:: 0.4
.. versionchanged:: 0.5 added optional size param for read .. versionchanged:: 0.5 added optional size param for read
.. versionchanged:: 0.6 added db_alias for multidb support
""" """
proxy_class = GridFSProxy
def __init__(self, **kwargs): def __init__(self,
db_alias=DEFAULT_CONNECTION_NAME,
collection_name="fs", **kwargs):
super(FileField, self).__init__(**kwargs) super(FileField, self).__init__(**kwargs)
self.collection_name = collection_name
self.db_alias = db_alias
def __get__(self, instance, owner): def __get__(self, instance, owner):
if instance is None: if instance is None:
@@ -852,12 +924,14 @@ class FileField(BaseField):
# Check if a file already exists for this model # Check if a file already exists for this model
grid_file = instance._data.get(self.name) grid_file = instance._data.get(self.name)
self.grid_file = grid_file self.grid_file = grid_file
if isinstance(self.grid_file, GridFSProxy): if isinstance(self.grid_file, self.proxy_class):
if not self.grid_file.key: if not self.grid_file.key:
self.grid_file.key = self.name self.grid_file.key = self.name
self.grid_file.instance = instance self.grid_file.instance = instance
return self.grid_file return self.grid_file
return GridFSProxy(key=self.name, instance=instance) return self.proxy_class(key=self.name, instance=instance,
db_alias=self.db_alias,
collection_name=self.collection_name)
def __set__(self, instance, value): def __set__(self, instance, value):
key = self.name key = self.name
@@ -874,7 +948,8 @@ class FileField(BaseField):
grid_file.put(value) grid_file.put(value)
else: else:
# Create a new proxy object as we don't already have one # Create a new proxy object as we don't already have one
instance._data[key] = GridFSProxy(key=key, instance=instance) instance._data[key] = self.proxy_class(key=key, instance=instance,
collection_name=self.collection_name)
instance._data[key].put(value) instance._data[key].put(value)
else: else:
instance._data[key] = value instance._data[key] = value
@@ -883,18 +958,181 @@ class FileField(BaseField):
def to_mongo(self, value): def to_mongo(self, value):
# Store the GridFS file id in MongoDB # Store the GridFS file id in MongoDB
if isinstance(value, GridFSProxy) and value.grid_id is not None: if isinstance(value, self.proxy_class) and value.grid_id is not None:
return value.grid_id return value.grid_id
return None return None
def to_python(self, value): def to_python(self, value):
if value is not None: if value is not None:
return GridFSProxy(value) return self.proxy_class(value,
collection_name=self.collection_name,
db_alias=self.db_alias)
def validate(self, value): def validate(self, value):
if value.grid_id is not None: if value.grid_id is not None:
assert isinstance(value, GridFSProxy) if not isinstance(value, self.proxy_class):
assert isinstance(value.grid_id, pymongo.objectid.ObjectId) self.error('FileField only accepts GridFSProxy values')
if not isinstance(value.grid_id, ObjectId):
self.error('Invalid GridFSProxy value')
class ImageGridFsProxy(GridFSProxy):
"""
Proxy for ImageField
versionadded: 0.6
"""
def put(self, file_obj, **kwargs):
"""
Insert a image in database
applying field properties (size, thumbnail_size)
"""
field = self.instance._fields[self.key]
try:
img = Image.open(file_obj)
except:
raise ValidationError('Invalid image')
if (field.size and (img.size[0] > field.size['width'] or
img.size[1] > field.size['height'])):
size = field.size
if size['force']:
img = ImageOps.fit(img,
(size['width'],
size['height']),
Image.ANTIALIAS)
else:
img.thumbnail((size['width'],
size['height']),
Image.ANTIALIAS)
thumbnail = None
if field.thumbnail_size:
size = field.thumbnail_size
if size['force']:
thumbnail = ImageOps.fit(img,
(size['width'],
size['height']),
Image.ANTIALIAS)
else:
thumbnail = img.copy()
thumbnail.thumbnail((size['width'],
size['height']),
Image.ANTIALIAS)
if thumbnail:
thumb_id = self._put_thumbnail(thumbnail,
img.format)
else:
thumb_id = None
w, h = img.size
io = StringIO()
img.save(io, img.format)
io.seek(0)
return super(ImageGridFsProxy, self).put(io,
width=w,
height=h,
format=img.format,
thumbnail_id=thumb_id,
**kwargs)
def delete(self, *args, **kwargs):
#deletes thumbnail
out = self.get()
if out and out.thumbnail_id:
self.fs.delete(out.thumbnail_id)
return super(ImageGridFsProxy, self).delete(*args, **kwargs)
def _put_thumbnail(self, thumbnail, format, **kwargs):
w, h = thumbnail.size
io = StringIO()
thumbnail.save(io, format)
io.seek(0)
return self.fs.put(io, width=w,
height=h,
format=format,
**kwargs)
@property
def size(self):
"""
return a width, height of image
"""
out = self.get()
if out:
return out.width, out.height
@property
def format(self):
"""
return format of image
ex: PNG, JPEG, GIF, etc
"""
out = self.get()
if out:
return out.format
@property
def thumbnail(self):
"""
return a gridfs.grid_file.GridOut
representing a thumbnail of Image
"""
out = self.get()
if out and out.thumbnail_id:
return self.fs.get(out.thumbnail_id)
def write(self, *args, **kwargs):
raise RuntimeError("Please use \"put\" method instead")
def writelines(self, *args, **kwargs):
raise RuntimeError("Please use \"put\" method instead")
class ImproperlyConfigured(Exception):
pass
class ImageField(FileField):
"""
A Image File storage field.
@size (width, height, force):
max size to store images, if larger will be automatically resized
ex: size=(800, 600, True)
@thumbnail (width, height, force):
size to generate a thumbnail
.. versionadded:: 0.6
"""
proxy_class = ImageGridFsProxy
def __init__(self, size=None, thumbnail_size=None,
collection_name='images', **kwargs):
if not Image:
raise ImproperlyConfigured("PIL library was not found")
params_size = ('width', 'height', 'force')
extra_args = dict(size=size, thumbnail_size=thumbnail_size)
for att_name, att in extra_args.items():
if att and (isinstance(att, tuple) or isinstance(att, list)):
setattr(self, att_name, dict(
map(None, params_size, att)))
else:
setattr(self, att_name, None)
super(ImageField, self).__init__(
collection_name=collection_name,
**kwargs)
class GeoPointField(BaseField): class GeoPointField(BaseField):
@@ -909,14 +1147,14 @@ class GeoPointField(BaseField):
"""Make sure that a geo-value is of type (x, y) """Make sure that a geo-value is of type (x, y)
""" """
if not isinstance(value, (list, tuple)): if not isinstance(value, (list, tuple)):
raise ValidationError('GeoPointField can only accept tuples or ' self.error('GeoPointField can only accept tuples or lists '
'lists of (x, y)') 'of (x, y)')
if not len(value) == 2: if not len(value) == 2:
raise ValidationError('Value must be a two-dimensional point.') self.error('Value must be a two-dimensional point')
if (not isinstance(value[0], (float, int)) and if (not isinstance(value[0], (float, int)) and
not isinstance(value[1], (float, int))): not isinstance(value[1], (float, int))):
raise ValidationError('Both values in point must be float or int.') self.error('Both values in point must be float or int')
class SequenceField(IntField): class SequenceField(IntField):
@@ -932,8 +1170,9 @@ class SequenceField(IntField):
.. versionadded:: 0.5 .. versionadded:: 0.5
""" """
def __init__(self, collection_name=None, *args, **kwargs): def __init__(self, collection_name=None, db_alias = None, *args, **kwargs):
self.collection_name = collection_name or 'mongoengine.counters' self.collection_name = collection_name or 'mongoengine.counters'
self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
return super(SequenceField, self).__init__(*args, **kwargs) return super(SequenceField, self).__init__(*args, **kwargs)
def generate_new_value(self): def generate_new_value(self):
@@ -942,7 +1181,7 @@ class SequenceField(IntField):
""" """
sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
self.name) self.name)
collection = _get_db()[self.collection_name] collection = get_db(alias = self.db_alias )[self.collection_name]
counter = collection.find_and_modify(query={"_id": sequence_id}, counter = collection.find_and_modify(query={"_id": sequence_id},
update={"$inc": {"next": 1}}, update={"$inc": {"next": 1}},
new=True, new=True,
@@ -977,3 +1216,30 @@ class SequenceField(IntField):
if value is None: if value is None:
value = self.generate_new_value() value = self.generate_new_value()
return value return value
class UUIDField(BaseField):
"""A UUID field.
.. versionadded:: 0.6
"""
def __init__(self, **kwargs):
super(UUIDField, self).__init__(**kwargs)
def to_python(self, value):
if not isinstance(value, basestring):
value = unicode(value)
return uuid.UUID(value)
def to_mongo(self, value):
return unicode(value)
def validate(self, value):
if not isinstance(value, uuid.UUID):
if not isinstance(value, basestring):
value = str(value)
try:
value = uuid.UUID(value)
except Exception, exc:
self.error('Could not convert to UUID: %s' % exc)

View File

@@ -1,15 +1,14 @@
from connection import _get_db
import pprint import pprint
import pymongo
import pymongo.code
import pymongo.dbref
import pymongo.objectid
import re import re
import copy import copy
import itertools import itertools
import operator import operator
import pymongo
from bson.code import Code
from mongoengine import signals
__all__ = ['queryset_manager', 'Q', 'InvalidQueryError', __all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']
@@ -274,16 +273,20 @@ class Q(QNode):
class QueryFieldList(object): class QueryFieldList(object):
"""Object that handles combinations of .only() and .exclude() calls""" """Object that handles combinations of .only() and .exclude() calls"""
ONLY = True ONLY = 1
EXCLUDE = False EXCLUDE = 0
def __init__(self, fields=[], value=ONLY, always_include=[]): def __init__(self, fields=[], value=ONLY, always_include=[]):
self.value = value self.value = value
self.fields = set(fields) self.fields = set(fields)
self.always_include = set(always_include) self.always_include = set(always_include)
self._id = None
def as_dict(self): def as_dict(self):
return dict((field, self.value) for field in self.fields) field_list = dict((field, self.value) for field in self.fields)
if self._id is not None:
field_list['_id'] = self._id
return field_list
def __add__(self, f): def __add__(self, f):
if not self.fields: if not self.fields:
@@ -299,6 +302,9 @@ class QueryFieldList(object):
self.value = self.ONLY self.value = self.ONLY
self.fields = f.fields - self.fields self.fields = f.fields - self.fields
if '_id' in f.fields:
self._id = f.value
if self.always_include: if self.always_include:
if self.value is self.ONLY and self.fields: if self.value is self.ONLY and self.fields:
self.fields = self.fields.union(self.always_include) self.fields = self.fields.union(self.always_include)
@@ -334,6 +340,7 @@ class QuerySet(object):
self._timeout = True self._timeout = True
self._class_check = True self._class_check = True
self._slave_okay = False self._slave_okay = False
self._scalar = []
# If inheritance is allowed, only return instances and instances of # If inheritance is allowed, only return instances and instances of
# subclasses of the class being used # subclasses of the class being used
@@ -399,11 +406,13 @@ class QuerySet(object):
index_list = [] index_list = []
use_types = doc_cls._meta.get('allow_inheritance', True) use_types = doc_cls._meta.get('allow_inheritance', True)
for key in spec['fields']: for key in spec['fields']:
# Get direction from + or - # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
direction = pymongo.ASCENDING direction = pymongo.ASCENDING
if key.startswith("-"): if key.startswith("-"):
direction = pymongo.DESCENDING direction = pymongo.DESCENDING
if key.startswith(("+", "-")): elif key.startswith("*"):
direction = pymongo.GEO2D
if key.startswith(("+", "-", "*")):
key = key[1:] key = key[1:]
# Use real field name, do it manually because we need field # Use real field name, do it manually because we need field
@@ -421,7 +430,7 @@ class QuerySet(object):
# If _types is being used, prepend it to every specified index # If _types is being used, prepend it to every specified index
index_types = doc_cls._meta.get('index_types', True) index_types = doc_cls._meta.get('index_types', True)
allow_inheritance = doc_cls._meta.get('allow_inheritance') allow_inheritance = doc_cls._meta.get('allow_inheritance')
if spec.get('types', index_types) and allow_inheritance and use_types: if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
index_list.insert(0, ('_types', 1)) index_list.insert(0, ('_types', 1))
spec['fields'] = index_list spec['fields'] = index_list
@@ -434,9 +443,11 @@ class QuerySet(object):
return spec return spec
@classmethod @classmethod
def _reset_already_indexed(cls): def _reset_already_indexed(cls, document=None):
"""Helper to reset already indexed, can be useful for testing purposes""" """Helper to reset already indexed, can be useful for testing purposes"""
cls.__already_indexed = set() if document:
cls.__already_indexed.discard(document)
cls.__already_indexed.clear()
def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
"""Filter the selected documents by calling the """Filter the selected documents by calling the
@@ -476,6 +487,13 @@ class QuerySet(object):
perform operations only if the collection is accessed. perform operations only if the collection is accessed.
""" """
if self._document not in QuerySet.__already_indexed: if self._document not in QuerySet.__already_indexed:
# Ensure collection exists
db = self._document._get_db()
if self._collection_obj.name not in db.collection_names():
self._document._collection = None
self._collection_obj = self._document._get_collection()
QuerySet.__already_indexed.add(self._document) QuerySet.__already_indexed.add(self._document)
background = self._document._meta.get('index_background', False) background = self._document._meta.get('index_background', False)
@@ -555,7 +573,7 @@ class QuerySet(object):
self.order_by(*self._document._meta['ordering']) self.order_by(*self._document._meta['ordering'])
if self._limit is not None: if self._limit is not None:
self._cursor_obj.limit(self._limit) self._cursor_obj.limit(self._limit - (self._skip or 0))
if self._skip is not None: if self._skip is not None:
self._cursor_obj.skip(self._skip) self._cursor_obj.skip(self._skip)
@@ -590,8 +608,18 @@ class QuerySet(object):
if field_name == 'pk': if field_name == 'pk':
# Deal with "primary key" alias # Deal with "primary key" alias
field_name = document._meta['id_field'] field_name = document._meta['id_field']
if field_name in document._fields:
field = document._fields[field_name] field = document._fields[field_name]
elif document._dynamic:
from base import BaseDynamicField
field = BaseDynamicField(db_field=field_name)
else: else:
raise InvalidQueryError('Cannot resolve field "%s"'
% field_name)
else:
from mongoengine.fields import ReferenceField, GenericReferenceField
if isinstance(field, (ReferenceField, GenericReferenceField)):
raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
# Look up subfield on the previous field # Look up subfield on the previous field
new_field = field.lookup_member(field_name) new_field = field.lookup_member(field_name)
from base import ComplexBaseField from base import ComplexBaseField
@@ -603,7 +631,6 @@ class QuerySet(object):
% field_name) % field_name)
field = new_field # update field to the new field type field = new_field # update field to the new field type
fields.append(field) fields.append(field)
return fields return fields
@classmethod @classmethod
@@ -624,6 +651,7 @@ class QuerySet(object):
match_operators = ['contains', 'icontains', 'startswith', match_operators = ['contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith', 'istartswith', 'endswith', 'iendswith',
'exact', 'iexact'] 'exact', 'iexact']
custom_operators = ['match']
mongo_query = {} mongo_query = {}
for key, value in query.items(): for key, value in query.items():
@@ -636,7 +664,7 @@ class QuerySet(object):
parts = [part for part in parts if not part.isdigit()] parts = [part for part in parts if not part.isdigit()]
# Check for an operator and transform to mongo-style if there is # Check for an operator and transform to mongo-style if there is
op = None op = None
if parts[-1] in operators + match_operators + geo_operators: if parts[-1] in operators + match_operators + geo_operators + custom_operators:
op = parts.pop() op = parts.pop()
negate = False negate = False
@@ -650,8 +678,8 @@ class QuerySet(object):
parts = [] parts = []
cleaned_fields = [] cleaned_fields = []
append_field = True
for field in fields: for field in fields:
append_field = True
if isinstance(field, str): if isinstance(field, str):
parts.append(field) parts.append(field)
append_field = False append_field = False
@@ -669,7 +697,7 @@ class QuerySet(object):
if isinstance(field, basestring): if isinstance(field, basestring):
if op in match_operators and isinstance(value, basestring): if op in match_operators and isinstance(value, basestring):
from mongoengine import StringField from mongoengine import StringField
value = StringField().prepare_query_value(op, value) value = StringField.prepare_query_value(op, value)
else: else:
value = field value = field
else: else:
@@ -696,6 +724,12 @@ class QuerySet(object):
else: else:
raise NotImplementedError("Geo method '%s' has not " raise NotImplementedError("Geo method '%s' has not "
"been implemented" % op) "been implemented" % op)
elif op in custom_operators:
if op == 'match':
value = {"$elemMatch": value}
else:
NotImplementedError("Custom method '%s' has not "
"been implemented" % op)
elif op not in match_operators: elif op not in match_operators:
value = {'$' + op: value} value = {'$' + op: value}
@@ -721,18 +755,23 @@ class QuerySet(object):
.. versionadded:: 0.3 .. versionadded:: 0.3
""" """
self.limit(2)
self.__call__(*q_objs, **query) self.__call__(*q_objs, **query)
count = self.count() try:
if count == 1: result1 = self.next()
return self[0] except StopIteration:
elif count > 1:
message = u'%d items returned, instead of 1' % count
raise self._document.MultipleObjectsReturned(message)
else:
raise self._document.DoesNotExist("%s matching query does not exist." raise self._document.DoesNotExist("%s matching query does not exist."
% self._document._class_name) % self._document._class_name)
try:
result2 = self.next()
except StopIteration:
return result1
def get_or_create(self, write_options=None, *q_objs, **query): self.rewind()
message = u'%d items returned, instead of 1' % self.count()
raise self._document.MultipleObjectsReturned(message)
def get_or_create(self, write_options=None, auto_save=True, *q_objs, **query):
"""Retrieve unique object or create, if it doesn't exist. Returns a tuple of """Retrieve unique object or create, if it doesn't exist. Returns a tuple of
``(object, created)``, where ``object`` is the retrieved or created object ``(object, created)``, where ``object`` is the retrieved or created object
and ``created`` is a boolean specifying whether a new object was created. Raises and ``created`` is a boolean specifying whether a new object was created. Raises
@@ -747,23 +786,25 @@ class QuerySet(object):
Passes any write_options onto :meth:`~mongoengine.Document.save` Passes any write_options onto :meth:`~mongoengine.Document.save`
.. versionadded:: 0.3 .. versionadded:: 0.3
:param auto_save: if the object is to be saved automatically if not found.
.. versionadded:: 0.6
""" """
defaults = query.get('defaults', {}) defaults = query.get('defaults', {})
if 'defaults' in query: if 'defaults' in query:
del query['defaults'] del query['defaults']
self.__call__(*q_objs, **query) try:
count = self.count() doc = self.get(*q_objs, **query)
if count == 0: return doc, False
except self._document.DoesNotExist:
query.update(defaults) query.update(defaults)
doc = self._document(**query) doc = self._document(**query)
if auto_save:
doc.save(write_options=write_options) doc.save(write_options=write_options)
return doc, True return doc, True
elif count == 1:
return self.first(), False
else:
message = u'%d items returned, instead of 1' % count
raise self._document.MultipleObjectsReturned(message)
def create(self, **kwargs): def create(self, **kwargs):
"""Create new object. Returns the saved object instance. """Create new object. Returns the saved object instance.
@@ -812,23 +853,33 @@ class QuerySet(object):
raise OperationError(msg) raise OperationError(msg)
raw.append(doc.to_mongo()) raw.append(doc.to_mongo())
signals.pre_bulk_insert.send(self._document, documents=docs)
ids = self._collection.insert(raw) ids = self._collection.insert(raw)
if not load_bulk: if not load_bulk:
signals.post_bulk_insert.send(
self._document, documents=docs, loaded=False)
return return_one and ids[0] or ids return return_one and ids[0] or ids
documents = self.in_bulk(ids) documents = self.in_bulk(ids)
results = [] results = []
for obj_id in ids: for obj_id in ids:
results.append(documents.get(obj_id)) results.append(documents.get(obj_id))
signals.post_bulk_insert.send(
self._document, documents=results, loaded=True)
return return_one and results[0] or results return return_one and results[0] or results
def with_id(self, object_id): def with_id(self, object_id):
"""Retrieve the object matching the id provided. """Retrieve the object matching the id provided. Uses `object_id` only
and raises InvalidQueryError if a filter has been applied.
:param object_id: the value for the id of the document to look up :param object_id: the value for the id of the document to look up
.. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set
""" """
return self._document.objects(pk=object_id).first() if not self._query_obj.empty:
raise InvalidQueryError("Cannot use a filter whilst using `with_id`")
return self.filter(pk=object_id).first()
def in_bulk(self, object_ids): def in_bulk(self, object_ids):
"""Retrieve a set of documents by their ids. """Retrieve a set of documents by their ids.
@@ -843,6 +894,11 @@ class QuerySet(object):
docs = self._collection.find({'_id': {'$in': object_ids}}, docs = self._collection.find({'_id': {'$in': object_ids}},
**self._cursor_args) **self._cursor_args)
if self._scalar:
for doc in docs:
doc_map[doc['_id']] = self._get_scalar(
self._document._from_son(doc))
else:
for doc in docs: for doc in docs:
doc_map[doc['_id']] = self._document._from_son(doc) doc_map[doc['_id']] = self._document._from_son(doc)
@@ -854,6 +910,9 @@ class QuerySet(object):
try: try:
if self._limit == 0: if self._limit == 0:
raise StopIteration raise StopIteration
if self._scalar:
return self._get_scalar(self._document._from_son(
self._cursor.next()))
return self._document._from_son(self._cursor.next()) return self._document._from_son(self._cursor.next())
except StopIteration, e: except StopIteration, e:
self.rewind() self.rewind()
@@ -887,9 +946,9 @@ class QuerySet(object):
and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced`
tests in ``tests.queryset.QuerySetTest`` for usage examples. tests in ``tests.queryset.QuerySetTest`` for usage examples.
:param map_f: map function, as :class:`~pymongo.code.Code` or string :param map_f: map function, as :class:`~bson.code.Code` or string
:param reduce_f: reduce function, as :param reduce_f: reduce function, as
:class:`~pymongo.code.Code` or string :class:`~bson.code.Code` or string
:param output: output collection name, if set to 'inline' will try to :param output: output collection name, if set to 'inline' will try to
use :class:`~pymongo.collection.Collection.inline_map_reduce` use :class:`~pymongo.collection.Collection.inline_map_reduce`
:param finalize_f: finalize function, an optional function that :param finalize_f: finalize function, an optional function that
@@ -919,27 +978,27 @@ class QuerySet(object):
raise NotImplementedError("Requires MongoDB >= 1.7.1") raise NotImplementedError("Requires MongoDB >= 1.7.1")
map_f_scope = {} map_f_scope = {}
if isinstance(map_f, pymongo.code.Code): if isinstance(map_f, Code):
map_f_scope = map_f.scope map_f_scope = map_f.scope
map_f = unicode(map_f) map_f = unicode(map_f)
map_f = pymongo.code.Code(self._sub_js_fields(map_f), map_f_scope) map_f = Code(self._sub_js_fields(map_f), map_f_scope)
reduce_f_scope = {} reduce_f_scope = {}
if isinstance(reduce_f, pymongo.code.Code): if isinstance(reduce_f, Code):
reduce_f_scope = reduce_f.scope reduce_f_scope = reduce_f.scope
reduce_f = unicode(reduce_f) reduce_f = unicode(reduce_f)
reduce_f_code = self._sub_js_fields(reduce_f) reduce_f_code = self._sub_js_fields(reduce_f)
reduce_f = pymongo.code.Code(reduce_f_code, reduce_f_scope) reduce_f = Code(reduce_f_code, reduce_f_scope)
mr_args = {'query': self._query} mr_args = {'query': self._query}
if finalize_f: if finalize_f:
finalize_f_scope = {} finalize_f_scope = {}
if isinstance(finalize_f, pymongo.code.Code): if isinstance(finalize_f, Code):
finalize_f_scope = finalize_f.scope finalize_f_scope = finalize_f.scope
finalize_f = unicode(finalize_f) finalize_f = unicode(finalize_f)
finalize_f_code = self._sub_js_fields(finalize_f) finalize_f_code = self._sub_js_fields(finalize_f)
finalize_f = pymongo.code.Code(finalize_f_code, finalize_f_scope) finalize_f = Code(finalize_f_code, finalize_f_scope)
mr_args['finalize'] = finalize_f mr_args['finalize'] = finalize_f
if scope: if scope:
@@ -1030,6 +1089,9 @@ class QuerySet(object):
return self return self
# Integer index provided # Integer index provided
elif isinstance(key, int): elif isinstance(key, int):
if self._scalar:
return self._get_scalar(self._document._from_son(
self._cursor[key]))
return self._document._from_son(self._cursor[key]) return self._document._from_son(self._cursor[key])
raise AttributeError raise AttributeError
@@ -1039,8 +1101,10 @@ class QuerySet(object):
:param field: the field to select distinct values from :param field: the field to select distinct values from
.. versionadded:: 0.4 .. versionadded:: 0.4
.. versionchanged:: 0.5 - Fixed handling references
""" """
return self._cursor.distinct(field) from dereference import DeReference
return DeReference()(self._cursor.distinct(field), 1)
def only(self, *fields): def only(self, *fields):
"""Load only a subset of this document's fields. :: """Load only a subset of this document's fields. ::
@@ -1226,6 +1290,9 @@ class QuerySet(object):
mongo_update = {} mongo_update = {}
for key, value in update.items(): for key, value in update.items():
if key == "__raw__":
mongo_update.update(value)
continue
parts = key.split('__') parts = key.split('__')
# Check for an operator and transform to mongo-style if there is # Check for an operator and transform to mongo-style if there is
op = None op = None
@@ -1249,8 +1316,8 @@ class QuerySet(object):
parts = [] parts = []
cleaned_fields = [] cleaned_fields = []
append_field = True
for field in fields: for field in fields:
append_field = True
if isinstance(field, str): if isinstance(field, str):
# Convert the S operator to $ # Convert the S operator to $
if field == 'S': if field == 'S':
@@ -1266,17 +1333,21 @@ class QuerySet(object):
field = cleaned_fields[-1] field = cleaned_fields[-1]
if op in (None, 'set', 'push', 'pull', 'addToSet'): if op in (None, 'set', 'push', 'pull', 'addToSet'):
if field.required or value is not None:
value = field.prepare_query_value(op, value) value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'): elif op in ('pushAll', 'pullAll'):
value = [field.prepare_query_value(op, v) for v in value] value = [field.prepare_query_value(op, v) for v in value]
key = '.'.join(parts) key = '.'.join(parts)
if not op:
raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value")
if op: if op:
value = {key: value} value = {key: value}
key = '$' + op key = '$' + op
if op is None or key not in mongo_update: if key not in mongo_update:
mongo_update[key] = value mongo_update[key] = value
elif key in mongo_update and isinstance(mongo_update[key], dict): elif key in mongo_update and isinstance(mongo_update[key], dict):
mongo_update[key].update(value) mongo_update[key].update(value)
@@ -1300,8 +1371,15 @@ class QuerySet(object):
write_options = {} write_options = {}
update = QuerySet._transform_update(self._document, **update) update = QuerySet._transform_update(self._document, **update)
query = self._query
# SERVER-5247 hack
remove_types = "_types" in query and ".$." in unicode(update)
if remove_types:
del query["_types"]
try: try:
ret = self._collection.update(self._query, update, multi=multi, ret = self._collection.update(query, update, multi=multi,
upsert=upsert, safe=safe_update, upsert=upsert, safe=safe_update,
**write_options) **write_options)
if ret is not None and 'n' in ret: if ret is not None and 'n' in ret:
@@ -1329,10 +1407,17 @@ class QuerySet(object):
if not write_options: if not write_options:
write_options = {} write_options = {}
update = QuerySet._transform_update(self._document, **update) update = QuerySet._transform_update(self._document, **update)
query = self._query
# SERVER-5247 hack
remove_types = "_types" in query and ".$." in unicode(update)
if remove_types:
del query["_types"]
try: try:
# Explicitly provide 'multi=False' to newer versions of PyMongo # Explicitly provide 'multi=False' to newer versions of PyMongo
# as the default may change to 'True' # as the default may change to 'True'
ret = self._collection.update(self._query, update, multi=False, ret = self._collection.update(query, update, multi=False,
upsert=upsert, safe=safe_update, upsert=upsert, safe=safe_update,
**write_options) **write_options)
@@ -1342,8 +1427,47 @@ class QuerySet(object):
raise OperationError(u'Update failed [%s]' % unicode(e)) raise OperationError(u'Update failed [%s]' % unicode(e))
def __iter__(self): def __iter__(self):
self.rewind()
return self return self
def _get_scalar(self, doc):
def lookup(obj, name):
chunks = name.split('__')
for chunk in chunks:
if hasattr(obj, '_db_field_map'):
chunk = obj._db_field_map.get(chunk, chunk)
obj = getattr(obj, chunk)
return obj
data = [lookup(doc, n) for n in self._scalar]
if len(data) == 1:
return data[0]
return tuple(data)
def scalar(self, *fields):
"""Instead of returning Document instances, return either a specific
value or a tuple of values in order.
This effects all results and can be unset by calling ``scalar``
without arguments. Calls ``only`` automatically.
:param fields: One or more fields to return instead of a Document.
"""
self._scalar = list(fields)
if fields:
self.only(*fields)
else:
self.all_fields()
return self
def values_list(self, *fields):
"""An alias for scalar"""
return self.scalar(*fields)
def _sub_js_fields(self, code): def _sub_js_fields(self, code):
"""When fields are specified with [~fieldname] syntax, where """When fields are specified with [~fieldname] syntax, where
*fieldname* is the Python name of a field, *fieldname* will be *fieldname* is the Python name of a field, *fieldname* will be
@@ -1406,9 +1530,9 @@ class QuerySet(object):
query['$where'] = self._where_clause query['$where'] = self._where_clause
scope['query'] = query scope['query'] = query
code = pymongo.code.Code(code, scope=scope) code = Code(code, scope=scope)
db = _get_db() db = self._document._get_db()
return db.eval(code, *fields) return db.eval(code, *fields)
def where(self, where_clause): def where(self, where_clause):
@@ -1435,13 +1559,13 @@ class QuerySet(object):
.. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work
with sharding. with sharding.
""" """
map_func = pymongo.code.Code(""" map_func = Code("""
function() { function() {
emit(1, this[field] || 0); emit(1, this[field] || 0);
} }
""", scope={'field': field}) """, scope={'field': field})
reduce_func = pymongo.code.Code(""" reduce_func = Code("""
function(key, values) { function(key, values) {
var sum = 0; var sum = 0;
for (var i in values) { for (var i in values) {
@@ -1465,14 +1589,14 @@ class QuerySet(object):
.. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work
with sharding. with sharding.
""" """
map_func = pymongo.code.Code(""" map_func = Code("""
function() { function() {
if (this.hasOwnProperty(field)) if (this.hasOwnProperty(field))
emit(1, {t: this[field] || 0, c: 1}); emit(1, {t: this[field] || 0, c: 1});
} }
""", scope={'field': field}) """, scope={'field': field})
reduce_func = pymongo.code.Code(""" reduce_func = Code("""
function(key, values) { function(key, values) {
var out = {t: 0, c: 0}; var out = {t: 0, c: 0};
for (var i in values) { for (var i in values) {
@@ -1484,7 +1608,7 @@ class QuerySet(object):
} }
""") """)
finalize_func = pymongo.code.Code(""" finalize_func = Code("""
function(key, value) { function(key, value) {
return value.t / value.c; return value.t / value.c;
} }
@@ -1526,13 +1650,20 @@ class QuerySet(object):
function() { function() {
path = '{{~%(field)s}}'.split('.'); path = '{{~%(field)s}}'.split('.');
field = this; field = this;
for (p in path) { field = field[path[p]]; } for (p in path) {
if (field)
field = field[path[p]];
else
break;
}
if (field && field.constructor == Array) { if (field && field.constructor == Array) {
field.forEach(function(item) { field.forEach(function(item) {
emit(item, 1); emit(item, 1);
}); });
} else { } else if (field) {
emit(field, 1); emit(field, 1);
} else {
emit(null, 1);
} }
} }
""" % dict(field=field) """ % dict(field=field)
@@ -1572,7 +1703,12 @@ class QuerySet(object):
var total = 0.0; var total = 0.0;
db[collection].find(query).forEach(function(doc) { db[collection].find(query).forEach(function(doc) {
field = doc; field = doc;
for (p in path) { field = field[path[p]]; } for (p in path) {
if (field)
field = field[path[p]];
else
break;
}
if (field && field.constructor == Array) { if (field && field.constructor == Array) {
total += field.length; total += field.length;
} else { } else {
@@ -1588,7 +1724,12 @@ class QuerySet(object):
} }
db[collection].find(query).forEach(function(doc) { db[collection].find(query).forEach(function(doc) {
field = doc; field = doc;
for (p in path) { field = field[path[p]]; } for (p in path) {
if (field)
field = field[path[p]];
else
break;
}
if (field && field.constructor == Array) { if (field && field.constructor == Array) {
field.forEach(function(item) { field.forEach(function(item) {
frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
@@ -1609,10 +1750,16 @@ class QuerySet(object):
def __repr__(self): def __repr__(self):
limit = REPR_OUTPUT_SIZE + 1 limit = REPR_OUTPUT_SIZE + 1
if self._limit is not None and self._limit < limit: start = (0 if self._skip is None else self._skip)
limit = self._limit if self._limit is None:
stop = start + limit
if self._limit is not None:
if self._limit - start > limit:
stop = start + limit
else:
stop = self._limit
try: try:
data = list(self[self._skip:limit]) data = list(self[start:stop])
except pymongo.errors.InvalidOperation: except pymongo.errors.InvalidOperation:
return ".. queryset mid-iteration .." return ".. queryset mid-iteration .."
if len(data) > REPR_OUTPUT_SIZE: if len(data) > REPR_OUTPUT_SIZE:
@@ -1620,13 +1767,15 @@ class QuerySet(object):
return repr(data) return repr(data)
def select_related(self, max_depth=1): def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
a maximum depth in order to cut down the number queries to mongodb. a maximum depth in order to cut down the number queries to mongodb.
.. versionadded:: 0.5 .. versionadded:: 0.5
""" """
from dereference import dereference from dereference import DeReference
return dereference(self, max_depth=max_depth) # Make select related work the same for querysets
max_depth += 1
return DeReference()(self, max_depth=max_depth)
class QuerySetManager(object): class QuerySetManager(object):

View File

@@ -42,3 +42,5 @@ pre_save = _signals.signal('pre_save')
post_save = _signals.signal('post_save') post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete') pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete') post_delete = _signals.signal('post_delete')
pre_bulk_insert = _signals.signal('pre_bulk_insert')
post_bulk_insert = _signals.signal('post_bulk_insert')

View File

@@ -1,4 +1,4 @@
from mongoengine.connection import _get_db from mongoengine.connection import get_db
class query_counter(object): class query_counter(object):
@@ -7,7 +7,7 @@ class query_counter(object):
def __init__(self): def __init__(self):
""" Construct the query_counter. """ """ Construct the query_counter. """
self.counter = 0 self.counter = 0
self.db = _get_db() self.db = get_db()
def __enter__(self): def __enter__(self):
""" On every with block we need to drop the profile collection. """ """ On every with block we need to drop the profile collection. """

74
python-mongoengine.spec Normal file
View File

@@ -0,0 +1,74 @@
# sitelib for noarch packages, sitearch for others (remove the unneeded one)
%{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")}
%{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")}
%define srcname mongoengine
Name: python-%{srcname}
Version: 0.6.6
Release: 1%{?dist}
Summary: A Python Document-Object Mapper for working with MongoDB
Group: Development/Libraries
License: MIT
URL: https://github.com/MongoEngine/mongoengine
Source0: %{srcname}-%{version}.tar.bz2
BuildRequires: python-devel
BuildRequires: python-setuptools
Requires: mongodb
Requires: pymongo
Requires: python-blinker
Requires: python-imaging
%description
MongoEngine is an ORM-like layer on top of PyMongo.
%prep
%setup -q -n %{srcname}-%{version}
%build
# Remove CFLAGS=... for noarch packages (unneeded)
CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build
%install
rm -rf $RPM_BUILD_ROOT
%{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT
%clean
rm -rf $RPM_BUILD_ROOT
%files
%defattr(-,root,root,-)
%doc docs AUTHORS LICENSE README.rst
# For noarch packages: sitelib
%{python_sitelib}/*
# For arch-specific packages: sitearch
# %{python_sitearch}/*
%changelog
* Wed Apr 24 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.6 released
* Wed Apr 18 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.5 released
* Wed Apr 18 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.4 released
* Wed Mar 24 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.3 released
* Wed Mar 22 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.2 released
* Wed Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.1 released
* Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6
- 0.6 released
* Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1
- Update to latest dev version
- Add PIL dependency for ImageField
* Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1
- Update version
* Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1
- Initial version

1
requirements.txt Normal file
View File

@@ -0,0 +1 @@
pymongo

View File

@@ -38,7 +38,9 @@ setup(name='mongoengine',
packages=find_packages(), packages=find_packages(),
author='Harry Marr', author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com', author_email='harry.marr@{nospam}gmail.com',
url='http://hmarr.com/mongoengine/', maintainer="Ross Lawley",
maintainer_email="ross.lawley@{nospam}gmail.com",
url='http://mongoengine.org/',
license='MIT', license='MIT',
include_package_data=True, include_package_data=True,
description=DESCRIPTION, description=DESCRIPTION,
@@ -47,5 +49,5 @@ setup(name='mongoengine',
classifiers=CLASSIFIERS, classifiers=CLASSIFIERS,
install_requires=['pymongo'], install_requires=['pymongo'],
test_suite='tests', test_suite='tests',
tests_require=['blinker', 'django==1.3'] tests_require=['blinker', 'django>=1.3', 'PIL']
) )

70
tests/connection.py Normal file
View File

@@ -0,0 +1,70 @@
import unittest
import pymongo
import mongoengine.connection
from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError
class ConnectionTest(unittest.TestCase):
def tearDown(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
mongoengine.connection._dbs = {}
def test_connect(self):
"""Ensure that the connect() method works properly.
"""
connect('mongoenginetest')
conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
connect('mongoenginetest2', alias='testdb')
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
def test_connect_uri(self):
"""Ensure that the connect() method works properly with uri's
"""
c = connect(db='mongoenginetest', alias='admin')
c.admin.system.users.remove({})
c.mongoenginetest.system.users.remove({})
c.admin.add_user("admin", "password")
c.admin.authenticate("admin", "password")
c.mongoenginetest.add_user("username", "password")
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
def test_register_connection(self):
"""Ensure that connections with different aliases may be registered.
"""
register_connection('testdb', 'mongoenginetest2')
self.assertRaises(ConnectionError, get_connection)
conn = get_connection('testdb')
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
db = get_db('testdb')
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest2')
if __name__ == '__main__':
unittest.main()

View File

@@ -1,7 +1,7 @@
import unittest import unittest
from mongoengine import * from mongoengine import *
from mongoengine.connection import _get_db from mongoengine.connection import get_db
from mongoengine.tests import query_counter from mongoengine.tests import query_counter
@@ -9,7 +9,7 @@ class FieldTest(unittest.TestCase):
def setUp(self): def setUp(self):
connect(db='mongoenginetest') connect(db='mongoenginetest')
self.db = _get_db() self.db = get_db()
def test_list_item_dereference(self): def test_list_item_dereference(self):
"""Ensure that DBRef items in ListFields are dereferenced. """Ensure that DBRef items in ListFields are dereferenced.
@@ -760,3 +760,53 @@ class FieldTest(unittest.TestCase):
UserB.drop_collection() UserB.drop_collection()
UserC.drop_collection() UserC.drop_collection()
Group.drop_collection() Group.drop_collection()
def test_multidirectional_lists(self):
class Asset(Document):
name = StringField(max_length=250, required=True)
parent = GenericReferenceField(default=None)
parents = ListField(GenericReferenceField())
children = ListField(GenericReferenceField())
Asset.drop_collection()
root = Asset(name='', path="/", title="Site Root")
root.save()
company = Asset(name='company', title='Company', parent=root, parents=[root])
company.save()
root.children = [company]
root.save()
root = root.reload()
self.assertEquals(root.children, [company])
self.assertEquals(company.parents, [root])
def test_dict_in_dbref_instance(self):
class Person(Document):
name = StringField(max_length=250, required=True)
class Room(Document):
number = StringField(max_length=250, required=True)
staffs_with_position = ListField(DictField())
Person.drop_collection()
Room.drop_collection()
bob = Person.objects.create(name='Bob')
bob.save()
sarah = Person.objects.create(name='Sarah')
sarah.save()
room_101 = Room.objects.create(number="101")
room_101.staffs_with_position = [
{'position_key': 'window', 'staff': sarah},
{'position_key': 'door', 'staff': bob.to_dbref()}]
room_101.save()
room = Room.objects.first().select_related()
self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
self.assertEquals(room.staffs_with_position[1]['staff'], bob)

View File

@@ -8,8 +8,14 @@ from mongoengine.django.shortcuts import get_document_or_404
from django.http import Http404 from django.http import Http404
from django.template import Context, Template from django.template import Context, Template
from django.conf import settings from django.conf import settings
from django.core.paginator import Paginator
settings.configure() settings.configure()
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
class QuerySetTest(unittest.TestCase): class QuerySetTest(unittest.TestCase):
def setUp(self): def setUp(self):
@@ -67,3 +73,33 @@ class QuerySetTest(unittest.TestCase):
self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234') self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk)) self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))
def test_pagination(self):
"""Ensure that Pagination works as expected
"""
class Page(Document):
name = StringField()
Page.drop_collection()
for i in xrange(1, 11):
Page(name=str(i)).save()
paginator = Paginator(Page.objects.all(), 2)
t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
for p in paginator.page_range:
d = {"page": paginator.page(p)}
end = p * 2
start = end - 1
self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
backend = SessionStore
def setUp(self):
connect(db='mongoenginetest')
MongoSession.drop_collection()
super(MongoDBSessionTest, self).setUp()

File diff suppressed because it is too large Load Diff

502
tests/dynamic_document.py Normal file
View File

@@ -0,0 +1,502 @@
import unittest
from mongoengine import *
from mongoengine.connection import get_db
class DynamicDocTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
class Person(DynamicDocument):
name = StringField()
meta = {'allow_inheritance': True}
Person.drop_collection()
self.Person = Person
def test_simple_dynamic_document(self):
"""Ensures simple dynamic documents are saved correctly"""
p = self.Person()
p.name = "James"
p.age = 34
self.assertEquals(p.to_mongo(),
{"_types": ["Person"], "_cls": "Person",
"name": "James", "age": 34}
)
p.save()
self.assertEquals(self.Person.objects.first().age, 34)
# Confirm no changes to self.Person
self.assertFalse(hasattr(self.Person, 'age'))
def test_dynamic_document_delta(self):
"""Ensures simple dynamic documents can delta correctly"""
p = self.Person(name="James", age=34)
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))
p.doc = 123
del(p.doc)
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))
def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
p = self.Person()
p.name = "Dean"
p.misc = 22
p.save()
p = self.Person.objects.get()
p.misc = {'hello': 'world'}
p.save()
p = self.Person.objects.get()
self.assertEquals(p.misc, {'hello': 'world'})
def test_delete_dynamic_field(self):
"""Test deleting a dynamic field works"""
self.Person.drop_collection()
p = self.Person()
p.name = "Dean"
p.misc = 22
p.save()
p = self.Person.objects.get()
p.misc = {'hello': 'world'}
p.save()
p = self.Person.objects.get()
self.assertEquals(p.misc, {'hello': 'world'})
collection = self.db[self.Person._get_collection_name()]
obj = collection.find_one()
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])
del(p.misc)
p.save()
p = self.Person.objects.get()
self.assertFalse(hasattr(p, 'misc'))
obj = collection.find_one()
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])
def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields"""
p = self.Person()
p.name = "Dean"
p.age = 22
p.save()
self.assertEquals(1, self.Person.objects(age=22).count())
p = self.Person.objects(age=22)
p = p.get()
self.assertEquals(22, p.age)
def test_complex_dynamic_document_queries(self):
class Person(DynamicDocument):
name = StringField()
Person.drop_collection()
p = Person(name="test")
p.age = "ten"
p.save()
p1 = Person(name="test1")
p1.age = "less then ten and a half"
p1.save()
p2 = Person(name="test2")
p2.age = 10
p2.save()
self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
self.assertEquals(Person.objects(age__gte=10).count(), 1)
def test_complex_data_lookups(self):
"""Ensure you can query dynamic document dynamic fields"""
p = self.Person()
p.misc = {'hello': 'world'}
p.save()
self.assertEquals(1, self.Person.objects(misc__hello='world').count())
def test_inheritance(self):
"""Ensure that dynamic document plays nice with inheritance"""
class Employee(self.Person):
salary = IntField()
Employee.drop_collection()
self.assertTrue('name' in Employee._fields)
self.assertTrue('salary' in Employee._fields)
self.assertEqual(Employee._get_collection_name(),
self.Person._get_collection_name())
joe_bloggs = Employee()
joe_bloggs.name = "Joe Bloggs"
joe_bloggs.salary = 10
joe_bloggs.age = 20
joe_bloggs.save()
self.assertEquals(1, self.Person.objects(age=20).count())
self.assertEquals(1, Employee.objects(age=20).count())
joe_bloggs = self.Person.objects.first()
self.assertTrue(isinstance(joe_bloggs, Employee))
def test_embedded_dynamic_document(self):
"""Test dynamic embedded documents"""
class Embedded(DynamicEmbeddedDocument):
pass
class Doc(DynamicDocument):
pass
Doc.drop_collection()
doc = Doc()
embedded_1 = Embedded()
embedded_1.string_field = 'hello'
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
"embedded_field": {
"_types": ['Embedded'], "_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2, {'hello': 'world'}]
}
})
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc.embedded_field.__class__, Embedded)
self.assertEquals(doc.embedded_field.string_field, "hello")
self.assertEquals(doc.embedded_field.int_field, 1)
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])
def test_complex_embedded_documents(self):
"""Test complex dynamic embedded documents setups"""
class Embedded(DynamicEmbeddedDocument):
pass
class Doc(DynamicDocument):
pass
Doc.drop_collection()
doc = Doc()
embedded_1 = Embedded()
embedded_1.string_field = 'hello'
embedded_1.int_field = 1
embedded_1.dict_field = {'hello': 'world'}
embedded_2 = Embedded()
embedded_2.string_field = 'hello'
embedded_2.int_field = 1
embedded_2.dict_field = {'hello': 'world'}
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
embedded_1.list_field = ['1', 2, embedded_2]
doc.embedded_field = embedded_1
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
"embedded_field": {
"_types": ['Embedded'], "_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2,
{"_types": ['Embedded'], "_cls": "Embedded",
"string_field": "hello",
"int_field": 1,
"dict_field": {"hello": "world"},
"list_field": ['1', 2, {'hello': 'world'}]}
]
}
})
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc.embedded_field.__class__, Embedded)
self.assertEquals(doc.embedded_field.string_field, "hello")
self.assertEquals(doc.embedded_field.int_field, 1)
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(doc.embedded_field.list_field[0], '1')
self.assertEquals(doc.embedded_field.list_field[1], 2)
embedded_field = doc.embedded_field.list_field[2]
self.assertEquals(embedded_field.__class__, Embedded)
self.assertEquals(embedded_field.string_field, "hello")
self.assertEquals(embedded_field.int_field, 1)
self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])
def test_delta_for_dynamic_documents(self):
p = self.Person()
p.name = "Dean"
p.age = 22
p.save()
p.age = 24
self.assertEquals(p.age, 24)
self.assertEquals(p._get_changed_fields(), ['age'])
self.assertEquals(p._delta(), ({'age': 24}, {}))
p = self.Person.objects(age=22).get()
p.age = 24
self.assertEquals(p.age, 24)
self.assertEquals(p._get_changed_fields(), ['age'])
self.assertEquals(p._delta(), ({'age': 24}, {}))
p.save()
self.assertEquals(1, self.Person.objects(age=24).count())
def test_delta(self):
class Doc(DynamicDocument):
pass
Doc.drop_collection()
doc = Doc()
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc._get_changed_fields(), [])
self.assertEquals(doc._delta(), ({}, {}))
doc.string_field = 'hello'
self.assertEquals(doc._get_changed_fields(), ['string_field'])
self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))
doc._changed_fields = []
doc.int_field = 1
self.assertEquals(doc._get_changed_fields(), ['int_field'])
self.assertEquals(doc._delta(), ({'int_field': 1}, {}))
doc._changed_fields = []
dict_value = {'hello': 'world', 'ping': 'pong'}
doc.dict_field = dict_value
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))
doc._changed_fields = []
list_value = ['1', 2, {'hello': 'world'}]
doc.list_field = list_value
self.assertEquals(doc._get_changed_fields(), ['list_field'])
self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))
# Test unsetting
doc._changed_fields = []
doc.dict_field = {}
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))
doc._changed_fields = []
doc.list_field = []
self.assertEquals(doc._get_changed_fields(), ['list_field'])
self.assertEquals(doc._delta(), ({}, {'list_field': 1}))
def test_delta_recursive(self):
    """Testing deltaing works with dynamic documents"""
    class Embedded(DynamicEmbeddedDocument):
        pass

    class Doc(DynamicDocument):
        pass

    Doc.drop_collection()
    doc = Doc()
    doc.save()

    # A freshly loaded document has no pending changes.
    doc = Doc.objects.first()
    self.assertEquals(doc._get_changed_fields(), [])
    self.assertEquals(doc._delta(), ({}, {}))

    # Assigning a populated embedded document marks the whole
    # embedded_field as changed on the parent.
    embedded_1 = Embedded()
    embedded_1.string_field = 'hello'
    embedded_1.int_field = 1
    embedded_1.dict_field = {'hello': 'world'}
    embedded_1.list_field = ['1', 2, {'hello': 'world'}]
    doc.embedded_field = embedded_1
    self.assertEquals(doc._get_changed_fields(), ['embedded_field'])

    # The embedded document's own delta carries only its fields ...
    embedded_delta = {
        'string_field': 'hello',
        'int_field': 1,
        'dict_field': {'hello': 'world'},
        'list_field': ['1', 2, {'hello': 'world'}]
    }
    self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
    # ... while the parent's delta additionally carries the class markers.
    embedded_delta.update({
        '_types': ['Embedded'],
        '_cls': 'Embedded',
    })
    self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))
    doc.save()
    doc.reload()

    # Emptying a nested dict yields a dotted-path unset delta.
    doc.embedded_field.dict_field = {}
    self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
    self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
    self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
    doc.save()
    doc.reload()

    # Same for a nested list.
    doc.embedded_field.list_field = []
    self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
    self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
    self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
    doc.save()
    doc.reload()

    # An embedded document placed inside a list is serialised
    # (including _cls/_types markers) within the list delta.
    embedded_2 = Embedded()
    embedded_2.string_field = 'hello'
    embedded_2.int_field = 1
    embedded_2.dict_field = {'hello': 'world'}
    embedded_2.list_field = ['1', 2, {'hello': 'world'}]
    doc.embedded_field.list_field = ['1', 2, embedded_2]
    self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
    self.assertEquals(doc.embedded_field._delta(), ({
        'list_field': ['1', 2, {
            '_cls': 'Embedded',
            '_types': ['Embedded'],
            'string_field': 'hello',
            'dict_field': {'hello': 'world'},
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
        }]
    }, {}))
    self.assertEquals(doc._delta(), ({
        'embedded_field.list_field': ['1', 2, {
            '_cls': 'Embedded',
            '_types': ['Embedded'],
            'string_field': 'hello',
            'dict_field': {'hello': 'world'},
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
        }]
    }, {}))
    doc.save()
    doc.reload()

    # After reload the embedded item is clean and round-trips its values.
    self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
    self.assertEquals(doc.embedded_field.list_field[0], '1')
    self.assertEquals(doc.embedded_field.list_field[1], 2)
    for k in doc.embedded_field.list_field[2]._fields:
        self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

    # Changing a field of an embedded document inside a list produces a
    # positional (dotted-index) delta.
    doc.embedded_field.list_field[2].string_field = 'world'
    self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
    self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
    self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
    doc.save()
    doc.reload()
    self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

    # Test multiple assignments: re-assigning the list slot widens the
    # delta back to the whole list field.
    doc.embedded_field.list_field[2].string_field = 'hello world'
    doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
    self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
    self.assertEquals(doc.embedded_field._delta(), ({
        'list_field': ['1', 2, {
            '_types': ['Embedded'],
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}]}, {}))
    self.assertEquals(doc._delta(), ({
        'embedded_field.list_field': ['1', 2, {
            '_types': ['Embedded'],
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        ]}, {}))
    doc.save()
    doc.reload()
    self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

    # Test list native methods: in-place mutations are also tracked.
    doc.embedded_field.list_field[2].list_field.pop(0)
    self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
    doc.save()
    doc.reload()

    doc.embedded_field.list_field[2].list_field.append(1)
    self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
    doc.save()
    doc.reload()
    self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

    doc.embedded_field.list_field[2].list_field.sort()
    doc.save()
    doc.reload()
    self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

    # Deleting a nested dict key re-sets the containing list value.
    del(doc.embedded_field.list_field[2].list_field[2]['hello'])
    self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
    doc.save()
    doc.reload()

    # Deleting the attribute itself yields an unset delta.
    del(doc.embedded_field.list_field[2].list_field)
    self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))
    doc.save()
    doc.reload()

    # Embedded documents stored inside a DictField also track
    # dotted-path changes.
    doc.dict_field = {'embedded': embedded_1}
    doc.save()
    doc.reload()

    doc.dict_field['embedded'].string_field = 'Hello World'
    self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
    self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))
def test_indexes(self):
    """Ensure that indexes are used when meta[indexes] is specified.
    """
    class BlogPost(DynamicDocument):
        meta = {
            'indexes': [
                '-date',
                ('category', '-date')
            ],
        }

    BlogPost.drop_collection()

    index_info = BlogPost.objects._collection.index_information()
    # Expect three indexes: _id, '-date' and ('category', '-date').
    # NB: there is no index on _types by itself, since the indexes on
    # -date and tags will both contain _types as first element in the key.
    self.assertEqual(len(index_info), 3)

    # Indexes are lazy so use list() to perform query
    list(BlogPost.objects)
    index_info = BlogPost.objects._collection.index_information()
    index_keys = [spec['key'] for name, spec in index_info.iteritems()]
    self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                    in index_keys)
    self.assertTrue([('_types', 1), ('date', -1)] in index_keys)

View File

@@ -1,20 +1,22 @@
import unittest
import datetime import datetime
import os
import unittest
import uuid
from decimal import Decimal from decimal import Decimal
import pymongo
import gridfs
from mongoengine import * from mongoengine import *
from mongoengine.connection import _get_db from mongoengine.connection import get_db
from mongoengine.base import _document_registry, NotRegistered from mongoengine.base import _document_registry, NotRegistered
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
class FieldTest(unittest.TestCase): class FieldTest(unittest.TestCase):
def setUp(self): def setUp(self):
connect(db='mongoenginetest') connect(db='mongoenginetest')
self.db = _get_db() self.db = get_db()
def test_default_values(self): def test_default_values(self):
"""Ensure that default field values are used when creating a document. """Ensure that default field values are used when creating a document.
@@ -44,6 +46,81 @@ class FieldTest(unittest.TestCase):
person = Person(age=30) person = Person(age=30)
self.assertRaises(ValidationError, person.validate) self.assertRaises(ValidationError, person.validate)
def test_not_required_handles_none_in_update(self):
"""Ensure that every fields should accept None if required is False.
"""
class HandleNoneFields(Document):
str_fld = StringField()
int_fld = IntField()
flt_fld = FloatField()
comp_dt_fld = ComplexDateTimeField()
HandleNoneFields.drop_collection()
doc = HandleNoneFields()
doc.str_fld = u'spam ham egg'
doc.int_fld = 42
doc.flt_fld = 4.2
doc.com_dt_fld = datetime.datetime.utcnow()
doc.save()
res = HandleNoneFields.objects(id=doc.id).update(
set__str_fld=None,
set__int_fld=None,
set__flt_fld=None,
set__comp_dt_fld=None,
)
self.assertEqual(res, 1)
# Retrieve data from db and verify it.
ret = HandleNoneFields.objects.all()[0]
self.assertEqual(ret.str_fld, None)
self.assertEqual(ret.int_fld, None)
self.assertEqual(ret.flt_fld, None)
# Return current time if retrieved value is None.
self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime))
def test_not_required_handles_none_from_database(self):
"""Ensure that every fields can handle null values from the database.
"""
class HandleNoneFields(Document):
str_fld = StringField(required=True)
int_fld = IntField(required=True)
flt_fld = FloatField(required=True)
comp_dt_fld = ComplexDateTimeField(required=True)
HandleNoneFields.drop_collection()
doc = HandleNoneFields()
doc.str_fld = u'spam ham egg'
doc.int_fld = 42
doc.flt_fld = 4.2
doc.com_dt_fld = datetime.datetime.utcnow()
doc.save()
collection = self.db[HandleNoneFields._get_collection_name()]
obj = collection.update({"_id": doc.id}, {"$unset": {
"str_fld": 1,
"int_fld": 1,
"flt_fld": 1,
"comp_dt_fld": 1}
})
# Retrieve data from db and verify it.
ret = HandleNoneFields.objects.all()[0]
self.assertEqual(ret.str_fld, None)
self.assertEqual(ret.int_fld, None)
self.assertEqual(ret.flt_fld, None)
# Return current time if retrieved value is None.
self.assert_(isinstance(ret.comp_dt_fld, datetime.datetime))
self.assertRaises(ValidationError, ret.validate)
def test_object_id_validation(self): def test_object_id_validation(self):
"""Ensure that invalid values cannot be assigned to string fields. """Ensure that invalid values cannot be assigned to string fields.
""" """
@@ -175,6 +252,26 @@ class FieldTest(unittest.TestCase):
person.admin = 'Yes' person.admin = 'Yes'
self.assertRaises(ValidationError, person.validate) self.assertRaises(ValidationError, person.validate)
def test_uuid_validation(self):
"""Ensure that invalid values cannot be assigned to UUID fields.
"""
class Person(Document):
api_key = UUIDField()
person = Person()
# any uuid type is valid
person.api_key = uuid.uuid4()
person.validate()
person.api_key = uuid.uuid1()
person.validate()
# last g cannot belong to an hex number
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g'
self.assertRaises(ValidationError, person.validate)
# short strings don't validate
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113'
self.assertRaises(ValidationError, person.validate)
def test_datetime_validation(self): def test_datetime_validation(self):
"""Ensure that invalid values cannot be assigned to datetime fields. """Ensure that invalid values cannot be assigned to datetime fields.
""" """
@@ -459,6 +556,31 @@ class FieldTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
def test_reverse_list_sorting(self):
'''Ensure that a reverse sorted list field properly sorts values'''
class Category(EmbeddedDocument):
count = IntField()
name = StringField()
class CategoryList(Document):
categories = SortedListField(EmbeddedDocumentField(Category), ordering='count', reverse=True)
name = StringField()
catlist = CategoryList(name="Top categories")
cat1 = Category(name='posts', count=10)
cat2 = Category(name='food', count=100)
cat3 = Category(name='drink', count=40)
catlist.categories = [cat1, cat2, cat3]
catlist.save()
catlist.reload()
self.assertEqual(catlist.categories[0].name, cat2.name)
self.assertEqual(catlist.categories[1].name, cat3.name)
self.assertEqual(catlist.categories[2].name, cat1.name)
CategoryList.drop_collection()
def test_list_field(self): def test_list_field(self):
"""Ensure that list types work as expected. """Ensure that list types work as expected.
""" """
@@ -485,7 +607,6 @@ class FieldTest(unittest.TestCase):
post.info = [{'test': 3}] post.info = [{'test': 3}]
post.save() post.save()
self.assertEquals(BlogPost.objects.count(), 3) self.assertEquals(BlogPost.objects.count(), 3)
self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1) self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1)
self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1) self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1)
@@ -495,6 +616,21 @@ class FieldTest(unittest.TestCase):
self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
BlogPost.drop_collection() BlogPost.drop_collection()
def test_list_field_passed_in_value(self):
class Foo(Document):
bars = ListField(ReferenceField("Bar"))
class Bar(Document):
text = StringField()
bar = Bar(text="hi")
bar.save()
foo = Foo(bars=[])
foo.bars.append(bar)
self.assertEquals(repr(foo.bars), '[<Bar: Bar object>]')
def test_list_field_strict(self): def test_list_field_strict(self):
"""Ensure that list field handles validation if provided a strict field type.""" """Ensure that list field handles validation if provided a strict field type."""
@@ -515,6 +651,39 @@ class FieldTest(unittest.TestCase):
Simple.drop_collection() Simple.drop_collection()
def test_list_field_rejects_strings(self):
"""Strings aren't valid list field data types"""
class Simple(Document):
mapping = ListField()
Simple.drop_collection()
e = Simple()
e.mapping = 'hello world'
self.assertRaises(ValidationError, e.save)
def test_complex_field_required(self):
"""Ensure required cant be None / Empty"""
class Simple(Document):
mapping = ListField(required=True)
Simple.drop_collection()
e = Simple()
e.mapping = []
self.assertRaises(ValidationError, e.save)
class Simple(Document):
mapping = DictField(required=True)
Simple.drop_collection()
e = Simple()
e.mapping = {}
self.assertRaises(ValidationError, e.save)
def test_list_field_complex(self): def test_list_field_complex(self):
"""Ensure that the list fields can handle the complex types.""" """Ensure that the list fields can handle the complex types."""
@@ -582,6 +751,9 @@ class FieldTest(unittest.TestCase):
post.info = {'the.title': 'test'} post.info = {'the.title': 'test'}
self.assertRaises(ValidationError, post.validate) self.assertRaises(ValidationError, post.validate)
post.info = {1: 'test'}
self.assertRaises(ValidationError, post.validate)
post.info = {'title': 'test'} post.info = {'title': 'test'}
post.save() post.save()
@@ -600,6 +772,13 @@ class FieldTest(unittest.TestCase):
# Confirm handles non strings or non existing keys # Confirm handles non strings or non existing keys
self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0) self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
post = BlogPost.objects.create(info={'title': 'original'})
post.info.update({'title': 'updated'})
post.save()
post.reload()
self.assertEquals('updated', post.info['title'])
BlogPost.drop_collection() BlogPost.drop_collection()
def test_dictfield_strict(self): def test_dictfield_strict(self):
@@ -942,15 +1121,29 @@ class FieldTest(unittest.TestCase):
class Company(Document): class Company(Document):
name = StringField() name = StringField()
Product.drop_collection()
Company.drop_collection()
ten_gen = Company(name='10gen') ten_gen = Company(name='10gen')
ten_gen.save() ten_gen.save()
mongodb = Product(name='MongoDB', company=ten_gen) mongodb = Product(name='MongoDB', company=ten_gen)
mongodb.save() mongodb.save()
me = Product(name='MongoEngine')
me.save()
obj = Product.objects(company=ten_gen).first() obj = Product.objects(company=ten_gen).first()
self.assertEqual(obj, mongodb) self.assertEqual(obj, mongodb)
self.assertEqual(obj.company, ten_gen) self.assertEqual(obj.company, ten_gen)
obj = Product.objects(company=None).first()
self.assertEqual(obj, me)
obj, created = Product.objects.get_or_create(company=None)
self.assertEqual(created, False)
self.assertEqual(obj, me)
def test_reference_query_conversion(self): def test_reference_query_conversion(self):
"""Ensure that ReferenceFields can be queried using objects and values """Ensure that ReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object. of the type of the primary key of the referenced object.
@@ -1062,7 +1255,6 @@ class FieldTest(unittest.TestCase):
Post.drop_collection() Post.drop_collection()
User.drop_collection() User.drop_collection()
def test_generic_reference_document_not_registered(self): def test_generic_reference_document_not_registered(self):
"""Ensure dereferencing out of the document registry throws a """Ensure dereferencing out of the document registry throws a
`NotRegistered` error. `NotRegistered` error.
@@ -1089,7 +1281,7 @@ class FieldTest(unittest.TestCase):
user = User.objects.first() user = User.objects.first()
try: try:
user.bookmarks user.bookmarks
raise AssertionError, "Link was removed from the registry" raise AssertionError("Link was removed from the registry")
except NotRegistered: except NotRegistered:
pass pass
@@ -1213,6 +1405,53 @@ class FieldTest(unittest.TestCase):
Shirt.drop_collection() Shirt.drop_collection()
def test_simple_choices_validation(self):
"""Ensure that value is in a container of allowed values.
"""
class Shirt(Document):
size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL'))
Shirt.drop_collection()
shirt = Shirt()
shirt.validate()
shirt.size = "S"
shirt.validate()
shirt.size = "XS"
self.assertRaises(ValidationError, shirt.validate)
Shirt.drop_collection()
def test_simple_choices_get_field_display(self):
"""Test dynamic helper for returning the display value of a choices field.
"""
class Shirt(Document):
size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL'))
style = StringField(max_length=3, choices=('Small', 'Baggy', 'wide'), default='Small')
Shirt.drop_collection()
shirt = Shirt()
self.assertEqual(shirt.get_size_display(), None)
self.assertEqual(shirt.get_style_display(), 'Small')
shirt.size = "XXL"
shirt.style = "Baggy"
self.assertEqual(shirt.get_size_display(), 'XXL')
self.assertEqual(shirt.get_style_display(), 'Baggy')
# Set as Z - an invalid choice
shirt.size = "Z"
shirt.style = "Z"
self.assertEqual(shirt.get_size_display(), 'Z')
self.assertEqual(shirt.get_style_display(), 'Z')
self.assertRaises(ValidationError, shirt.validate)
Shirt.drop_collection()
def test_file_fields(self): def test_file_fields(self):
"""Ensure that file fields can be written to and their data retrieved """Ensure that file fields can be written to and their data retrieved
""" """
@@ -1289,7 +1528,7 @@ class FieldTest(unittest.TestCase):
# Make sure FileField is optional and not required # Make sure FileField is optional and not required
class DemoFile(Document): class DemoFile(Document):
file = FileField() file = FileField()
d = DemoFile.objects.create() DemoFile.objects.create()
def test_file_uniqueness(self): def test_file_uniqueness(self):
"""Ensure that each instance of a FileField is unique """Ensure that each instance of a FileField is unique
@@ -1328,6 +1567,95 @@ class FieldTest(unittest.TestCase):
TestFile.drop_collection() TestFile.drop_collection()
def test_image_field(self):
class TestImage(Document):
image = ImageField()
TestImage.drop_collection()
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'r'))
t.save()
t = TestImage.objects.first()
self.assertEquals(t.image.format, 'PNG')
w, h = t.image.size
self.assertEquals(w, 371)
self.assertEquals(h, 76)
t.image.delete()
def test_image_field_resize(self):
class TestImage(Document):
image = ImageField(size=(185, 37))
TestImage.drop_collection()
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'r'))
t.save()
t = TestImage.objects.first()
self.assertEquals(t.image.format, 'PNG')
w, h = t.image.size
self.assertEquals(w, 185)
self.assertEquals(h, 37)
t.image.delete()
def test_image_field_thumbnail(self):
class TestImage(Document):
image = ImageField(thumbnail_size=(92, 18))
TestImage.drop_collection()
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'r'))
t.save()
t = TestImage.objects.first()
self.assertEquals(t.image.thumbnail.format, 'PNG')
self.assertEquals(t.image.thumbnail.width, 92)
self.assertEquals(t.image.thumbnail.height, 18)
t.image.delete()
def test_file_multidb(self):
register_connection('testfiles', 'testfiles')
class TestFile(Document):
name = StringField()
file = FileField(db_alias="testfiles",
collection_name="macumba")
TestFile.drop_collection()
# delete old filesystem
get_db("testfiles").macumba.files.drop()
get_db("testfiles").macumba.chunks.drop()
# First instance
testfile = TestFile()
testfile.name = "Hello, World!"
testfile.file.put('Hello, World!',
name="hello.txt")
testfile.save()
data = get_db("testfiles").macumba.files.find_one()
self.assertEquals(data.get('name'), 'hello.txt')
testfile = TestFile.objects.first()
self.assertEquals(testfile.file.read(),
'Hello, World!')
def test_geo_indexes(self): def test_geo_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields. """Ensure that indexes are created automatically for GeoPointFields.
""" """
@@ -1488,7 +1816,6 @@ class FieldTest(unittest.TestCase):
c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
self.assertEqual(c['next'], 10) self.assertEqual(c['next'], 10)
def test_generic_embedded_document(self): def test_generic_embedded_document(self):
class Car(EmbeddedDocument): class Car(EmbeddedDocument):
name = StringField() name = StringField()
@@ -1514,5 +1841,88 @@ class FieldTest(unittest.TestCase):
person = Person.objects.first() person = Person.objects.first()
self.assertTrue(isinstance(person.like, Dish)) self.assertTrue(isinstance(person.like, Dish))
def test_recursive_validation(self):
"""Ensure that a validation result to_dict is available.
"""
class Author(EmbeddedDocument):
name = StringField(required=True)
class Comment(EmbeddedDocument):
author = EmbeddedDocumentField(Author, required=True)
content = StringField(required=True)
class Post(Document):
title = StringField(required=True)
comments = ListField(EmbeddedDocumentField(Comment))
bob = Author(name='Bob')
post = Post(title='hello world')
post.comments.append(Comment(content='hello', author=bob))
post.comments.append(Comment(author=bob))
try:
post.validate()
except ValidationError, error:
pass
# ValidationError.errors property
self.assertTrue(hasattr(error, 'errors'))
self.assertTrue(isinstance(error.errors, dict))
self.assertTrue('comments' in error.errors)
self.assertTrue(1 in error.errors['comments'])
self.assertTrue(isinstance(error.errors['comments'][1]['content'],
ValidationError))
# ValidationError.schema property
error_dict = error.to_dict()
self.assertTrue(isinstance(error_dict, dict))
self.assertTrue('comments' in error_dict)
self.assertTrue(1 in error_dict['comments'])
self.assertTrue('content' in error_dict['comments'][1])
self.assertEquals(error_dict['comments'][1]['content'],
u'Field is required ("content")')
post.comments[1].content = 'here we go'
post.validate()
class ValidatorErrorTest(unittest.TestCase):
def test_to_dict(self):
"""Ensure a ValidationError handles error to_dict correctly.
"""
error = ValidationError('root')
self.assertEquals(error.to_dict(), {})
# 1st level error schema
error.errors = {'1st': ValidationError('bad 1st'), }
self.assertTrue('1st' in error.to_dict())
self.assertEquals(error.to_dict()['1st'], 'bad 1st')
# 2nd level error schema
error.errors = {'1st': ValidationError('bad 1st', errors={
'2nd': ValidationError('bad 2nd'),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd')
# moar levels
error.errors = {'1st': ValidationError('bad 1st', errors={
'2nd': ValidationError('bad 2nd', errors={
'3rd': ValidationError('bad 3rd', errors={
'4th': ValidationError('Inception'),
}),
}),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
'Inception')
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@@ -1,9 +1,6 @@
from datetime import datetime from datetime import datetime
import pymongo
from mongoengine import * from mongoengine import *
from mongoengine.base import BaseField
from mongoengine.connection import _get_db
class PickleEmbedded(EmbeddedDocument): class PickleEmbedded(EmbeddedDocument):
@@ -15,6 +12,7 @@ class PickleTest(Document):
string = StringField(choices=(('One', '1'), ('Two', '2'))) string = StringField(choices=(('One', '1'), ('Two', '2')))
embedded = EmbeddedDocumentField(PickleEmbedded) embedded = EmbeddedDocumentField(PickleEmbedded)
lists = ListField(StringField()) lists = ListField(StringField())
photo = FileField()
class Mixin(object): class Mixin(object):
@@ -22,4 +20,4 @@ class Mixin(object):
class Base(Document): class Base(Document):
pass meta = {'allow_inheritance': True}

BIN
tests/mongoengine.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.1 KiB

View File

@@ -1,13 +1,14 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import unittest import unittest
import pymongo import pymongo
from bson import ObjectId
from datetime import datetime, timedelta from datetime import datetime, timedelta
from mongoengine.queryset import (QuerySet, QuerySetManager, from mongoengine.queryset import (QuerySet, QuerySetManager,
MultipleObjectsReturned, DoesNotExist, MultipleObjectsReturned, DoesNotExist,
QueryFieldList) QueryFieldList)
from mongoengine import * from mongoengine import *
from mongoengine.connection import _get_connection from mongoengine.connection import get_connection
from mongoengine.tests import query_counter from mongoengine.tests import query_counter
@@ -19,6 +20,7 @@ class QuerySetTest(unittest.TestCase):
class Person(Document): class Person(Document):
name = StringField() name = StringField()
age = IntField() age = IntField()
meta = {'allow_inheritance': True}
self.Person = Person self.Person = Person
def test_initialisation(self): def test_initialisation(self):
@@ -59,8 +61,7 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(len(people), 2) self.assertEqual(len(people), 2)
results = list(people) results = list(people)
self.assertTrue(isinstance(results[0], self.Person)) self.assertTrue(isinstance(results[0], self.Person))
self.assertTrue(isinstance(results[0].id, (pymongo.objectid.ObjectId, self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode)))
str, unicode)))
self.assertEqual(results[0].name, "User A") self.assertEqual(results[0].name, "User A")
self.assertEqual(results[0].age, 20) self.assertEqual(results[0].age, 20)
self.assertEqual(results[1].name, "User B") self.assertEqual(results[1].name, "User B")
@@ -110,6 +111,16 @@ class QuerySetTest(unittest.TestCase):
people = list(self.Person.objects[80000:80001]) people = list(self.Person.objects[80000:80001])
self.assertEqual(len(people), 0) self.assertEqual(len(people), 0)
# Test larger slice __repr__
self.Person.objects.delete()
for i in xrange(55):
self.Person(name='A%s' % i, age=i).save()
self.assertEqual(len(self.Person.objects), 55)
self.assertEqual("Person object", "%s" % self.Person.objects[0])
self.assertEqual("[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[1:3])
self.assertEqual("[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[51:53])
def test_find_one(self): def test_find_one(self):
"""Ensure that a query using find_one returns a valid result. """Ensure that a query using find_one returns a valid result.
""" """
@@ -144,6 +155,8 @@ class QuerySetTest(unittest.TestCase):
person = self.Person.objects.with_id(person1.id) person = self.Person.objects.with_id(person1.id)
self.assertEqual(person.name, "User A") self.assertEqual(person.name, "User A")
self.assertRaises(InvalidQueryError, self.Person.objects(name="User A").with_id, person1.id)
def test_find_only_one(self): def test_find_only_one(self):
"""Ensure that a query using ``get`` returns at most one result. """Ensure that a query using ``get`` returns at most one result.
""" """
@@ -316,11 +329,11 @@ class QuerySetTest(unittest.TestCase):
BlogPost(title="ABC", comments=[c1, c2]).save() BlogPost(title="ABC", comments=[c1, c2]).save()
BlogPost.objects(comments__by="joe").update(inc__comments__S__votes=1) BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1)
post = BlogPost.objects.first() post = BlogPost.objects.first()
self.assertEquals(post.comments[0].by, 'joe') self.assertEquals(post.comments[1].by, 'jane')
self.assertEquals(post.comments[0].votes, 4) self.assertEquals(post.comments[1].votes, 8)
# Currently the $ operator only applies to the first matched item in # Currently the $ operator only applies to the first matched item in
# the query # the query
@@ -368,6 +381,34 @@ class QuerySetTest(unittest.TestCase):
self.assertRaises(OperationError, update_nested) self.assertRaises(OperationError, update_nested)
Simple.drop_collection() Simple.drop_collection()
def test_update_using_positional_operator_embedded_document(self):
"""Ensure that the embedded documents can be updated using the positional
operator."""
class Vote(EmbeddedDocument):
score = IntField()
class Comment(EmbeddedDocument):
by = StringField()
votes = EmbeddedDocumentField(Vote)
class BlogPost(Document):
title = StringField()
comments = ListField(EmbeddedDocumentField(Comment))
BlogPost.drop_collection()
c1 = Comment(by="joe", votes=Vote(score=3))
c2 = Comment(by="jane", votes=Vote(score=7))
BlogPost(title="ABC", comments=[c1, c2]).save()
BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4))
post = BlogPost.objects.first()
self.assertEquals(post.comments[0].by, 'joe')
self.assertEquals(post.comments[0].votes.score, 4)
def test_mapfield_update(self): def test_mapfield_update(self):
"""Ensure that the MapField can be updated.""" """Ensure that the MapField can be updated."""
class Member(EmbeddedDocument): class Member(EmbeddedDocument):
@@ -455,6 +496,9 @@ class QuerySetTest(unittest.TestCase):
Blog.drop_collection() Blog.drop_collection()
# Recreates the collection
self.assertEqual(0, Blog.objects.count())
with query_counter() as q: with query_counter() as q:
self.assertEqual(q, 0) self.assertEqual(q, 0)
@@ -468,10 +512,10 @@ class QuerySetTest(unittest.TestCase):
blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))
Blog.objects.insert(blogs, load_bulk=False) Blog.objects.insert(blogs, load_bulk=False)
self.assertEqual(q, 2) # 1 for the inital connection and 1 for the insert self.assertEqual(q, 1) # 1 for the insert
Blog.objects.insert(blogs) Blog.objects.insert(blogs)
self.assertEqual(q, 4) # 1 for insert, and 1 for in bulk self.assertEqual(q, 3) # 1 for insert, and 1 for in bulk fetch (3 in total)
Blog.drop_collection() Blog.drop_collection()
@@ -567,7 +611,13 @@ class QuerySetTest(unittest.TestCase):
people1 = [person for person in queryset] people1 = [person for person in queryset]
people2 = [person for person in queryset] people2 = [person for person in queryset]
# Check that it still works even if iteration is interrupted.
for person in queryset:
break
people3 = [person for person in queryset]
self.assertEqual(people1, people2) self.assertEqual(people1, people2)
self.assertEqual(people1, people3)
def test_repr_iteration(self): def test_repr_iteration(self):
"""Ensure that QuerySet __repr__ can handle loops """Ensure that QuerySet __repr__ can handle loops
@@ -1371,20 +1421,39 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
def test_update_pull(self): def test_update_push_and_pull(self):
"""Ensure that the 'pull' update operation works correctly. """Ensure that the 'pull' update operation works correctly.
""" """
class BlogPost(Document): class BlogPost(Document):
slug = StringField() slug = StringField()
tags = ListField(StringField()) tags = ListField(StringField())
post = BlogPost(slug="test", tags=['code', 'mongodb', 'code']) BlogPost.drop_collection()
post = BlogPost(slug="test")
post.save() post.save()
BlogPost.objects.filter(id=post.id).update(push__tags="code")
post.reload()
self.assertEqual(post.tags, ["code"])
BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"])
post.reload()
self.assertEqual(post.tags, ["code", "mongodb", "code"])
BlogPost.objects(slug="test").update(pull__tags="code") BlogPost.objects(slug="test").update(pull__tags="code")
post.reload() post.reload()
self.assertTrue('code' not in post.tags) self.assertEqual(post.tags, ["mongodb"])
self.assertEqual(len(post.tags), 1)
BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"])
post.reload()
self.assertEqual(post.tags, [])
BlogPost.objects(slug="test").update(__raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}})
post.reload()
self.assertEqual(post.tags, ["code", "mongodb"])
def test_update_one_pop_generic_reference(self): def test_update_one_pop_generic_reference(self):
@@ -1449,6 +1518,37 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
def test_set_list_embedded_documents(self):
class Author(EmbeddedDocument):
name = StringField()
class Message(Document):
title = StringField()
authors = ListField(EmbeddedDocumentField('Author'))
Message.drop_collection()
message = Message(title="hello", authors=[Author(name="Harry")])
message.save()
Message.objects(authors__name="Harry").update_one(
set__authors__S=Author(name="Ross"))
message = message.reload()
self.assertEquals(message.authors[0].name, "Ross")
Message.objects(authors__name="Ross").update_one(
set__authors=[Author(name="Harry"),
Author(name="Ross"),
Author(name="Adam")])
message = message.reload()
self.assertEquals(message.authors[0].name, "Harry")
self.assertEquals(message.authors[1].name, "Ross")
self.assertEquals(message.authors[2].name, "Adam")
def test_order_by(self): def test_order_by(self):
"""Ensure that QuerySets may be ordered. """Ensure that QuerySets may be ordered.
""" """
@@ -1840,6 +1940,35 @@ class QuerySetTest(unittest.TestCase):
freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True) freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True)
self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) self.assertEquals(freq, {'CRB': 0.5, None: 0.5})
def test_item_frequencies_with_null_embedded(self):
class Data(EmbeddedDocument):
name = StringField()
class Extra(EmbeddedDocument):
tag = StringField()
class Person(Document):
data = EmbeddedDocumentField(Data, required=True)
extra = EmbeddedDocumentField(Extra)
Person.drop_collection()
p = Person()
p.data = Data(name="Wilson Jr")
p.save()
p = Person()
p.data = Data(name="Wesley")
p.extra = Extra(tag="friend")
p.save()
ot = Person.objects.item_frequencies('extra.tag', map_reduce=False)
self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
def test_average(self): def test_average(self):
"""Ensure that field can be averaged correctly. """Ensure that field can be averaged correctly.
""" """
@@ -1882,6 +2011,24 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(set(self.Person.objects(age=30).distinct('name')), self.assertEqual(set(self.Person.objects(age=30).distinct('name')),
set(['Mr Orange', 'Mr Pink'])) set(['Mr Orange', 'Mr Pink']))
def test_distinct_handles_references(self):
class Foo(Document):
bar = ReferenceField("Bar")
class Bar(Document):
text = StringField()
Bar.drop_collection()
Foo.drop_collection()
bar = Bar(text="hi")
bar.save()
foo = Foo(bar=bar)
foo.save()
self.assertEquals(Foo.objects.distinct("bar"), [bar])
def test_custom_manager(self): def test_custom_manager(self):
"""Ensure that custom QuerySetManager instances work as expected. """Ensure that custom QuerySetManager instances work as expected.
""" """
@@ -2200,7 +2347,7 @@ class QuerySetTest(unittest.TestCase):
# check that polygon works for users who have a server >= 1.9 # check that polygon works for users who have a server >= 1.9
server_version = tuple( server_version = tuple(
_get_connection().server_info()['version'].split('.') get_connection().server_info()['version'].split('.')
) )
required_version = tuple("1.9.0".split(".")) required_version = tuple("1.9.0".split("."))
if server_version >= required_version: if server_version >= required_version:
@@ -2569,6 +2716,265 @@ class QuerySetTest(unittest.TestCase):
self.assertRaises(TypeError, invalid_where) self.assertRaises(TypeError, invalid_where)
def test_scalar(self):
class Organization(Document):
id = ObjectIdField('_id')
name = StringField()
class User(Document):
id = ObjectIdField('_id')
name = StringField()
organization = ObjectIdField()
User.drop_collection()
Organization.drop_collection()
whitehouse = Organization(name="White House")
whitehouse.save()
User(name="Bob Dole", organization=whitehouse.id).save()
# Efficient way to get all unique organization names for a given
# set of users (Pretend this has additional filtering.)
user_orgs = set(User.objects.scalar('organization'))
orgs = Organization.objects(id__in=user_orgs).scalar('name')
self.assertEqual(list(orgs), ['White House'])
# Efficient for generating listings, too.
orgs = Organization.objects.scalar('name').in_bulk(list(user_orgs))
user_map = User.objects.scalar('name', 'organization')
user_listing = [(user, orgs[org]) for user, org in user_map]
self.assertEqual([("Bob Dole", "White House")], user_listing)
def test_scalar_simple(self):
class TestDoc(Document):
x = IntField()
y = BooleanField()
TestDoc.drop_collection()
TestDoc(x=10, y=True).save()
TestDoc(x=20, y=False).save()
TestDoc(x=30, y=True).save()
plist = list(TestDoc.objects.scalar('x', 'y'))
self.assertEqual(len(plist), 3)
self.assertEqual(plist[0], (10, True))
self.assertEqual(plist[1], (20, False))
self.assertEqual(plist[2], (30, True))
class UserDoc(Document):
name = StringField()
age = IntField()
UserDoc.drop_collection()
UserDoc(name="Wilson Jr", age=19).save()
UserDoc(name="Wilson", age=43).save()
UserDoc(name="Eliana", age=37).save()
UserDoc(name="Tayza", age=15).save()
ulist = list(UserDoc.objects.scalar('name', 'age'))
self.assertEqual(ulist, [
(u'Wilson Jr', 19),
(u'Wilson', 43),
(u'Eliana', 37),
(u'Tayza', 15)])
ulist = list(UserDoc.objects.scalar('name').order_by('age'))
self.assertEqual(ulist, [
(u'Tayza'),
(u'Wilson Jr'),
(u'Eliana'),
(u'Wilson')])
def test_scalar_embedded(self):
class Profile(EmbeddedDocument):
name = StringField()
age = IntField()
class Locale(EmbeddedDocument):
city = StringField()
country = StringField()
class Person(Document):
profile = EmbeddedDocumentField(Profile)
locale = EmbeddedDocumentField(Locale)
Person.drop_collection()
Person(profile=Profile(name="Wilson Jr", age=19),
locale=Locale(city="Corumba-GO", country="Brazil")).save()
Person(profile=Profile(name="Gabriel Falcao", age=23),
locale=Locale(city="New York", country="USA")).save()
Person(profile=Profile(name="Lincoln de souza", age=28),
locale=Locale(city="Belo Horizonte", country="Brazil")).save()
Person(profile=Profile(name="Walter cruz", age=30),
locale=Locale(city="Brasilia", country="Brazil")).save()
self.assertEqual(
list(Person.objects.order_by('profile__age').scalar('profile__name')),
[u'Wilson Jr', u'Gabriel Falcao', u'Lincoln de souza', u'Walter cruz'])
ulist = list(Person.objects.order_by('locale.city')
.scalar('profile__name', 'profile__age', 'locale__city'))
self.assertEqual(ulist,
[(u'Lincoln de souza', 28, u'Belo Horizonte'),
(u'Walter cruz', 30, u'Brasilia'),
(u'Wilson Jr', 19, u'Corumba-GO'),
(u'Gabriel Falcao', 23, u'New York')])
def test_scalar_decimal(self):
from decimal import Decimal
class Person(Document):
name = StringField()
rating = DecimalField()
Person.drop_collection()
Person(name="Wilson Jr", rating=Decimal('1.0')).save()
ulist = list(Person.objects.scalar('name', 'rating'))
self.assertEqual(ulist, [(u'Wilson Jr', Decimal('1.0'))])
def test_scalar_reference_field(self):
class State(Document):
name = StringField()
class Person(Document):
name = StringField()
state = ReferenceField(State)
State.drop_collection()
Person.drop_collection()
s1 = State(name="Goias")
s1.save()
Person(name="Wilson JR", state=s1).save()
plist = list(Person.objects.scalar('name', 'state'))
self.assertEqual(plist, [(u'Wilson JR', s1)])
def test_scalar_generic_reference_field(self):
class State(Document):
name = StringField()
class Person(Document):
name = StringField()
state = GenericReferenceField()
State.drop_collection()
Person.drop_collection()
s1 = State(name="Goias")
s1.save()
Person(name="Wilson JR", state=s1).save()
plist = list(Person.objects.scalar('name', 'state'))
self.assertEqual(plist, [(u'Wilson JR', s1)])
def test_scalar_db_field(self):
class TestDoc(Document):
x = IntField()
y = BooleanField()
TestDoc.drop_collection()
TestDoc(x=10, y=True).save()
TestDoc(x=20, y=False).save()
TestDoc(x=30, y=True).save()
plist = list(TestDoc.objects.scalar('x', 'y'))
self.assertEqual(len(plist), 3)
self.assertEqual(plist[0], (10, True))
self.assertEqual(plist[1], (20, False))
self.assertEqual(plist[2], (30, True))
def test_scalar_cursor_behaviour(self):
"""Ensure that a query returns a valid set of results.
"""
person1 = self.Person(name="User A", age=20)
person1.save()
person2 = self.Person(name="User B", age=30)
person2.save()
# Find all people in the collection
people = self.Person.objects.scalar('name')
self.assertEqual(len(people), 2)
results = list(people)
self.assertEqual(results[0], "User A")
self.assertEqual(results[1], "User B")
# Use a query to filter the people found to just person1
people = self.Person.objects(age=20).scalar('name')
self.assertEqual(len(people), 1)
person = people.next()
self.assertEqual(person, "User A")
# Test limit
people = list(self.Person.objects.limit(1).scalar('name'))
self.assertEqual(len(people), 1)
self.assertEqual(people[0], 'User A')
# Test skip
people = list(self.Person.objects.skip(1).scalar('name'))
self.assertEqual(len(people), 1)
self.assertEqual(people[0], 'User B')
person3 = self.Person(name="User C", age=40)
person3.save()
# Test slice limit
people = list(self.Person.objects[:2].scalar('name'))
self.assertEqual(len(people), 2)
self.assertEqual(people[0], 'User A')
self.assertEqual(people[1], 'User B')
# Test slice skip
people = list(self.Person.objects[1:].scalar('name'))
self.assertEqual(len(people), 2)
self.assertEqual(people[0], 'User B')
self.assertEqual(people[1], 'User C')
# Test slice limit and skip
people = list(self.Person.objects[1:2].scalar('name'))
self.assertEqual(len(people), 1)
self.assertEqual(people[0], 'User B')
people = list(self.Person.objects[1:1].scalar('name'))
self.assertEqual(len(people), 0)
# Test slice out of range
people = list(self.Person.objects.scalar('name')[80000:80001])
self.assertEqual(len(people), 0)
# Test larger slice __repr__
self.Person.objects.delete()
for i in xrange(55):
self.Person(name='A%s' % i, age=i).save()
self.assertEqual(len(self.Person.objects.scalar('name')), 55)
self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first())
self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0])
self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])
# with_id and in_bulk
person = self.Person.objects.order_by('name').first()
self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id))
pks = self.Person.objects.order_by('age').scalar('pk')[1:3]
self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
class QTest(unittest.TestCase): class QTest(unittest.TestCase):
@@ -2790,6 +3196,30 @@ class QueryFieldListTest(unittest.TestCase):
q += QueryFieldList(fields=['a'], value={"$slice": 5}) q += QueryFieldList(fields=['a'], value={"$slice": 5})
self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})
def test_elem_match(self):
class Foo(EmbeddedDocument):
shape = StringField()
color = StringField()
trick = BooleanField()
meta = {'allow_inheritance': False}
class Bar(Document):
foo = ListField(EmbeddedDocumentField(Foo))
meta = {'allow_inheritance': False}
Bar.drop_collection()
b1 = Bar(foo=[Foo(shape= "square", color ="purple", thick = False),
Foo(shape= "circle", color ="red", thick = True)])
b1.save()
b2 = Bar(foo=[Foo(shape= "square", color ="red", thick = True),
Foo(shape= "circle", color ="purple", thick = False)])
b2.save()
ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"}))
self.assertEqual([b1], ak)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@@ -56,6 +56,18 @@ class SignalTests(unittest.TestCase):
@classmethod @classmethod
def post_delete(cls, sender, document, **kwargs): def post_delete(cls, sender, document, **kwargs):
signal_output.append('post_delete signal, %s' % document) signal_output.append('post_delete signal, %s' % document)
@classmethod
def pre_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('pre_bulk_insert signal, %s' % documents)
@classmethod
def post_bulk_insert(cls, sender, documents, **kwargs):
signal_output.append('post_bulk_insert signal, %s' % documents)
if kwargs.get('loaded', False):
signal_output.append('Is loaded')
else:
signal_output.append('Not loaded')
self.Author = Author self.Author = Author
@@ -104,7 +116,9 @@ class SignalTests(unittest.TestCase):
len(signals.pre_save.receivers), len(signals.pre_save.receivers),
len(signals.post_save.receivers), len(signals.post_save.receivers),
len(signals.pre_delete.receivers), len(signals.pre_delete.receivers),
len(signals.post_delete.receivers) len(signals.post_delete.receivers),
len(signals.pre_bulk_insert.receivers),
len(signals.post_bulk_insert.receivers),
) )
signals.pre_init.connect(Author.pre_init, sender=Author) signals.pre_init.connect(Author.pre_init, sender=Author)
@@ -113,6 +127,8 @@ class SignalTests(unittest.TestCase):
signals.post_save.connect(Author.post_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author)
signals.pre_delete.connect(Author.pre_delete, sender=Author) signals.pre_delete.connect(Author.pre_delete, sender=Author)
signals.post_delete.connect(Author.post_delete, sender=Author) signals.post_delete.connect(Author.post_delete, sender=Author)
signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)
signals.pre_init.connect(Another.pre_init, sender=Another) signals.pre_init.connect(Another.pre_init, sender=Another)
signals.post_init.connect(Another.post_init, sender=Another) signals.post_init.connect(Another.post_init, sender=Another)
@@ -128,6 +144,8 @@ class SignalTests(unittest.TestCase):
signals.pre_delete.disconnect(self.Author.pre_delete) signals.pre_delete.disconnect(self.Author.pre_delete)
signals.post_save.disconnect(self.Author.post_save) signals.post_save.disconnect(self.Author.post_save)
signals.pre_save.disconnect(self.Author.pre_save) signals.pre_save.disconnect(self.Author.pre_save)
signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)
signals.pre_init.disconnect(self.Another.pre_init) signals.pre_init.disconnect(self.Another.pre_init)
signals.post_init.disconnect(self.Another.post_init) signals.post_init.disconnect(self.Another.post_init)
@@ -143,7 +161,9 @@ class SignalTests(unittest.TestCase):
len(signals.pre_save.receivers), len(signals.pre_save.receivers),
len(signals.post_save.receivers), len(signals.post_save.receivers),
len(signals.pre_delete.receivers), len(signals.pre_delete.receivers),
len(signals.post_delete.receivers) len(signals.post_delete.receivers),
len(signals.pre_bulk_insert.receivers),
len(signals.post_bulk_insert.receivers),
) )
self.assertEqual(self.pre_signals, post_signals) self.assertEqual(self.pre_signals, post_signals)
@@ -154,6 +174,14 @@ class SignalTests(unittest.TestCase):
def create_author(): def create_author():
a1 = self.Author(name='Bill Shakespeare') a1 = self.Author(name='Bill Shakespeare')
def bulk_create_author_with_load():
a1 = self.Author(name='Bill Shakespeare')
self.Author.objects.insert([a1], load_bulk=True)
def bulk_create_author_without_load():
a1 = self.Author(name='Bill Shakespeare')
self.Author.objects.insert([a1], load_bulk=False)
self.assertEqual(self.get_signal_output(create_author), [ self.assertEqual(self.get_signal_output(create_author), [
"pre_init signal, Author", "pre_init signal, Author",
"{'name': 'Bill Shakespeare'}", "{'name': 'Bill Shakespeare'}",
@@ -179,3 +207,24 @@ class SignalTests(unittest.TestCase):
'pre_delete signal, William Shakespeare', 'pre_delete signal, William Shakespeare',
'post_delete signal, William Shakespeare', 'post_delete signal, William Shakespeare',
]) ])
signal_output = self.get_signal_output(bulk_create_author_with_load)
# The output of this signal is not entirely deterministic. The reloaded
# object will have an object ID. Hence, we only check part of the output
self.assertEquals(signal_output[3],
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
self.assertEquals(signal_output[-2:],
["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Is loaded",])
self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
"pre_init signal, Author",
"{'name': 'Bill Shakespeare'}",
"post_init signal, Bill Shakespeare",
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
"post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Not loaded",
])
self.Author.objects.delete()