Compare commits


236 Commits

Author SHA1 Message Date
Ross Lawley
74343841e4 Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-12-10 15:17:05 +00:00
Ross Lawley
3b3738b36b 0.7.9 2012-12-10 15:16:31 +00:00
Ross Lawley
b15c3f6a3f Update AUTHORS
Sorry Jorge!
2012-12-10 10:33:55 +00:00
Ross Lawley
2459f9b0aa Merge pull request #195 from balboah/patch-1
Corrected user guide link in README
2012-12-10 02:24:02 -08:00
Johnny Bergström
6ff1bd9b3c Corrected user guide link in README 2012-12-10 11:01:08 +01:00
Ross Lawley
1bc2d2ec37 Version Bump 2012-12-10 09:54:20 +00:00
Ross Lawley
d7fd6a4628 Merge branch 'ref_field_to_mongo_bug' of https://github.com/yakxxx/mongoengine
Conflicts:
	AUTHORS
2012-12-10 09:20:20 +00:00
Ross Lawley
9236f365fa Fix sequence fields in embedded documents (MongoEngine/mongoengine#166) 2012-12-10 09:11:31 +00:00
Ross Lawley
90d22c2a28 Update AUTHORS & Changelog (MongoEngine/mongoengine#176) 2012-12-10 08:50:21 +00:00
Ross Lawley
c9f6e6b62a Merge branch 'order-by-chaining' of https://github.com/pteichman/mongoengine 2012-12-10 08:49:19 +00:00
Ross Lawley
260d9377f5 Updated Changelog 2012-12-10 08:26:42 +00:00
Ross Lawley
22d1ce6319 Merge branch 'master' of https://github.com/AdrianScott/mongoengine 2012-12-10 08:25:37 +00:00
Ross Lawley
6997e02476 Fixed EmailField so can add extra validation
(MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187)
2012-12-10 08:23:41 +00:00
Ross Lawley
155d79ff4d Merge branch 'master' of https://github.com/shaunduncan/mongoengine 2012-12-10 08:22:25 +00:00
Ross Lawley
452cd125fa Updated Changelog 2012-12-10 08:11:35 +00:00
Jorge Bastida
e62c35b040 Add more tests 2012-12-07 16:21:31 +00:00
Jorge Bastida
d5ec3c6a31 Add as_pymongo to __getitem__ and in_bulk 2012-12-07 15:59:09 +00:00
Jorge Bastida
ad983dc279 Implement _get_as_pymongo 2012-12-07 15:42:10 +00:00
Adrian Scott
bb15bf8d13 Update AUTHORS
added me (Adrian Scott, issues 180, 181)
2012-12-07 10:02:12 -05:00
Jorge Bastida
94adc207ad First as_pymongo implementation 2012-12-07 11:20:27 +00:00
Shaun Duncan
376d1c97ab EmailField should honor StringField validation as well 2012-12-04 13:08:49 -05:00
Adrian Scott
4fe87b40da added comments 2012-11-29 21:49:54 -05:00
Adrian Scott
b10d76cf4b split line to meet 79 char max line limit 2012-11-29 21:28:03 -05:00
Adrian Scott
3bdc9a2f09 session collection parameter; encoding optional
Added a parameter for the name of the session collection; Added the
option to not encode session_data, which is useful for expiring sessions
of users when a password is changed, etc.; these upgrades provided by
SocialVilla Inc.
2012-11-29 20:53:09 -05:00
Peter Teichman
9d52e18659 Don't freeze the current query state when calling .order_by()
This changes order_by() to eliminate its reference to
self._cursor. This meant that any parameters built by QuerySet
that followed an order_by() clause were ignored.
2012-11-27 10:55:55 -05:00
Ross Lawley
653c4259ee Fixed handling for old style types 2012-11-27 11:59:34 +00:00
Ross Lawley
9f5ab8149f Adding some debugging 2012-11-27 11:06:55 +00:00
Ross Lawley
66c6d14f7a Trying to fix seesaw test on travis 2012-11-27 10:50:22 +00:00
Ross Lawley
2c0fc142a3 Updated travis.yml 2012-11-26 21:04:06 +00:00
yak
0da2dfd191 addition to AUTHORS 2012-11-13 13:04:05 +01:00
yak
787fc1cd8b bug fix for ReferenceField.to_mongo when dbref=False 2012-11-13 13:02:07 +01:00
Ross Lawley
c31488add9 Version bump 2012-11-05 11:14:02 +00:00
Ross Lawley
31ec7907b5 Fixing py3 compat 2012-10-01 20:01:43 +00:00
Ross Lawley
12f3f8c694 Added chaining regression test (MongoEngine/mongoengine#135) 2012-10-01 16:27:59 +00:00
Ross Lawley
79098e997e Updated test 2012-10-01 16:20:48 +00:00
Ross Lawley
dc1849bad5 Unicode fix for repr (MongoEngine/mongoengine#133) 2012-10-01 16:15:25 +00:00
Ross Lawley
e2d826c412 Allow updates with match operators (MongoEngine/mongoengine#144) 2012-10-01 15:26:34 +00:00
Ross Lawley
e6d796832e Unicode fix reverted but can have custom validator
MongoEngine/mongoengine#136
2012-10-01 14:48:53 +00:00
Ross Lawley
6f0a6df4f6 Fix loop 2012-10-01 14:23:05 +00:00
Ross Lawley
7a877a00d5 Updated URLField
handle unicode and custom validator (MongoEngine/mongoengine#136)
2012-10-01 13:59:15 +00:00
Ross Lawley
e8604d100e Added Garry Polley to contributors list
hmarr/mongoengine#573
2012-10-01 10:20:28 +00:00
Ross Lawley
1647441ce8 Merge branch 'master' of github.com:hmarr/mongoengine 2012-10-01 10:17:01 +00:00
Ross Lawley
9f8d6b3a00 Merge pull request #573 from garrypolley/master
Allow Django  AuthenticationBackends to work with Django user

Thanks @garrypolley
2012-10-01 00:24:52 -07:00
Garry Polley
4b2ad25405 can now use AuthenticationBackends with permissions. 2012-09-27 10:21:05 -05:00
Ross Lawley
3ce163b1a0 Updates to the readme 2012-09-27 10:29:24 +00:00
Ross Lawley
7c1ee28f13 Added CONTRIBUTING.rst 2012-09-27 10:26:22 +00:00
Ross Lawley
2645e43da1 Merge branch 'master' into 0.7 2012-09-27 08:30:36 +00:00
Ross Lawley
59bfe551a3 Updated docs thanks @mitar 2012-09-27 08:29:49 +00:00
Ross Lawley
e2c78047b1 Moved injection of Exceptions to top level 2012-09-26 10:43:14 +00:00
Ross Lawley
6a4351e44f Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138) 2012-09-24 18:49:29 +00:00
Ross Lawley
adb60ef1ac Improved import cache 2012-09-24 18:45:02 +00:00
Ross Lawley
3090adac04 Fixed objectId for DBRef 2012-09-24 11:37:54 +00:00
Ross Lawley
b9253d86cc Updated travis.yml 2012-09-19 18:53:31 +00:00
Ross Lawley
ab4d4e6230 Fix ReferenceField dbref = False 2012-09-18 21:37:45 +00:00
Ross Lawley
7cd38c56c6 Merge branch 'master' of github.com:MongoEngine/mongoengine into 0.7 2012-09-14 10:19:04 +00:00
Ross Lawley
864053615b Updated docs 2012-09-14 10:18:44 +00:00
Ross Lawley
db2366f112 Merge pull request #126 from mahmoudhossam/patch-1
Update docs/index.rst
2012-09-13 06:18:06 -07:00
Ross Lawley
4defc82192 Version Bump 2012-09-11 15:16:39 +00:00
Ross Lawley
5949970a95 Fixed index inheritance issues
firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125)
2012-09-11 15:14:37 +00:00
Mahmoud Hossam
0ea4abda81 Update docs/index.rst
Correct link for the source.
2012-09-11 12:38:40 +03:00
Ross Lawley
5c6035d636 Updated upgrade docs for BinaryFields 2012-09-11 08:56:32 +00:00
Ross Lawley
a2183e3dcc Reverted EmbeddedDocuments meta handling.
You now can turn off inheritance (MongoEngine/mongoengine#119)
2012-09-07 13:23:46 +01:00
Ross Lawley
99637151b5 Updated version 2012-09-05 10:24:25 +01:00
Ross Lawley
a8e787c120 Update index spec generation so its not destructive (MongoEngine/mongoengine#113) 2012-09-04 14:39:19 +01:00
Ross Lawley
53339c7c72 Version bump 2012-09-04 08:21:44 +01:00
Ross Lawley
3534bf7d70 Fixed index spec inheritance (MongoEngine/mongoengine#111) 2012-09-04 08:20:19 +01:00
Ross Lawley
1cf3989664 Updated setup.py 2012-09-04 08:10:56 +01:00
Ross Lawley
fd296918da Fixing readme for pypi 2012-09-03 15:51:36 +01:00
Ross Lawley
8ad1f03dc5 Updated travis 2012-09-03 15:23:30 +01:00
Ross Lawley
fe7e17dbd5 Updated 0.7 branch 2012-09-03 15:21:52 +01:00
Ross Lawley
d582394a42 Merge branch 'master' into 0.7 2012-09-03 15:20:17 +01:00
Ross Lawley
02ef0df019 Updated travis.yml 2012-09-03 13:29:29 +01:00
Ross Lawley
0dfd6aa518 Updated travis.yml - now testing multiple pymongos 2012-09-03 13:22:08 +01:00
Ross Lawley
0b23bc9cf2 python 2.6.4 and lower cannot handle unicode keys passed to __init__ (MongoEngine/mongoengine#101) 2012-09-03 13:10:06 +01:00
Ross Lawley
f108c4288e Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107) 2012-09-03 12:53:50 +01:00
Ross Lawley
9b9696aefd Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104) 2012-09-03 12:29:58 +01:00
Ross Lawley
576e198ece Fixed Q object merge edge case (MongoEngine/mongoengine#109) 2012-09-03 11:33:30 +01:00
Ross Lawley
52f85aab18 Merge branch 'master' of https://github.com/dimonb/mongoengine
Conflicts:
	mongoengine/base.py
2012-09-03 11:00:41 +01:00
Dmitry Balabanov
ab60fd0490 sharded collection document reload testcase 2012-08-30 14:37:11 +04:00
Dmitry Balabanov
d79ae30f31 fix object reload with shard_key in meta 2012-08-30 14:20:53 +04:00
Dmitry Balabanov
f27debe7f9 Respect sharding key when delete object from collection 2012-08-30 12:40:44 +04:00
Ross Lawley
735e043ff6 RC Version Bump 2012-08-24 16:36:40 +01:00
Ross Lawley
6e7f2b73cf Fix VERSION 2012-08-24 16:36:17 +01:00
Ross Lawley
d645ce9745 Updated version calculation 2012-08-24 15:08:37 +01:00
Ross Lawley
7c08c140da Updated upgrade documents 2012-08-24 14:18:45 +01:00
Ross Lawley
81d402dc17 Fixing tests 2012-08-24 13:58:38 +01:00
Ross Lawley
966fa12358 Updated docs for map_reduce MongoEngine/mongoengine#67 2012-08-24 13:47:00 +01:00
Ross Lawley
87792e1921 Test checking can save if not included
ref: MongoEngine/mongoengine#70
2012-08-24 11:45:04 +01:00
Ross Lawley
4c8296acc6 Added testcase for MongoEngine/mongoengine#77 2012-08-24 11:02:32 +01:00
Ross Lawley
9989da07ed Updated AUTHORS and changelog.rst
ref: MongoEngine/mongoengine#62
2012-08-24 10:40:45 +01:00
Ross Lawley
1c5e6a3425 NotUniqueError gracefully replacing ambiguous OperationError when appropriate 2012-08-24 10:38:10 +01:00
Ross Lawley
eedf908770 Update name ref: MongoEngine/mongoengine#96 2012-08-24 09:33:56 +01:00
Ross Lawley
5c9ef41403 Updated AUTHORS (MongoEngine/mongoengine#85) 2012-08-24 09:32:15 +01:00
Ross Lawley
0bf2ad5b67 Use pk in ReferenceField fixes MongoEngine/mongoengine#85 2012-08-24 09:30:08 +01:00
Ross Lawley
a0e3f382cd Added testcase ref MongoEngine/mongoengine#92 2012-08-24 09:20:14 +01:00
Ross Lawley
f09c39b5d7 Updated AUTHORS and changelog.rst
refs MongoEngine/mongoengine#92
2012-08-24 09:10:13 +01:00
Ross Lawley
89c67bf259 Merge branch 'master' of https://github.com/nikitinsm/mongoengine 2012-08-24 09:08:35 +01:00
Ross Lawley
ea666d4607 Updated AUTHORS and changelog.rst
Refs: MongoEngine/mongoengine#88
2012-08-24 09:00:39 +01:00
Ross Lawley
b8af154439 Merge pull request #88 from akolechkin/master
Fix for UnboundLocalError in composite index with pk field
2012-08-24 00:58:09 -07:00
Ross Lawley
f594ece32a Fixed MRO issue in base.py MongoEngine/mongoengine#95 2012-08-24 08:54:54 +01:00
Ross Lawley
03beb6852a Merge branch 'master' of https://github.com/elasticsales/mongoengine 2012-08-24 08:53:26 +01:00
Anthony Nemitz
ab9e9a3329 adding comment to the MRO test case 2012-08-23 18:09:51 -07:00
Anthony Nemitz
a4b09344af test case for multiple inheritance raising MRO exception 2012-08-23 18:08:12 -07:00
Ross Lawley
8cb8aa392c Updated tests 2012-08-23 17:08:23 +01:00
Ross Lawley
3255519792 Updating test 2012-08-23 16:31:22 +01:00
Sergey
7e64bb2503 Update mongoengine/fields.py
mistake
2012-08-23 14:32:27 +04:00
Sergey
86a78402c3 Update mongoengine/fields.py 2012-08-23 14:29:29 +04:00
Ross Lawley
ba276452fb Fix tests 2012-08-23 11:09:07 +01:00
Ross Lawley
4ffa8d0124 Updated ReferenceField's to optionally store ObjectId strings.
This will become the default in 0.8 (MongoEngine/mongoengine#89)
2012-08-23 11:02:38 +01:00
Anton Kolechkin
4bc5082681 fix for UnboundLocalError when use the composite index with primary key field 2012-08-23 11:58:04 +07:00
Anton Kolechkin
0e3c34e1da test for composite index with pk, i used EmbeddedDocument because this is the only issue when it's needed 2012-08-23 11:57:22 +07:00
Ross Lawley
658b3784ae Split out warning tests as they are order dependent 2012-08-22 09:44:32 +01:00
Ross Lawley
0526f577ff Embedded Documents still can inherit fields MongoEngine/mongoengine#84 2012-08-22 09:27:18 +01:00
Ross Lawley
bb1b9bc1d3 Fixing api docs 2012-08-21 17:49:12 +01:00
Ross Lawley
b1eeb77ddc Added FutureWarning - save will default to cascade=False in 0.8 2012-08-21 17:45:51 +01:00
Ross Lawley
999d4a7676 Fixed broken test 2012-08-21 17:29:38 +01:00
Ross Lawley
1b80193aac Added example of indexing embedded fields
MongoEngine/mongoengine#75
2012-08-20 17:50:18 +01:00
Ross Lawley
be8d39a48c Updated changelog / AUTHORS (MongoEngine/mongoengine#80) 2012-08-20 17:35:19 +01:00
Ross Lawley
a2f3d70f28 Merge pull request #80 from biszkoptwielki/master
Image resize fails when force flag is set. Fix & unittest
2012-08-20 09:34:05 -07:00
mikolaj
676a7bf712 Image resize fails when force flag is set 2012-08-18 17:27:22 +01:00
Ross Lawley
e990a6c70c Improve unicode key handling for PY25 2012-08-17 16:13:45 +01:00
Ross Lawley
90fa0f6c4a Add flexibility for fields handling bad data (MongoEngine/mongoengine#78) 2012-08-17 16:02:33 +01:00
Ross Lawley
22010d7d95 Updated benchmark stats 2012-08-17 15:04:09 +01:00
Ross Lawley
66279bd90f Post refactor cleanups (ref: meta cleanups) 2012-08-17 11:58:57 +01:00
Ross Lawley
19da228855 Cleaned up the metaclasses for documents
Refactored and clarified intent and
tidied up
2012-08-17 11:53:46 +01:00
Ross Lawley
9e67941bad Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74) 2012-08-14 10:34:17 +01:00
Ross Lawley
0454fc74e9 Updated changelog 2012-08-14 10:32:26 +01:00
Ross Lawley
2f6b1c7611 Improved queryset filtering (hmarr/mongoengine#554) 2012-08-13 17:29:33 +01:00
Ross Lawley
f00bed6058 Updated test cases for dynamic docs 2012-08-13 16:53:36 +01:00
Ross Lawley
529c522594 Updated changelog - fixed dynamic document issue 2012-08-13 16:42:51 +01:00
Ross Lawley
2bb9493fcf Updated doc 2012-08-13 15:05:01 +01:00
Ross Lawley
839ed8a64a Added test for abstract shard key 2012-08-13 14:57:35 +01:00
Ross Lawley
017a31ffd0 Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-07 13:24:27 +01:00
Ross Lawley
83b961c84d Merge pull request #64 from alonho/abstract_shard_key
propagate the shard_key from abstract base classes' meta
2012-08-07 05:24:45 -07:00
Ross Lawley
fa07423ca5 Removing python 2.4 code 2012-08-07 13:10:56 +01:00
Ross Lawley
dd4af2df81 Python 2.5 and 3.1 support confirmed 2012-08-07 13:07:51 +01:00
Ross Lawley
44bd8cb85b Merge branch 'master' of https://github.com/LaineHerron/mongoengine into 0.7 2012-08-07 10:45:01 +01:00
Ross Lawley
52d80ac23c Merge branch 'master' into 0.7
Conflicts:
	mongoengine/base.py
2012-08-07 10:41:49 +01:00
Alon Horev
43a5d73e14 propagate the shard_key from abstract base classes' meta 2012-08-07 09:12:39 +00:00
Ross Lawley
abc764951d Merge branch 'master' into dev
Conflicts:
	mongoengine/queryset.py
2012-08-07 10:07:54 +01:00
Ross Lawley
9cc6164026 Version bump 2012-08-07 10:05:01 +01:00
Ross Lawley
475488b9f2 Added support for distinct and db_alias (MongoEngine/mongoengine#59) 2012-08-07 10:04:05 +01:00
Ross Lawley
95b1783834 Updated changelog 2012-08-07 09:31:51 +01:00
Anthony Nemitz
12c8b5c0b9 Make chained querysets work if constraining the same fields.
Refs hmarr/mongoengine#554
2012-08-07 08:59:56 +01:00
Ross Lawley
f99b7a811b Fixed error in Binary Field 2012-08-07 08:53:58 +01:00
Laine
0575abab23 mend 2012-08-03 15:09:50 -07:00
Laine
9eebcf7beb Merge branch 'master' of https://github.com/LaineHerron/mongoengine
Conflicts:
	tests/test_document.py
2012-08-03 15:08:21 -07:00
Laine
ed74477150 made compatible with python 2.5 2012-08-03 15:04:59 -07:00
Ross Lawley
2801b38c75 Version Bump 2012-08-03 14:36:31 +01:00
Ross Lawley
dc3fea875e Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-03 12:56:27 +01:00
Ross Lawley
aab8c2b687 Merge pull request #57 from filipd/patch-1
Added reference to the official repository in the README
2012-08-03 04:56:55 -07:00
Filip Dupanović
3577773af3 Added reference to the official repository 2012-08-03 14:55:18 +03:00
Laine
dd023edc0f made compatible with python 2.5 2012-08-02 15:30:21 -07:00
Ross Lawley
8ac9e6dc19 Updated the documents 2012-08-02 14:11:02 +01:00
Ross Lawley
f45d4d781d Updated test for py3.2 2012-08-02 09:26:33 +01:00
Ross Lawley
c95652d6a8 Updated AUTHORS 2012-08-02 09:23:15 +01:00
Ross Lawley
97b37f75d3 Travis - Not sure python 3.3 support is there yet 2012-08-02 09:22:12 +01:00
Ross Lawley
95dae48778 Fixing build 2012-08-02 09:20:41 +01:00
Ross Lawley
73635033bd Updated travis config 2012-08-02 09:10:42 +01:00
Ross Lawley
c1619d2a62 Fixed image_field attributes regression 2012-08-02 08:47:38 +01:00
Ross Lawley
b87ef982f6 Merge branch 'master' into dev 2012-08-02 08:33:17 +01:00
Laine
91aa90ad4a Added Python 3 support to MongoEngine 2012-08-01 17:21:48 -07:00
Ross Lawley
4b3cea9e78 Added Binary support to UUID (MongoEngine/mongoengine#47) 2012-08-01 16:03:33 +01:00
Ross Lawley
2420b5e937 Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) 2012-08-01 15:14:56 +01:00
Ross Lawley
f23a976bea Added Tommi Komulainen to the contributors list
refs MongoEngine/mongoengine#48
2012-08-01 15:01:21 +01:00
Ross Lawley
4226cd08f1 Updated Changelog 2012-08-01 15:00:14 +01:00
Ross Lawley
7a230f1693 Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-01 14:58:06 +01:00
Ross Lawley
a43d0d4612 Fixed BinaryField python value issue (MongoEngine/mongoengine#48) 2012-08-01 14:57:46 +01:00
Ross Lawley
78a40a0c70 Merge pull request #41 from wpjunior/patch-13
Small fix in SequenceField
2012-08-01 06:26:58 -07:00
Ross Lawley
2c69d8f0b0 Updated License 2012-08-01 13:54:24 +01:00
Ross Lawley
0018c38b83 Fixed queryset manager issue (MongoEngine/mongoengine#52) 2012-08-01 13:51:51 +01:00
Ross Lawley
8df81571fc Fixed FileField comparison
Refs hmarr/mongoengine#547
2012-08-01 13:28:28 +01:00
Ross Lawley
d1add62a06 More updates 2012-08-01 13:11:36 +01:00
Ross Lawley
c419f3379a Style changes 2012-07-30 15:43:53 +01:00
Ross Lawley
69d57209f7 Minor 2012-07-30 13:35:45 +01:00
Ross Lawley
7ca81d6fb8 Fixes 2012-07-30 13:00:42 +01:00
Ross Lawley
8a046bfa5d Merge branch 'master' into dev 2012-07-27 09:29:10 +01:00
Ross Lawley
3628a7653c Squashed commit of the following:
commit 48f988acd7
Merge: 6526923 1304f27
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Thu Jul 26 08:17:45 2012 -0700

    Merge pull request #44 from faulkner/fix-notes

    Proper syntax for RST notes (so they actually render).

commit 6526923345
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Thu Jul 26 16:00:32 2012 +0100

    Fixed recursion loading bug in _get_changed_fields

    fixes hmarr/mongoengine#548

commit 24fd1acce6
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Thu Jul 26 14:14:10 2012 +0100

    Version bump

commit cbb9235dc5
Merge: 6459d4c 19ec2c9
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Wed Jul 25 15:12:34 2012 +0100

    Merge branch 'master' of github.com:hmarr/mongoengine

commit 19ec2c9bc9
Merge: 3070e0b 601f0eb
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Wed Jul 25 07:12:07 2012 -0700

    Merge pull request #545 from maxcountryman/patch-1

    Correcting typo in DynamicField docstring

commit 6459d4c0b6
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Wed Jul 25 14:55:10 2012 +0100

    Fixed issue with custom queryset manager expecting explicit variable names

    If using / expecting kwargs you have to call the queryset manager
    explicitly.

commit 1304f2721f
Author: Chris Faulkner <thefaulkner@gmail.com>
Date:   Tue Jul 24 14:06:43 2012 -0700

    Proper syntax for RST notes (so they actually render).

commit 598ffd3e5c
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Mon Jul 23 15:32:02 2012 +0100

    Fixed documentation

commit 601f0eb168
Author: Max Countryman <maxc@me.com>
Date:   Fri Jul 20 19:12:43 2012 -0700

    Correcting typo in DynamicField docstring
2012-07-26 22:50:39 +01:00
Ross Lawley
48f988acd7 Merge pull request #44 from faulkner/fix-notes
Proper syntax for RST notes (so they actually render).
2012-07-26 08:17:45 -07:00
Ross Lawley
6526923345 Fixed recursion loading bug in _get_changed_fields
fixes hmarr/mongoengine#548
2012-07-26 16:00:32 +01:00
Ross Lawley
24fd1acce6 Version bump 2012-07-26 14:14:10 +01:00
Ross Lawley
cbb9235dc5 Merge branch 'master' of github.com:hmarr/mongoengine 2012-07-25 15:12:34 +01:00
Ross Lawley
19ec2c9bc9 Merge pull request #545 from maxcountryman/patch-1
Correcting typo in DynamicField docstring
2012-07-25 07:12:07 -07:00
Ross Lawley
6459d4c0b6 Fixed issue with custom queryset manager expecting explicit variable names
If using / expecting kwargs you have to call the queryset manager
explicitly.
2012-07-25 14:55:10 +01:00
Chris Faulkner
1304f2721f Proper syntax for RST notes (so they actually render). 2012-07-24 14:06:43 -07:00
Wilson Júnior
8bde0c0e53 Small fix in SequenceField 2012-07-23 12:31:47 -03:00
Ross Lawley
598ffd3e5c Fixed documentation 2012-07-23 15:32:02 +01:00
Ross Lawley
1a4533a9cf Minor perf update 2012-07-23 14:46:48 +01:00
Max Countryman
601f0eb168 Correcting typo in DynamicField docstring 2012-07-20 19:12:43 -07:00
Ross Lawley
3070e0bf5d Fix for inheritance bug and db_alias 2012-07-20 10:34:08 +01:00
Ross Lawley
83c11a9834 Version bump 2012-07-19 16:12:21 +01:00
Ross Lawley
5c912b930e Removed tests testing MongoDB not mongoengine 2012-07-19 16:03:29 +01:00
Ross Lawley
1b17fb0ae7 Updated validation error messages
refs hmarr/mongoengine#539
2012-07-19 15:04:12 +01:00
Ross Lawley
d83e67c121 Added support for null / zero / false values in item_frequencies
refs MongoEngine/mongoengine#40
2012-07-19 12:08:07 +01:00
Ross Lawley
ae39ed94c9 Fixed cascade save edge case
refs MongoEngine/mongoengine#40
2012-07-19 11:52:26 +01:00
Ross Lawley
1e51180d42 Fixed geo index creation bug
fixes MongoEngine/mongoengine#36
2012-07-19 11:39:52 +01:00
Ross Lawley
87ba69d02e Updated changelog 2012-07-19 10:35:37 +01:00
Ross Lawley
8879d5560b Added support for args / kwargs and queryset_manager
Closes MongoEngine/mongoengine#37
2012-07-19 10:32:33 +01:00
Ross Lawley
c1621ee39c Merge pull request #39 from wpjunior/tests2
More one test
2012-07-18 06:10:26 -07:00
Ross Lawley
b0aa98edb4 Deref list custom id fix 2012-07-18 14:09:24 +01:00
Wilson Júnior
a7a2fe0216 added more tests 2012-07-18 06:37:23 -03:00
Ross Lawley
8e50f5fa3c Version bump 2012-07-11 16:59:24 +01:00
Ross Lawley
31793520bf Updated changelog / AUTHORS
refs hmarr/mongoengine#529
2012-07-11 16:38:15 +01:00
Ross Lawley
0b6b0368c5 Merge branch 'master' of https://github.com/elasticsales/mongoengine 2012-07-11 16:36:35 +01:00
Ross Lawley
d1d30a9280 Added test and updated changelog
refs hmarr/mongoengine#527
2012-07-11 16:34:28 +01:00
Ross Lawley
420c6f2d1e Merge branch 'patch-10' of https://github.com/wpjunior/mongoengine 2012-07-11 16:33:16 +01:00
Ross Lawley
34f06c4971 Updated changelog / AUTHORS
refs hmarr/mongoengine#524
2012-07-11 16:27:43 +01:00
Ross Lawley
9cc4bbd49d Merge branch 'patch-1' of https://github.com/daevaorn/mongoengine 2012-07-11 16:26:50 +01:00
Ross Lawley
f66b312869 Updated api docs
fixes hmarr/mongoengine#526
2012-07-11 16:25:40 +01:00
Ross Lawley
2405ba8708 Updated Changelog / AUTHORS
refs hmarr/mongoengine#531
2012-07-11 16:11:13 +01:00
Ross Lawley
a91b6bff8b Merge branch 'master' of https://github.com/agonzalezro/mongoengine 2012-07-11 16:09:33 +01:00
Ross Lawley
450dc11a68 Unicode fixes
refs hmarr/mongoengine#533 MongoEngine/mongoengine#32
2012-07-11 16:01:24 +01:00
Ross Lawley
1ce2f84ce5 Updated docs regarding fields
refs hmarr/mongoengine#535
2012-07-11 15:56:34 +01:00
Ross Lawley
f55b241cfa Trying to bump travis 2012-07-11 15:45:31 +01:00
Ross Lawley
34d08ce8ef Only dereference fields than need it
Fixes MongoEngine/mongoengine#31
2012-07-11 15:23:27 +01:00
Ross Lawley
4f5aa8c43b Fixed config / added test 2012-07-11 14:29:35 +01:00
Ross Lawley
27b375060d Updated changelog / AUTHORS
refs MongoEngine/mongoengine#32
2012-07-11 14:25:38 +01:00
Ross Lawley
cbfdc401f7 Merge pull request #32 from jaimeirurzun/master
Fix _transform_update to accept unicode fields

Thanks jaimeirurzun
2012-07-11 06:24:29 -07:00
Ross Lawley
b58bf3e0ce Added support for addToSet and each
fixes MongoEngine/mongoengine#33
2012-07-11 14:22:50 +01:00
Jaime Irurzun
1fff7e9aca Fix _transform_update to accept unicode fields 2012-07-10 10:40:14 +01:00
Álex González
494b981b13 Default value for direction 2012-07-02 10:05:25 +02:00
Álex González
dd93995bd0 Forced cast to list 2012-07-02 10:01:22 +02:00
Thomas Steinacher
b3bb4add9c Fix error dict with nested validation. 2012-06-27 13:46:06 -07:00
Wilson Júnior
d305e71c27 Fixes for __ne operator in IntField and FloatField 2012-06-25 15:53:42 -03:00
Alexander Koshelev
0d92baa670 Exclude tests from installation 2012-06-24 03:08:49 +04:00
Ross Lawley
7a1b110f62 Added Tristan Escalada to authors
refs hmarr/mongoengine#520
2012-06-23 22:24:09 +01:00
Ross Lawley
db8df057ce Merge pull request #520 from tescalada/patch-1
documentation typo: inheritence
2012-06-23 14:23:07 -07:00
Ross Lawley
5d8ffded40 Fixed issue with embedded_docs and db_fields
Bumped version also
refs: hmarr/mongoengine#523
2012-06-23 22:19:02 +01:00
Ross Lawley
07f3e5356d Updated changelog / AUTHORS
refs: hmarr/mongoengine#522
2012-06-23 21:46:31 +01:00
Ross Lawley
1ece62f960 Merge branch 'unicode-fix' of https://github.com/aparajita/mongoengine 2012-06-23 21:43:09 +01:00
Ross Lawley
056c604dc3 Fixes __repr__ modifying the cursor
Fixes MongoEngine/mongoengine#30
2012-06-22 16:22:27 +01:00
Aparajita Fishman
2d08eec093 Fix conversion of StringField value to unicode, replace outdated (str, unicode) check with unicode 2012-06-21 18:57:14 -07:00
Tristan Escalada
614b590551 documentation typo: inheritence
inheritence corrected to inheritance
only in the documentation, not in the code
2012-06-19 17:08:28 -03:00
Ross Lawley
6d90ce250a Version bump 2012-06-19 17:01:28 +01:00
Ross Lawley
ea31846a19 Fixes scalar lookups for primary_key
fixes hmarr/mongoengine#519
2012-06-19 16:59:18 +01:00
Ross Lawley
e6317776c1 Fixes DBRef handling in _delta
refs: hmarr/mongoengine#518
2012-06-19 16:45:23 +01:00
36 changed files with 3792 additions and 1327 deletions


@@ -1,12 +1,29 @@
# http://travis-ci.org/#!/MongoEngine/mongoengine
language: python
services: mongodb
python:
- 2.5
- 2.6
- 2.7
- 3.1
- 3.2
env:
- PYMONGO=dev
- PYMONGO=2.3
- PYMONGO=2.2
install:
- sudo apt-get install zlib1g zlib1g-dev
- sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/
- pip install PIL --use-mirrors ; true
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
- if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
- python setup.py install
script:
- python setup.py test
- python setup.py test
notifications:
irc: "irc.freenode.org#mongoengine"
branches:
only:
- master
- 0.8

AUTHORS

@@ -8,6 +8,7 @@ Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com>
Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta
Laine Herron https://github.com/LaineHerron
CONTRIBUTORS
@@ -105,6 +106,25 @@ that much better:
* Adam Reeve
* Anthony Nemitz
* deignacio
* shaunduncan
* Shaun Duncan
* Meir Kriheli
* Andrey Fedoseev
* Andrey Fedoseev
* aparajita
* Tristan Escalada
* Alexander Koshelev
* Jaime Irurzun
* Alexandre González
* Thomas Steinacher
* Tommi Komulainen
* Peter Landry
* biszkoptwielki
* Anton Kolechkin
* Sergey Nikitin
* psychogenic
* Stefan Wójcik
* dimonb
* Garry Polley
* Adrian Scott
* Peter Teichman
* Jakub Kot
* Jorge Bastida

CONTRIBUTING.rst

@@ -0,0 +1,61 @@
Contributing to MongoEngine
===========================
MongoEngine has a large `community
<https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and
contributions are always encouraged. Contributions can be as simple as
minor tweaks to the documentation. Please read these guidelines before
sending a pull request.
Bugfixes and New Features
-------------------------
Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
issue or feature request. That way you avoid working on something
that might not be of interest or that has already been addressed. If in doubt
post to the `user group <http://groups.google.com/group/mongoengine-users>`
Supported Interpreters
----------------------
PyMongo supports CPython 2.5 and newer. Language
features not supported by all interpreters can not be used.
Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
Style Guide
-----------
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents and 79 character line limits.
Testing
-------
All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
and any pull requests are automatically tested by Travis. Any pull requests
without tests will take longer to be integrated and might be refused.
General Guidelines
------------------
- Avoid backward breaking changes if at all possible.
- Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod
running on the default port, then execute ``python setup.py test``
from the cmd line to run the test suite).
- Add yourself to AUTHORS.rst :)
Documentation
-------------
To contribute to the `API documentation
<http://docs.mongoengine.org/en/latest/apireference.html>`_
just make your changes to the inline documentation of the appropriate
`source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file
<https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a
branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
button.


@@ -1,5 +1,5 @@
Copyright (c) 2009-2010 Harry Marr
Copyright (c) 2009-2012 See AUTHORS
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
@@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND


@@ -2,6 +2,7 @@
MongoEngine
===========
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Repository: https://github.com/MongoEngine/mongoengine
:Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza)
@@ -13,7 +14,7 @@ About
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation available at http://mongoengine-odm.rtfd.org - there is currently
a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
<http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
<https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference
<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
Installation
@@ -62,11 +63,6 @@ Some simple examples of what MongoEngine code looks like::
... print 'Link:', post.url
... print
...
=== Using MongoEngine ===
See the tutorial
=== MongoEngine Docs ===
Link: hmarr.com/mongoengine
>>> len(BlogPost.objects)
2
@@ -84,7 +80,7 @@ Some simple examples of what MongoEngine code looks like::
Tests
=====
To run the test suite, ensure you are running a local instance of MongoDB on
the standard port, and run ``python setup.py test``.
the standard port, and run: ``python setup.py test``.
Community
=========
@@ -92,11 +88,8 @@ Community
<http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list
<http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_
- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_
Contributing
============
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
contribute to the project, fork it on GitHub and send a pull request, all
contributions and suggestions are welcome!
We welcome contributions! see the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_


@@ -28,47 +28,64 @@ def main():
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.1141769886
3.86744189262
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
2.37724113464
6.23374891281
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
1.92479610443
5.33027005196
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
pass - No Cascade
0.5.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.10552310944
3.89597702026
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
16.5169169903
21.7735359669
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
14.9446101189
19.8670389652
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
14.912801981
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
14.9617750645
pass - No Cascade
Performance
0.6.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.10072994232
3.81559205055
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
5.27341103554
10.0446798801
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
4.49365401268
9.51354718208
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
4.43459296227
9.02567505836
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
4.40114378929
8.44933390617
0.7.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
3.78801012039
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
9.73050498962
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
8.33456707001
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
8.37778115273
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
8.36906409264
"""
setup = """


@@ -47,25 +47,28 @@ Querying
Fields
======
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.ComplexDateTimeField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.DynamicField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.ImageField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.MapField
.. autoclass:: mongoengine.ObjectIdField
.. autoclass:: mongoengine.ReferenceField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.SequenceField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.UUIDField


@@ -2,6 +2,134 @@
Changelog
=========
Changes in 0.7.9
================
- Better fix handling for old style _types
- Embedded SequenceFields follow collection naming convention
Changes in 0.7.8
================
- Fix sequence fields in embedded documents (MongoEngine/mongoengine#166)
- Fix query chaining with .order_by() (MongoEngine/mongoengine#176)
- Added optional encoding and collection config for Django sessions (MongoEngine/mongoengine#180, MongoEngine/mongoengine#181, MongoEngine/mongoengine#183)
- Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187)
- Fixed bulk inserts can now handle custom pk's (MongoEngine/mongoengine#192)
- Added as_pymongo method to return raw or cast results from pymongo (MongoEngine/mongoengine#193)
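A minimal sketch of the new ``as_pymongo`` behaviour, assuming a simple ``Person`` document and a local test database ::

    from mongoengine import Document, StringField, connect

    connect('as_pymongo_example')

    class Person(Document):  # example document, not from the changelog
        name = StringField()

    Person(name="Ross").save()

    # normal querysets cast each result into a Person instance
    person = Person.objects[0]

    # as_pymongo() skips the casting and returns raw pymongo dicts,
    # e.g. {'_id': ObjectId('...'), 'name': u'Ross'}
    raw = Person.objects.as_pymongo()[0]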
Changes in 0.7.7
================
- Fix handling for old style _types
Changes in 0.7.6
================
- Unicode fix for repr (MongoEngine/mongoengine#133)
- Allow updates with match operators (MongoEngine/mongoengine#144)
- Updated URLField - now you can override the regex (MongoEngine/mongoengine#136)
- Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
- Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138)
Changes in 0.7.5
================
- ReferenceFields with dbref=False use ObjectId instead of strings (MongoEngine/mongoengine#134)
See ticket for upgrade notes (https://github.com/MongoEngine/mongoengine/issues/134)
Changes in 0.7.4
================
- Fixed index inheritance issues - firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125)
Changes in 0.7.3
================
- Reverted EmbeddedDocuments meta handling - now can turn off inheritance (MongoEngine/mongoengine#119)
Changes in 0.7.2
================
- Update index spec generation so its not destructive (MongoEngine/mongoengine#113)
Changes in 0.7.1
=================
- Fixed index spec inheritance (MongoEngine/mongoengine#111)
Changes in 0.7.0
=================
- Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107)
- Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104)
- Fixed Q object merge edge case (MongoEngine/mongoengine#109)
- Fixed reloading on sharded documents (hmarr/mongoengine#569)
- Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62)
- Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92)
- Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88)
- Updated ReferenceField's to optionally store ObjectId strings
this will become the default in 0.8 (MongoEngine/mongoengine#89)
- Added FutureWarning - save will default to `cascade=False` in 0.8
- Added example of indexing embedded document fields (MongoEngine/mongoengine#75)
- Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80)
- Add flexibility for fields handling bad data (MongoEngine/mongoengine#78)
- Embedded Documents no longer handle meta definitions
- Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74)
- Improved queryset filtering (hmarr/mongoengine#554)
- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
- Fixed abstract classes and shard keys (MongoEngine/mongoengine#64)
- Fixed Python 2.5 support
- Added Python 3 support (thanks to Laine Heron)
Changes in 0.6.20
=================
- Added support for distinct and db_alias (MongoEngine/mongoengine#59)
- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
- Fixed BinaryField lookup re (MongoEngine/mongoengine#48)
Changes in 0.6.19
=================
- Added Binary support to UUID (MongoEngine/mongoengine#47)
- Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46)
- Fixed BinaryField python value issue (MongoEngine/mongoengine#48)
- Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41)
- Fixed queryset manager issue (MongoEngine/mongoengine#52)
- Fixed FileField comparision (hmarr/mongoengine#547)
Changes in 0.6.18
=================
- Fixed recursion loading bug in _get_changed_fields
Changes in 0.6.17
=================
- Fixed issue with custom queryset manager expecting explicit variable names
Changes in 0.6.16
=================
- Fixed issue where db_alias wasn't inherited
Changes in 0.6.15
=================
- Updated validation error messages
- Added support for null / zero / false values in item_frequencies
- Fixed cascade save edge case
- Fixed geo index creation through reference fields
- Added support for args / kwargs when using @queryset_manager
- Deref list custom id fix
Changes in 0.6.14
=================
- Fixed error dict with nested validation
- Fixed Int/Float fields and not equals None
- Exclude tests from installation
- Allow tuples for index meta
- Fixed use of str in instance checks
- Fixed unicode support in transform update
- Added support for add_to_set and each
Changes in 0.6.13
=================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixes __repr__ modifying the cursor
Changes in 0.6.12
=================
- Fixes scalar lookups for primary_key
- Fixes error with _delta handling DBRefs
Changes in 0.6.11
==================
- Fixed inconsistency handling None values field attrs
@@ -28,7 +156,7 @@ Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: b = MyDoc(**kwargs).save()
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields


@@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.ComplexDateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.DynamicField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.ImageField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.MapField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.SequenceField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.UUIDField`
Field arguments
---------------
@@ -256,6 +259,35 @@ as the constructor's argument::
content = StringField()
.. _one-to-many-with-listfields:
One to Many with ListFields
'''''''''''''''''''''''''''
If you are implementing a one to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::
class User(Document):
name = StringField()
class Page(Document):
content = StringField()
authors = ListField(ReferenceField(User))
bob = User(name="Bob Jones").save()
john = User(name="John Smith").save()
Page(content="Test Page", authors=[bob, john]).save()
Page(content="Another Page", authors=[john]).save()
# Find all pages Bob authored
Page.objects(authors__in=[bob])
# Find all pages that both Bob and John have authored
Page.objects(authors__all=[bob, john])
Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
@@ -312,6 +344,10 @@ Its value can take any of the following constants:
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
.. warning::
Signals are not triggered when doing cascading updates / deletes - if this
is required you must manually handle the update / delete.
Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
@@ -433,13 +469,18 @@ If a dictionary is passed then the following options are available:
Whether the index should be sparse.
:attr:`unique` (Default: False)
Whether the index should be sparse.
Whether the index should be unique.
.. note ::
To index embedded files / dictionary fields use 'dot' notation eg:
`rank.title`
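A minimal sketch of such an index, assuming hypothetical ``Rank`` and ``Person`` documents ::

    from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentField, StringField

    class Rank(EmbeddedDocument):  # hypothetical example documents
        title = StringField()

    class Person(Document):
        rank = EmbeddedDocumentField(Rank)

        meta = {
            # 'dot' notation reaches into the embedded document
            'indexes': ['rank.title'],
        }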
.. warning::
Inheritance adds extra indices.
If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.
Inheritance adds extra indices.
If don't need inheritance for a document turn inheritance off -
see :ref:`document-inheritance`.
Geospatial indexes


@@ -232,7 +232,7 @@ custom manager methods as you like::
BlogPost(title='test1', published=False).save()
BlogPost(title='test2', published=True).save()
assert len(BlogPost.objects) == 2
assert len(BlogPost.live_posts) == 1
assert len(BlogPost.live_posts()) == 1
Custom QuerySets
================
@@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`\ s ``meta`` dictionary::
class AwesomerQuerySet(QuerySet):
pass
def get_awesome(self):
return self.filter(awesome=True)
class Page(Document):
meta = {'queryset_class': AwesomerQuerySet}
# To call:
Page.objects.get_awesome()
.. versionadded:: 0.4
Aggregation


@@ -50,4 +50,11 @@ Example usage::
signals.post_save.connect(Author.post_save, sender=Author)
ReferenceFields and signals
---------------------------
Currently `reverse_delete_rules` do not trigger signals on the other part of
the relationship. If this is required you must manually handle the
reverse deletion.
.. _blinker: http://pypi.python.org/pypi/blinker


@@ -34,10 +34,10 @@ To get help with using MongoEngine, use the `MongoEngine Users mailing list
Contributing
------------
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
contributions are always encouraged. Contributions can be as simple as
minor tweaks to this documentation. To contribute, fork the project on
`GitHub <http://github.com/hmarr/mongoengine>`_ and send a
`GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a
pull request.
Also, you can join the developers' `mailing list


@@ -2,18 +2,86 @@
Upgrading
=========
0.6 to 0.7
==========
Cascade saves
-------------
Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set
to True. This is because in 0.8 it will default to False. If you require
cascading saves then either set it in the `meta` or pass
via `save` eg ::
# At the class level:
class Person(Document):
meta = {'cascade': True}
# Or in code:
my_document.save(cascade=True)
.. note ::
Remember: cascading saves **do not** cascade through lists.
ReferenceFields
---------------
ReferenceFields now can store references as ObjectId strings instead of DBRefs.
This will become the default in 0.8 and if `dbref` is not set a `FutureWarning`
will be raised.
To explicitly continue to use DBRefs change the `dbref` flag
to True ::
class Person(Document):
groups = ListField(ReferenceField(Group, dbref=True))
To migrate to using strings instead of DBRefs you will have to manually
migrate ::
# Step 1 - Migrate the model definition
class Group(Document):
author = ReferenceField(User, dbref=False)
members = ListField(ReferenceField(User, dbref=False))
# Step 2 - Migrate the data
for g in Group.objects():
g.author = g.author
g.members = g.members
g.save()
item_frequencies
----------------
In the 0.6 series we added support for null / zero / false values in
item_frequencies. A side effect was to return keys in the value they are
stored in rather than as string representations. Your code may need to be
updated to handle native types rather than strings keys for the results of
item frequency queries.
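A minimal sketch of the difference, assuming a hypothetical ``Post`` document with an integer ``likes`` field ::

    # Post is a hypothetical document with an integer 'likes' field
    frequencies = Post.objects.item_frequencies('likes')

    # previously the keys came back as string representations, e.g. {'1': 3, '0': 2}
    # they now keep the type they are stored in, e.g. {1: 3, 0: 2, None: 1}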
BinaryFields
------------
Binary fields have been updated so that they are native binary types. If you
previously were doing `str` comparisons with binary field values you will have
to update and wrap the value in a `str`.
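A minimal sketch, assuming a hypothetical ``Attachment`` document with a ``data`` BinaryField ::

    # Attachment is a hypothetical document with a BinaryField named 'data'
    attachment = Attachment.objects.first()

    # previously the stored value behaved like a str:
    #     attachment.data == 'some bytes'

    # from 0.7 it is a native binary type, so wrap it for str comparisons:
    str(attachment.data) == 'some bytes'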
0.5 to 0.6
==========
Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
to `pk` as pk is no longer a property of Embedded Documents.
Embedded Documents - if you had a `pk` field you will have to rename it from
`_id` to `pk` as pk is no longer a property of Embedded Documents.
Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.
Document._get_subclasses - Is no longer used and the class method has been removed.
Document._get_subclasses - Is no longer used and the class method has been
removed.
Document.objects.with_id - now raises an InvalidQueryError if used with a filter.
Document.objects.with_id - now raises an InvalidQueryError if used with a
filter.
FutureWarning - A future warning has been added to all inherited classes that
don't define `allow_inheritance` in their meta.
@@ -37,11 +105,11 @@ human-readable name for the option.
PyMongo / MongoDB
-----------------
map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output
parameters, have been depreciated.
map reduce now requires pymongo 1.11+- The pymongo `merge_output` and
`reduce_output` parameters, have been depreciated.
More methods now use map_reduce as db.eval is not supported for sharding as such
the following have been changed:
More methods now use map_reduce as db.eval is not supported for sharding as
such the following have been changed:
* :meth:`~mongoengine.queryset.QuerySet.sum`
* :meth:`~mongoengine.queryset.QuerySet.average`
@@ -51,8 +119,8 @@ the following have been changed:
Default collection naming
-------------------------
Previously it was just lowercase, its now much more pythonic and readable as its
lowercase and underscores, previously ::
Previously it was just lowercase, its now much more pythonic and readable as
its lowercase and underscores, previously ::
class MyAceDocument(Document):
pass
@@ -88,7 +156,8 @@ Alternatively, you can rename your collections eg ::
failure = False
collection_names = [d._get_collection_name() for d in _document_registry.values()]
collection_names = [d._get_collection_name()
for d in _document_registry.values()]
for new_style_name in collection_names:
if not new_style_name: # embedded documents don't have collections
@@ -106,7 +175,8 @@ Alternatively, you can rename your collections eg ::
old_style_name, new_style_name)
else:
db[old_style_name].rename(new_style_name)
print "Renamed: %s to %s" % (old_style_name, new_style_name)
print "Renamed: %s to %s" % (old_style_name,
new_style_name)
if failure:
print "Upgrading collection names failed"


@@ -12,13 +12,12 @@ from signals import *
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
queryset.__all__ + signals.__all__)
VERSION = (0, 6, 11)
VERSION = (0, 7, 9)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
if isinstance(VERSION[-1], basestring):
return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
return '.'.join(map(str, VERSION))
__version__ = get_version()
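For illustration, the new scheme renders a plain tuple and a pre-release tuple (the latter hypothetical) as ::

    VERSION = (0, 7, 9)         # get_version() -> '0.7.9'
    VERSION = (0, 7, 9, 'rc1')  # get_version() -> '0.7.9rc1' (a trailing string is appended without a dot)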

File diff suppressed because it is too large


@@ -31,15 +31,34 @@ class DeReference(object):
items = [i for i in items]
self.max_depth = max_depth
doc_type = None
if instance and instance._fields:
doc_type = instance._fields[name].field
doc_type = instance._fields.get(name)
if hasattr(doc_type, 'field'):
doc_type = doc_type.field
if isinstance(doc_type, ReferenceField):
field = doc_type
doc_type = doc_type.document_type
if all([i.__class__ == doc_type for i in items]):
is_list = not hasattr(items, 'items')
if is_list and all([i.__class__ == doc_type for i in items]):
return items
elif not is_list and all([i.__class__ == doc_type
for i in items.values()]):
return items
elif not field.dbref:
if not hasattr(items, 'items'):
items = [field.to_python(v)
if not isinstance(v, (DBRef, Document)) else v
for v in items]
else:
items = dict([
(k, field.to_python(v))
if not isinstance(v, (DBRef, Document)) else (k, v)
for k, v in items.iteritems()]
)
self.reference_map = self._find_references(items)
self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -115,7 +134,7 @@ class DeReference(object):
elif doc_type is None:
doc = get_document(
''.join(x.capitalize()
for x in col.split('_')))._from_son(ref)
for x in col.split('_')))._from_son(ref)
else:
doc = doc_type._from_son(ref)
object_map[doc.id] = doc
@@ -147,7 +166,7 @@ class DeReference(object):
return self.object_map.get(items['_ref'].id, items)
elif '_types' in items and '_cls' in items:
doc = get_document(items['_cls'])._from_son(items)
doc._data = self._attach_objects(doc._data, depth, doc, name)
doc._data = self._attach_objects(doc._data, depth, doc, None)
return doc
if not hasattr(items, 'items'):
@@ -166,7 +185,7 @@ class DeReference(object):
else:
data[k] = v
if k in self.object_map:
if k in self.object_map and not is_list:
data[k] = self.object_map[k]
elif hasattr(v, '_fields'):
for field_name, field in v._fields.iteritems():


@@ -3,6 +3,8 @@ import datetime
from mongoengine import *
from django.utils.encoding import smart_str
from django.contrib.auth.models import _user_get_all_permissions
from django.contrib.auth.models import _user_has_perm
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _
@@ -104,6 +106,25 @@ class User(Document):
"""
return check_password(raw_password, self.password)
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj)
def has_perm(self, perm, obj=None):
"""
Returns True if the user has the specified permission. This method
queries all available auth backends, but returns immediately if any
backend returns True. Thus, a user who has permission from a single
auth backend is assumed to have permission in general. If an object is
provided, permissions for this specific object are checked.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
# Otherwise we need to check the backends.
return _user_has_perm(self, perm, obj)
@classmethod
def create_user(cls, username, password, email=None):
"""Create (and save) a new user with the given username, password and

@@ -15,13 +15,23 @@ MONGOENGINE_SESSION_DB_ALIAS = getattr(
settings, 'MONGOENGINE_SESSION_DB_ALIAS',
DEFAULT_CONNECTION_NAME)
# a setting for the name of the collection used to store sessions
MONGOENGINE_SESSION_COLLECTION = getattr(
settings, 'MONGOENGINE_SESSION_COLLECTION',
'django_session')
# a setting for whether session data is stored encoded or not
MONGOENGINE_SESSION_DATA_ENCODE = getattr(
settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
True)
class MongoSession(Document):
session_key = fields.StringField(primary_key=True, max_length=40)
session_data = fields.StringField()
session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
else fields.DictField()
expire_date = fields.DateTimeField()
meta = {'collection': 'django_session',
meta = {'collection': MONGOENGINE_SESSION_COLLECTION,
'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
'allow_inheritance': False}
@@ -34,7 +44,10 @@ class SessionStore(SessionBase):
try:
s = MongoSession.objects(session_key=self.session_key,
expire_date__gt=datetime.now())[0]
return self.decode(force_unicode(s.session_data))
if MONGOENGINE_SESSION_DATA_ENCODE:
return self.decode(force_unicode(s.session_data))
else:
return s.session_data
except (IndexError, SuspiciousOperation):
self.create()
return {}
@@ -57,7 +70,10 @@ class SessionStore(SessionBase):
if self.session_key is None:
self._session_key = self._get_new_session_key()
s = MongoSession(session_key=self.session_key)
s.session_data = self.encode(self._get_session(no_load=must_create))
if MONGOENGINE_SESSION_DATA_ENCODE:
s.session_data = self.encode(self._get_session(no_load=must_create))
else:
s.session_data = self._get_session(no_load=must_create)
s.expire_date = self.get_expiry_date()
try:
s.save(force_insert=must_create, safe=True)
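
A hypothetical settings.py fragment exercising the two new settings (collection name and raw, unencoded session data); the values are illustrative:

SESSION_ENGINE = 'mongoengine.django.sessions'
MONGOENGINE_SESSION_COLLECTION = 'my_sessions'    # overrides the 'django_session' default
MONGOENGINE_SESSION_DATA_ENCODE = False           # store the session dict as-is, so it
                                                  # stays queryable (e.g. to expire users)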

@@ -1,4 +1,3 @@
from django.http import Http404
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
from mongoengine.base import ValidationError
@@ -27,6 +26,7 @@ def get_document_or_404(cls, *args, **kwargs):
try:
return queryset.get(*args, **kwargs)
except (queryset._document.DoesNotExist, ValidationError):
from django.http import Http404
raise Http404('No %s matches the given query.' % queryset._document._class_name)
def get_list_or_404(cls, *args, **kwargs):
@@ -42,5 +42,6 @@ def get_list_or_404(cls, *args, **kwargs):
queryset = _get_queryset(cls)
obj_list = list(queryset.filter(*args, **kwargs))
if not obj_list:
from django.http import Http404
raise Http404('No %s matches the given query.' % queryset._document._class_name)
return obj_list
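
A short sketch of the shortcuts with the now-deferred Http404 import; the Post model and view are made up for illustration:

from mongoengine import Document, StringField
from mongoengine.django.shortcuts import get_document_or_404, get_list_or_404

class Post(Document):
    slug = StringField()

def post_detail(request, slug):
    post = get_document_or_404(Post, slug=slug)     # raises Http404 when no match
    related = get_list_or_404(Post, slug__ne=slug)  # raises Http404 when the list is empty
    return post, related   # hand off to whatever rendering you use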

@@ -1,10 +1,28 @@
#coding: utf-8
from django.test import TestCase
from django.conf import settings
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from mongoengine import connect
try:
from django.test import TestCase
from django.conf import settings
except Exception as err:
if PY3:
from unittest import TestCase
# Dummy value so no error
class settings:
MONGO_DATABASE_NAME = 'dummy'
else:
raise err
class MongoTestCase(TestCase):
def setUp(self):
if PY3:
raise SkipTest('django does not have Python 3 support')
"""
TestCase class that clear the collection between the tests
"""

@@ -1,15 +1,19 @@
import warnings
import pymongo
import re
from bson.dbref import DBRef
from mongoengine import signals, queryset
from mongoengine import signals
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
BaseDict, BaseList)
from queryset import OperationError
from queryset import OperationError, NotUniqueError
from connection import get_db, DEFAULT_CONNECTION_NAME
__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']
'DynamicEmbeddedDocument', 'OperationError',
'InvalidCollectionError', 'NotUniqueError']
class InvalidCollectionError(Exception):
@@ -21,8 +25,19 @@ class EmbeddedDocument(BaseDocument):
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
fields on :class:`~mongoengine.Document`\ s through the
:class:`~mongoengine.EmbeddedDocumentField` field type.
A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed,
to create a specialised version of the embedded document that will be
stored in the same collection. To facilitate this behaviour, `_cls` and
`_types` fields are added to documents (hidden through the MongoEngine
interface, though). To disable this behaviour and remove the dependence on
the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
``False`` in the :attr:`meta` dictionary.
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass
__metaclass__ = DocumentMetaclass
def __init__(self, *args, **kwargs):
@@ -87,13 +102,16 @@ class Document(BaseDocument):
system.
By default, _types will be added to the start of every index (that
doesn't contain a list) if allow_inheritence is True. This can be
doesn't contain a list) if allow_inheritance is True. This can be
disabled by either setting types to False on the specific index or
by setting index_types to False on the meta dictionary for the document.
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
@apply
def pk():
"""Primary key alias
"""
@@ -102,6 +120,7 @@ class Document(BaseDocument):
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)
pk = pk()
@classmethod
def _get_db(cls):
@@ -127,8 +146,9 @@ class Document(BaseDocument):
options = cls._collection.options()
if options.get('max') != max_documents or \
options.get('size') != max_size:
msg = ('Cannot create collection "%s" as a capped '
'collection as it already exists') % cls._collection
msg = (('Cannot create collection "%s" as a capped '
'collection as it already exists')
% cls._collection)
raise InvalidCollectionError(msg)
else:
# Create the collection as a capped collection
@@ -142,8 +162,9 @@ class Document(BaseDocument):
cls._collection = db[collection_name]
return cls._collection
def save(self, safe=True, force_insert=False, validate=True, write_options=None,
cascade=None, cascade_kwargs=None, _refs=None):
def save(self, safe=True, force_insert=False, validate=True,
write_options=None, cascade=None, cascade_kwargs=None,
_refs=None):
"""Save the :class:`~mongoengine.Document` to the database. If the
document already exists, it will be updated, otherwise it will be
created.
@@ -156,27 +177,30 @@ class Document(BaseDocument):
updates of existing documents
:param validate: validates the document; set to ``False`` to skip.
:param write_options: Extra keyword arguments are passed down to
:meth:`~pymongo.collection.Collection.save` OR
:meth:`~pymongo.collection.Collection.insert`
which will be used as options for the resultant ``getLastError`` command.
For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will
wait until at least two servers have recorded the write and will force an
fsync on each server being written to.
:param cascade: Sets the flag for cascading saves. You can set a default by setting
"cascade" in the document __meta__
:param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
:meth:`~pymongo.collection.Collection.save` OR
:meth:`~pymongo.collection.Collection.insert`
which will be used as options for the resultant
``getLastError`` command. For example,
``save(..., write_options={w: 2, fsync: True}, ...)`` will
wait until at least two servers have recorded the write and
will force an fsync on the primary server.
:param cascade: Sets the flag for cascading saves. You can set a
default by setting "cascade" in the document __meta__
:param cascade_kwargs: optional kwargs dictionary to be passed through
to cascading saves
:param _refs: A list of processed references used in cascading saves
.. versionchanged:: 0.5
In existing documents it only saves changed fields using set / unset
Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
that have changes are saved as well.
In existing documents it only saves changed fields using
set / unset. Saves are cascaded and any
:class:`~bson.dbref.DBRef` objects that have changes are
saved as well.
.. versionchanged:: 0.6
Cascade saves are optional = defaults to True, if you want fine grain
control then you can turn off using document meta['cascade'] = False
Also you can pass different kwargs to the cascade save using cascade_kwargs
which overwrites the existing kwargs with custom values
Cascade saves are optional = defaults to True, if you want
fine grain control then you can turn off using document
meta['cascade'] = False Also you can pass different kwargs to
the cascade save using cascade_kwargs which overwrites the
existing kwargs with custom values
"""
signals.pre_save.send(self.__class__, document=self)
@@ -194,13 +218,14 @@ class Document(BaseDocument):
collection = self.__class__.objects._collection
if created:
if force_insert:
object_id = collection.insert(doc, safe=safe, **write_options)
object_id = collection.insert(doc, safe=safe,
**write_options)
else:
object_id = collection.save(doc, safe=safe, **write_options)
object_id = collection.save(doc, safe=safe,
**write_options)
else:
object_id = doc['_id']
updates, removals = self._delta()
# Need to add shard key to query, or you get an error
select_dict = {'_id': object_id}
shard_key = self.__class__._meta.get('shard_key', tuple())
@@ -210,11 +235,15 @@ class Document(BaseDocument):
upsert = self._created
if updates:
collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
collection.update(select_dict, {"$set": updates},
upsert=upsert, safe=safe, **write_options)
if removals:
collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)
collection.update(select_dict, {"$unset": removals},
upsert=upsert, safe=safe, **write_options)
cascade = self._meta.get('cascade', True) if cascade is None else cascade
warn_cascade = not cascade and 'cascade' not in self._meta
cascade = (self._meta.get('cascade', True)
if cascade is None else cascade)
if cascade:
kwargs = {
"safe": safe,
@@ -226,39 +255,64 @@ class Document(BaseDocument):
if cascade_kwargs: # Allow granular control over cascades
kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs
self._changed_fields = []
self.cascade_save(**kwargs)
self.cascade_save(warn_cascade=warn_cascade, **kwargs)
except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)'
if u'duplicate key' in unicode(err):
if re.match('^E1100[01] duplicate key', unicode(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
id_field = self._meta['id_field']
self[id_field] = self._fields[id_field].to_python(object_id)
if id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id)
self._changed_fields = []
self._created = False
signals.post_save.send(self.__class__, document=self, created=created)
return self
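
A minimal sketch of the new duplicate-key behaviour, assuming a connected database and a made-up Person document with a unique field:

from mongoengine import Document, StringField, NotUniqueError

class Person(Document):
    email = StringField(unique=True)

Person(email='ross@example.com').save()
try:
    Person(email='ross@example.com').save()
except NotUniqueError, err:
    # previously a plain OperationError; E11000/E11001 now map to NotUniqueError
    print 'duplicate key: %s' % err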
def cascade_save(self, *args, **kwargs):
"""Recursively saves any references / generic references on an object"""
from fields import ReferenceField, GenericReferenceField
def cascade_save(self, warn_cascade=None, *args, **kwargs):
"""Recursively saves any references /
generic references on an objects"""
import fields
_refs = kwargs.get('_refs', []) or []
for name, cls in self._fields.items():
if not isinstance(cls, (ReferenceField, GenericReferenceField)):
if not isinstance(cls, (fields.ReferenceField,
fields.GenericReferenceField)):
continue
ref = getattr(self, name)
if not ref:
if not ref or isinstance(ref, DBRef):
continue
if not getattr(ref, '_changed_fields', True):
continue
ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
if ref and ref_id not in _refs:
if warn_cascade:
msg = ("Cascading saves will default to off in 0.8, "
"please explicitly set `.save(cascade=True)`")
warnings.warn(msg, FutureWarning)
_refs.append(ref_id)
kwargs["_refs"] = _refs
ref.save(**kwargs)
ref._changed_fields = []
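
For context, a brief sketch of silencing the FutureWarning above by being explicit about cascading; Author and BlogPost are made-up models:

from mongoengine import Document, StringField, ReferenceField

class Author(Document):
    name = StringField()

class BlogPost(Document):
    author = ReferenceField(Author, dbref=True)

author = Author(name='Ross').save()
post = BlogPost(author=author).save()   # today this cascades by default
post.save(cascade=True)                  # keep cascading once 0.8 flips the default
post.save(cascade=False)                 # or opt out and save referenced documents yourself
# a per-model default can also be set with meta = {'cascade': True}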
@property
def _object_key(self):
"""Dict to identify object in collection
"""
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
select_dict[k] = getattr(self, k)
return select_dict
def update(self, **kwargs):
"""Performs an update on the :class:`~mongoengine.Document`
A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
@@ -270,11 +324,7 @@ class Document(BaseDocument):
raise OperationError('attempt to update a document not yet saved')
# Need to add shard key to query, or you get an error
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
select_dict[k] = getattr(self, k)
return self.__class__.objects(**select_dict).update_one(**kwargs)
return self.__class__.objects(**self._object_key).update_one(**kwargs)
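
A short sketch of the refactored convenience methods: both update() and delete() now filter by _object_key, so any shard_key fields are included automatically (Person is the illustrative unique-email model from the sketch above):

person = Person(email='lawley@example.com').save()
person.update(set__email='ross.lawley@example.com')  # one-document update keyed on pk + shard_key
person.delete()                                      # delete() uses the same _object_key filter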
def delete(self, safe=False):
"""Delete the :class:`~mongoengine.Document` from the database. This
@@ -285,7 +335,7 @@ class Document(BaseDocument):
signals.pre_delete.send(self.__class__, document=self)
try:
self.__class__.objects(pk=self.pk).delete(safe=safe)
self.__class__.objects(**self._object_key).delete(safe=safe)
except pymongo.errors.OperationFailure, err:
message = u'Could not delete document (%s)' % err.message
raise OperationError(message)
@@ -298,8 +348,8 @@ class Document(BaseDocument):
.. versionadded:: 0.5
"""
from dereference import DeReference
self._data = DeReference()(self._data, max_depth)
import dereference
self._data = dereference.DeReference()(self._data, max_depth)
return self
def reload(self, max_depth=1):
@@ -311,7 +361,12 @@ class Document(BaseDocument):
id_field = self._meta['id_field']
obj = self.__class__.objects(
**{id_field: self[id_field]}
).first().select_related(max_depth=max_depth)
).limit(1).select_related(max_depth=max_depth)
if obj:
obj = obj[0]
else:
msg = "Reloaded document has been deleted"
raise OperationError(msg)
for field in self._fields:
setattr(self, field, self._reload(field, obj[field]))
if self._dynamic:
@@ -347,17 +402,18 @@ class Document(BaseDocument):
"""This method registers the delete rules to apply when removing this
object.
"""
cls._meta['delete_rules'][(document_cls, field_name)] = rule
delete_rules = cls._meta.get('delete_rules') or {}
delete_rules[(document_cls, field_name)] = rule
cls._meta['delete_rules'] = delete_rules
@classmethod
def drop_collection(cls):
"""Drops the entire collection associated with this
:class:`~mongoengine.Document` type from the database.
"""
from mongoengine.queryset import QuerySet
db = cls._get_db()
db.drop_collection(cls._get_collection_name())
QuerySet._reset_already_indexed(cls)
queryset.QuerySet._reset_already_indexed(cls)
class DynamicDocument(Document):
@@ -369,11 +425,16 @@ class DynamicDocument(Document):
:class:`~mongoengine.DynamicField` and data can be attributed to that
field.
..note::
.. note::
There is one caveat on Dynamic Documents: fields cannot start with `_`
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
_dynamic = True
def __delattr__(self, *args, **kwargs):
@@ -392,7 +453,11 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
information about dynamic documents.
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass
__metaclass__ = DocumentMetaclass
_dynamic = True
def __delattr__(self, *args, **kwargs):

@@ -1,18 +1,24 @@
import datetime
import time
import decimal
import gridfs
import itertools
import re
import time
import urllib2
import urlparse
import uuid
import warnings
from operator import itemgetter
import gridfs
from bson import Binary, DBRef, SON, ObjectId
from mongoengine.python_support import (PY3, bin_type, txt_type,
str_types, StringIO)
from base import (BaseField, ComplexBaseField, ObjectIdField,
ValidationError, get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
from connection import get_db, DEFAULT_CONNECTION_NAME
from operator import itemgetter
try:
@@ -21,12 +27,6 @@ except ImportError:
Image = None
ImageOps = None
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
@@ -49,10 +49,16 @@ class StringField(BaseField):
super(StringField, self).__init__(**kwargs)
def to_python(self, value):
return unicode(value)
if isinstance(value, unicode):
return value
try:
value = value.decode('utf-8')
except:
pass
return value
def validate(self, value):
if not isinstance(value, (str, unicode)):
if not isinstance(value, basestring):
self.error('StringField only accepts string values')
if self.max_length is not None and len(value) > self.max_length:
@@ -97,25 +103,30 @@ class URLField(StringField):
.. versionadded:: 0.3
"""
URL_REGEX = re.compile(
r'^https?://'
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'
r'localhost|'
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
r'(?::\d+)?'
r'(?:/?|[/?]\S+)$', re.IGNORECASE
)
_URL_REGEX = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def __init__(self, verify_exists=False, **kwargs):
def __init__(self, verify_exists=False, url_regex=None, **kwargs):
self.verify_exists = verify_exists
self.url_regex = url_regex or self._URL_REGEX
super(URLField, self).__init__(**kwargs)
def validate(self, value):
if not URLField.URL_REGEX.match(value):
if not self.url_regex.match(value):
self.error('Invalid URL: %s' % value)
return
if self.verify_exists:
import urllib2
warnings.warn(
"The URLField verify_exists argument has intractable security "
"and performance issues. Accordingly, it has been deprecated.",
DeprecationWarning
)
try:
request = urllib2.Request(value)
urllib2.urlopen(request)
@@ -138,6 +149,7 @@ class EmailField(StringField):
def validate(self, value):
if not EmailField.EMAIL_REGEX.match(value):
self.error('Invalid Mail-address: %s' % value)
super(EmailField, self).validate(value)
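
A brief sketch of the effect of calling up to StringField validation: length (and choices) constraints now apply to e-mail values too; Contact is a made-up model:

from mongoengine import Document, EmailField, ValidationError

class Contact(Document):
    email = EmailField(max_length=64)   # StringField constraints are now enforced as well

try:
    Contact(email='x' * 100 + '@example.com').validate()
except ValidationError, err:
    print 'rejected: %s' % err          # over-long address now fails validation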
class IntField(BaseField):
@@ -149,7 +161,11 @@ class IntField(BaseField):
super(IntField, self).__init__(**kwargs)
def to_python(self, value):
return int(value)
try:
value = int(value)
except ValueError:
pass
return value
def validate(self, value):
try:
@@ -164,6 +180,9 @@ class IntField(BaseField):
self.error('Integer value is too large')
def prepare_query_value(self, op, value):
if value is None:
return value
return int(value)
@@ -176,7 +195,11 @@ class FloatField(BaseField):
super(FloatField, self).__init__(**kwargs)
def to_python(self, value):
return float(value)
try:
value = float(value)
except ValueError:
pass
return value
def validate(self, value):
if isinstance(value, int):
@@ -191,6 +214,9 @@ class FloatField(BaseField):
self.error('Float value is too large')
def prepare_query_value(self, op, value):
if value is None:
return value
return float(value)
@@ -205,9 +231,14 @@ class DecimalField(BaseField):
super(DecimalField, self).__init__(**kwargs)
def to_python(self, value):
original_value = value
if not isinstance(value, basestring):
value = unicode(value)
return decimal.Decimal(value)
try:
value = decimal.Decimal(value)
except ValueError:
return original_value
return value
def to_mongo(self, value):
return unicode(value)
@@ -235,7 +266,11 @@ class BooleanField(BaseField):
"""
def to_python(self, value):
return bool(value)
try:
value = bool(value)
except ValueError:
pass
return value
def validate(self, value):
if not isinstance(value, bool):
@@ -366,6 +401,8 @@ class ComplexDateTimeField(StringField):
data = super(ComplexDateTimeField, self).__get__(instance, owner)
if data == None:
return datetime.datetime.now()
if isinstance(data, datetime.datetime):
return data
return self._convert_from_string(data)
def __set__(self, instance, value):
@@ -378,7 +415,11 @@ class ComplexDateTimeField(StringField):
'ComplexDateTimeField')
def to_python(self, value):
return self._convert_from_string(value)
original_value = value
try:
return self._convert_from_string(value)
except:
return original_value
def to_mongo(self, value):
return self._convert_from_datetime(value)
@@ -442,8 +483,9 @@ class GenericEmbeddedDocumentField(BaseField):
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
..note :: You can use the choices param to limit the acceptable
EmbeddedDocument types
.. note ::
You can use the choices param to limit the acceptable
EmbeddedDocument types
"""
def prepare_query_value(self, op, value):
@@ -474,7 +516,10 @@ class GenericEmbeddedDocumentField(BaseField):
class DynamicField(BaseField):
"""Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
"""A truly dynamic field type capable of handling different and varying
types of data.
Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def to_mongo(self, value):
"""Convert a Python type to a MongoDBcompatible type.
@@ -518,6 +563,8 @@ class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database.
If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
.. note::
Required means it cannot be empty - as the default for ListFields is []
"""
@@ -656,7 +703,8 @@ class ReferenceField(BaseField):
* NULLIFY - Updates the reference to null.
* CASCADE - Deletes the documents associated with the reference.
* DENY - Prevent the deletion of the reference object.
* PULL - Pull the reference from a :class:`~mongoengine.ListField` of references
* PULL - Pull the reference from a :class:`~mongoengine.ListField`
of references
Alternative syntax for registering delete rules (useful when implementing
bi-directional delete rules)
@@ -669,12 +717,19 @@ class ReferenceField(BaseField):
Bar.register_delete_rule(Foo, 'bar', NULLIFY)
.. note ::
`reverse_delete_rules` do not trigger pre / post delete signals to be
triggered.
.. versionchanged:: 0.5 added `reverse_delete_rule`
"""
def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs):
def __init__(self, document_type, dbref=None,
reverse_delete_rule=DO_NOTHING, **kwargs):
"""Initialises the Reference Field.
:param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
or as the :class:`~pymongo.objectid.ObjectId`.id .
:param reverse_delete_rule: Determines what to do when the referring
object is deleted
"""
@@ -682,6 +737,13 @@ class ReferenceField(BaseField):
if not issubclass(document_type, (Document, basestring)):
self.error('Argument to ReferenceField constructor must be a '
'document class or a string')
if dbref is None:
msg = ("ReferenceFields will default to using ObjectId "
" strings in 0.8, set DBRef=True if this isn't desired")
warnings.warn(msg, FutureWarning)
self.dbref = dbref if dbref is not None else True # To change in 0.8
self.document_type_obj = document_type
self.reverse_delete_rule = reverse_delete_rule
super(ReferenceField, self).__init__(**kwargs)
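
For illustration, a minimal sketch of the new dbref flag; Publisher and Book are made-up models, not part of this changeset:

from mongoengine import Document, StringField, ReferenceField

class Publisher(Document):
    name = StringField()

class Book(Document):
    # dbref=False stores only the ObjectId (the planned 0.8 default);
    # dbref=True keeps the legacy DBRef form and silences the FutureWarning
    publisher = ReferenceField(Publisher, dbref=False)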
@@ -704,8 +766,9 @@ class ReferenceField(BaseField):
# Get value from document instance if available
value = instance._data.get(self.name)
# Dereference DBRefs
if isinstance(value, (DBRef)):
if isinstance(value, DBRef):
value = self.document_type._get_db().dereference(value)
if value is not None:
instance._data[self.name] = self.document_type._from_son(value)
@@ -714,6 +777,10 @@ class ReferenceField(BaseField):
def to_mongo(self, document):
if isinstance(document, DBRef):
if not self.dbref:
return document.id
return document
elif not self.dbref and isinstance(document, basestring):
return document
id_field_name = self.document_type._meta['id_field']
@@ -721,7 +788,7 @@ class ReferenceField(BaseField):
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.id
id_ = document.pk
if id_ is None:
self.error('You can only reference documents once they have'
' been saved to the database')
@@ -729,18 +796,30 @@ class ReferenceField(BaseField):
id_ = document
id_ = id_field.to_mongo(id_)
collection = self.document_type._get_collection_name()
return DBRef(collection, id_)
if self.dbref:
collection = self.document_type._get_collection_name()
return DBRef(collection, id_)
return id_
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type.
"""
if (not self.dbref and
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
collection = self.document_type._get_collection_name()
value = DBRef(collection, self.document_type.id.to_python(value))
return value
def prepare_query_value(self, op, value):
if value is None:
return None
return self.to_mongo(value)
def validate(self, value):
if not isinstance(value, (self.document_type, DBRef)):
self.error('A ReferenceField only accepts DBRef')
self.error("A ReferenceField only accepts DBRef or documents")
if isinstance(value, Document) and value.id is None:
self.error('You can only reference documents once they have been '
@@ -754,10 +833,12 @@ class GenericReferenceField(BaseField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
that will be automatically dereferenced on access (lazily).
..note :: Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register it.
.. note ::
* Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register
it.
..note :: You can use the choices param to limit the acceptable Document types
* You can use the choices param to limit the acceptable Document types
.. versionadded:: 0.3
"""
@@ -828,16 +909,20 @@ class BinaryField(BaseField):
self.max_bytes = max_bytes
super(BinaryField, self).__init__(**kwargs)
def __set__(self, instance, value):
"""Handle bytearrays in python 3.1"""
if PY3 and isinstance(value, bytearray):
value = bin_type(value)
return super(BinaryField, self).__set__(instance, value)
def to_mongo(self, value):
return Binary(value)
def to_python(self, value):
# Returns str not unicode as this is binary data
return str(value)
def validate(self, value):
if not isinstance(value, str):
self.error('BinaryField only accepts string values')
if not isinstance(value, (bin_type, txt_type, Binary)):
self.error("BinaryField only accepts instances of "
"(%s, %s, Binary)" % (
bin_type.__name__, txt_type.__name__))
if self.max_bytes is not None and len(value) > self.max_bytes:
self.error('Binary value is too long')
@@ -893,9 +978,13 @@ class GridFSProxy(object):
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
def __cmp__(self, other):
return cmp((self.grid_id, self.collection_name, self.db_alias),
(other.grid_id, other.collection_name, other.db_alias))
def __eq__(self, other):
if isinstance(other, GridFSProxy):
return ((self.grid_id == other.grid_id) and
(self.collection_name == other.collection_name) and
(self.db_alias == other.db_alias))
else:
return False
@property
def fs(self):
@@ -1008,7 +1097,8 @@ class FileField(BaseField):
def __set__(self, instance, value):
key = self.name
if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str):
if ((hasattr(value, 'read') and not
isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
# using "FileField() = file/string" notation
grid_file = instance._data.get(self.name)
# If a file already exists, delete it
@@ -1064,6 +1154,7 @@ class ImageGridFsProxy(GridFSProxy):
try:
img = Image.open(file_obj)
img_format = img.format
except:
raise ValidationError('Invalid image')
@@ -1098,20 +1189,20 @@ class ImageGridFsProxy(GridFSProxy):
if thumbnail:
thumb_id = self._put_thumbnail(thumbnail,
img.format)
img_format)
else:
thumb_id = None
w, h = img.size
io = StringIO()
img.save(io, img.format)
img.save(io, img_format)
io.seek(0)
return super(ImageGridFsProxy, self).put(io,
width=w,
height=h,
format=img.format,
format=img_format,
thumbnail_id=thumb_id,
**kwargs)
@@ -1197,11 +1288,15 @@ class ImageField(FileField):
params_size = ('width', 'height', 'force')
extra_args = dict(size=size, thumbnail_size=thumbnail_size)
for att_name, att in extra_args.items():
if att and (isinstance(att, tuple) or isinstance(att, list)):
setattr(self, att_name, dict(
map(None, params_size, att)))
else:
setattr(self, att_name, None)
value = None
if isinstance(att, (tuple, list)):
if PY3:
value = dict(itertools.zip_longest(params_size, att,
fillvalue=None))
else:
value = dict(map(None, params_size, att))
setattr(self, att_name, value)
super(ImageField, self).__init__(
collection_name=collection_name,
@@ -1243,24 +1338,35 @@ class SequenceField(IntField):
.. versionadded:: 0.5
"""
def __init__(self, collection_name=None, db_alias = None, *args, **kwargs):
def __init__(self, collection_name=None, db_alias=None, sequence_name=None, *args, **kwargs):
self.collection_name = collection_name or 'mongoengine.counters'
self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
self.sequence_name = sequence_name
return super(SequenceField, self).__init__(*args, **kwargs)
def generate_new_value(self):
"""
Generate and Increment the counter
"""
sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
self.name)
collection = get_db(alias = self.db_alias )[self.collection_name]
sequence_name = self.get_sequence_name()
sequence_id = "%s.%s" % (sequence_name, self.name)
collection = get_db(alias=self.db_alias)[self.collection_name]
counter = collection.find_and_modify(query={"_id": sequence_id},
update={"$inc": {"next": 1}},
new=True,
upsert=True)
return counter['next']
def get_sequence_name(self):
if self.sequence_name:
return self.sequence_name
owner = self.owner_document
if issubclass(owner, Document):
return owner._get_collection_name()
else:
return ''.join('_%s' % c if c.isupper() else c
for c in owner._class_name).strip('_').lower()
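
A short usage sketch of the new sequence_name override; Ticket is a made-up model:

from mongoengine import Document, SequenceField

class Ticket(Document):
    # counter lives in 'mongoengine.counters' under _id 'ticket_numbers.number';
    # without sequence_name the owner collection (or class) name is used instead
    number = SequenceField(sequence_name='ticket_numbers')

t = Ticket()
t.save()
print t.number   # 1 on the first save, then 2, ...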
def __get__(self, instance, owner):
if instance is None:
@@ -1276,7 +1382,7 @@ class SequenceField(IntField):
instance._data[self.name] = value
instance._mark_as_changed(self.name)
return value
return int(value) if value else None
def __set__(self, instance, value):
@@ -1296,17 +1402,44 @@ class UUIDField(BaseField):
.. versionadded:: 0.6
"""
_binary = None
def __init__(self, **kwargs):
def __init__(self, binary=None, **kwargs):
"""
Store UUID data in the database
:param binary: (optional) boolean store as binary.
.. versionchanged:: 0.6.19
"""
if binary is None:
binary = False
msg = ("UUIDFields will soon default to store as binary, please "
"configure binary=False if you wish to store as a string")
warnings.warn(msg, FutureWarning)
self._binary = binary
super(UUIDField, self).__init__(**kwargs)
def to_python(self, value):
if not isinstance(value, basestring):
value = unicode(value)
return uuid.UUID(value)
if not self._binary:
original_value = value
try:
if not isinstance(value, basestring):
value = unicode(value)
return uuid.UUID(value)
except:
return original_value
return value
def to_mongo(self, value):
return unicode(value)
if not self._binary:
return unicode(value)
return value
def prepare_query_value(self, op, value):
if value is None:
return None
return self.to_mongo(value)
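
A brief sketch of choosing a UUID storage format explicitly, which also avoids the FutureWarning above; Asset is a made-up model:

from mongoengine import Document, UUIDField

class Asset(Document):
    uid = UUIDField(binary=False)   # keep storing UUIDs as unicode strings (today's behaviour)
    # binary=True opts in to the upcoming default of BSON binary storage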
def validate(self, value):
if not isinstance(value, uuid.UUID):

@@ -0,0 +1,61 @@
"""Helper functions and types to aid with Python 2.5 - 3 support."""
import sys
PY3 = sys.version_info[0] == 3
PY25 = sys.version_info[:2] == (2, 5)
UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264
if PY3:
import codecs
from io import BytesIO as StringIO
# return s converted to binary. b('test') should be equivalent to b'test'
def b(s):
return codecs.latin_1_encode(s)[0]
bin_type = bytes
txt_type = str
else:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Conversion to binary only necessary in Python 3
def b(s):
return s
bin_type = str
txt_type = unicode
str_types = (bin_type, txt_type)
if PY25:
def product(*args, **kwds):
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x + [y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
reduce = reduce
else:
from itertools import product
from functools import reduce
# For use with Python 2.5
# converts all keys from unicode to str for d and all nested dictionaries
def to_str_keys_recursive(d):
if isinstance(d, list):
for val in d:
if isinstance(val, (dict, list)):
to_str_keys_recursive(val)
elif isinstance(d, dict):
for key, val in d.items():
if isinstance(val, (dict, list)):
to_str_keys_recursive(val)
if isinstance(key, unicode):
d[str(key)] = d.pop(key)
else:
raise ValueError("non list/dict parameter not allowed")

@@ -4,6 +4,11 @@ import copy
import itertools
import operator
from collections import defaultdict
from functools import partial
from mongoengine.python_support import product, reduce
import pymongo
from bson.code import Code
@@ -40,6 +45,10 @@ class OperationError(Exception):
pass
class NotUniqueError(OperationError):
pass
RE_TYPE = type(re.compile(''))
@@ -118,7 +127,7 @@ class QueryTreeTransformerVisitor(QNodeVisitor):
# the necessary parts. Then for each $or part, create a new query
# that ANDs the necessary part with the $or part.
clauses = []
for or_group in itertools.product(*or_groups):
for or_group in product(*or_groups):
q_object = reduce(lambda a, b: a & b, and_parts, Q())
q_object = reduce(lambda a, b: a & b, or_group, q_object)
clauses.append(q_object)
@@ -209,7 +218,7 @@ class QNode(object):
def _combine(self, other, operation):
"""Combine this node with another node into a QCombination object.
"""
if other.empty:
if getattr(other, 'empty', True):
return self
if self.empty:
@@ -327,6 +336,7 @@ class QuerySet(object):
"""
__already_indexed = set()
__dereference = False
def __init__(self, document, collection):
self._document = document
@@ -341,11 +351,14 @@ class QuerySet(object):
self._timeout = True
self._class_check = True
self._slave_okay = False
self._iter = False
self._scalar = []
self._as_pymongo = False
self._as_pymongo_coerce = False
# If inheritance is allowed, only return instances and instances of
# subclasses of the class being used
if document._meta.get('allow_inheritance'):
if document._meta.get('allow_inheritance') != False:
self._initial_query = {'_types': self._document._class_name}
self._loaded_fields = QueryFieldList(always_include=['_cls'])
self._cursor_obj = None
@@ -387,12 +400,13 @@ class QuerySet(object):
or a **-** to determine the index ordering
"""
index_spec = QuerySet._build_index_spec(self._document, key_or_list)
self._collection.ensure_index(
index_spec['fields'],
drop_dups=drop_dups,
background=background,
sparse=index_spec.get('sparse', False),
unique=index_spec.get('unique', False))
index_spec = index_spec.copy()
fields = index_spec.pop('fields')
index_spec['drop_dups'] = drop_dups
index_spec['background'] = background
index_spec.update(kwargs)
self._collection.ensure_index(fields, **index_spec)
return self
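
A short sketch of the more permissive ensure_index(): extra keyword arguments now pass straight through to pymongo; Page is a made-up model:

from mongoengine import Document, StringField, DateTimeField

class Page(Document):
    title = StringField()
    created = DateTimeField()

# '-' gives a descending index; background is forwarded to pymongo
Page.objects.ensure_index('-created', background=True)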
def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
@@ -435,7 +449,7 @@ class QuerySet(object):
"""
background = self._document._meta.get('index_background', False)
drop_dups = self._document._meta.get('index_drop_dups', False)
index_opts = self._document._meta.get('index_opts', {})
index_opts = self._document._meta.get('index_opts') or {}
index_types = self._document._meta.get('index_types', True)
# determine if an index which we are creating includes
@@ -443,6 +457,7 @@ class QuerySet(object):
# an extra index on _type, as mongodb will use the existing
# index to service queries against _type
types_indexed = False
def includes_types(fields):
first_field = None
if len(fields):
@@ -459,13 +474,15 @@ class QuerySet(object):
background=background, drop_dups=drop_dups, **index_opts)
# Ensure document-defined indexes are created
if self._document._meta['indexes']:
for spec in self._document._meta['indexes']:
types_indexed = types_indexed or includes_types(spec['fields'])
if self._document._meta['index_specs']:
index_spec = self._document._meta['index_specs']
for spec in index_spec:
spec = spec.copy()
fields = spec.pop('fields')
types_indexed = types_indexed or includes_types(fields)
opts = index_opts.copy()
opts['unique'] = spec.get('unique', False)
opts['sparse'] = spec.get('sparse', False)
self._collection.ensure_index(spec['fields'],
opts.update(spec)
self._collection.ensure_index(fields,
background=background, **opts)
# If _types is being used (for polymorphism), it needs an index,
@@ -480,19 +497,30 @@ class QuerySet(object):
self._collection.ensure_index(index_spec,
background=background, **index_opts)
@classmethod
def _build_index_spec(cls, doc_cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec.
"""
if isinstance(spec, basestring):
spec = {'fields': [spec]}
if isinstance(spec, (list, tuple)):
spec = {'fields': spec}
elif isinstance(spec, (list, tuple)):
spec = {'fields': list(spec)}
elif isinstance(spec, dict):
spec = dict(spec)
index_list = []
use_types = doc_cls._meta.get('allow_inheritance', True)
direction = None
allow_inheritance = doc_cls._meta.get('allow_inheritance') != False
# If sparse - dont include types
use_types = allow_inheritance and not spec.get('sparse', False)
for key in spec['fields']:
# If inherited spec continue
if isinstance(key, (list, tuple)):
continue
# Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
direction = pymongo.ASCENDING
if key.startswith("-"):
@@ -507,24 +535,23 @@ class QuerySet(object):
parts = key.split('.')
if parts in (['pk'], ['id'], ['_id']):
key = '_id'
fields = []
else:
fields = QuerySet._lookup_field(doc_cls, parts)
parts = [field if field == '_id' else field.db_field for field in fields]
parts = [field if field == '_id' else field.db_field
for field in fields]
key = '.'.join(parts)
index_list.append((key, direction))
# If sparse - dont include types
if spec.get('sparse', False):
use_types = False
# Check if a list field is being used, don't use _types if it is
if use_types and not all(f._index_with_types for f in fields):
use_types = False
# If _types is being used, prepend it to every specified index
index_types = doc_cls._meta.get('index_types', True)
allow_inheritance = doc_cls._meta.get('allow_inheritance')
if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
if (spec.get('types', index_types) and use_types
and direction is not pymongo.GEO2D):
index_list.insert(0, ('_types', 1))
spec['fields'] = index_list
@@ -583,11 +610,13 @@ class QuerySet(object):
if self._where_clause:
self._cursor_obj.where(self._where_clause)
# apply default ordering
if self._ordering:
# Apply query ordering
self._cursor_obj.sort(self._ordering)
elif self._document._meta['ordering']:
# Otherwise, apply the ordering from the document model
self.order_by(*self._document._meta['ordering'])
self._cursor_obj.sort(self._ordering)
if self._limit is not None:
self._cursor_obj.limit(self._limit - (self._skip or 0))
@@ -597,7 +626,6 @@ class QuerySet(object):
if self._hint != -1:
self._cursor_obj.hint(self._hint)
return self._cursor_obj
@classmethod
@@ -638,7 +666,7 @@ class QuerySet(object):
from mongoengine.fields import ReferenceField, GenericReferenceField
if isinstance(field, (ReferenceField, GenericReferenceField)):
raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
if getattr(field, 'field', None):
if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name)
else:
# Look up subfield on the previous field
@@ -675,6 +703,7 @@ class QuerySet(object):
custom_operators = ['match']
mongo_query = {}
merge_query = defaultdict(list)
for key, value in query.items():
if key == "__raw__":
mongo_query.update(value)
@@ -701,7 +730,7 @@ class QuerySet(object):
cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, str):
if isinstance(field, basestring):
parts.append(field)
append_field = False
else:
@@ -762,8 +791,23 @@ class QuerySet(object):
key = '.'.join(parts)
if op is None or key not in mongo_query:
mongo_query[key] = value
elif key in mongo_query and isinstance(mongo_query[key], dict):
mongo_query[key].update(value)
elif key in mongo_query:
if key in mongo_query and isinstance(mongo_query[key], dict):
mongo_query[key].update(value)
else:
# Store for manually merging later
merge_query[key].append(value)
# The queryset has been filter in such a way we must manually merge
for k, v in merge_query.items():
merge_query[k].append(mongo_query[k])
del mongo_query[k]
if isinstance(v, list):
value = [{k:val} for val in v]
if '$and' in mongo_query.keys():
mongo_query['$and'].append(value)
else:
mongo_query['$and'] = value
return mongo_query
@@ -803,19 +847,18 @@ class QuerySet(object):
keyword argument called :attr:`defaults`.
.. note:: This requires two separate operations and therefore a
race condition exists. Because there are no transactions in mongoDB
other approaches should be investigated, to ensure you don't
accidently duplicate data when using this method.
race condition exists. Because there are no transactions in mongoDB
other approaches should be investigated, to ensure you don't
accidentally duplicate data when using this method.
:param write_options: optional extra keyword arguments used if we
have to create a new document.
Passes any write_options onto :meth:`~mongoengine.Document.save`
.. versionadded:: 0.3
:param auto_save: if the object is to be saved automatically if not found.
.. versionadded:: 0.6
.. versionchanged:: 0.6 - added `auto_save`
.. versionadded:: 0.3
"""
defaults = query.get('defaults', {})
if 'defaults' in query:
@@ -888,7 +931,7 @@ class QuerySet(object):
if not isinstance(doc, self._document):
msg = "Some documents inserted aren't instances of %s" % str(self._document)
raise OperationError(msg)
if doc.pk:
if doc.pk and not doc._created:
msg = "Some documents have ObjectIds use doc.update() instead"
raise OperationError(msg)
raw.append(doc.to_mongo())
@@ -898,8 +941,11 @@ class QuerySet(object):
ids = self._collection.insert(raw, **write_options)
except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)'
if u'duplicate key' in unicode(err):
if re.match('^E1100[01] duplicate key', unicode(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
if not load_bulk:
@@ -944,6 +990,9 @@ class QuerySet(object):
for doc in docs:
doc_map[doc['_id']] = self._get_scalar(
self._document._from_son(doc))
elif self._as_pymongo:
for doc in docs:
doc_map[doc['_id']] = self._get_as_pymongo(doc)
else:
for doc in docs:
doc_map[doc['_id']] = self._document._from_son(doc)
@@ -953,12 +1002,16 @@ class QuerySet(object):
def next(self):
"""Wrap the result in a :class:`~mongoengine.Document` object.
"""
self._iter = True
try:
if self._limit == 0:
raise StopIteration
if self._scalar:
return self._get_scalar(self._document._from_son(
self._cursor.next()))
if self._as_pymongo:
return self._get_as_pymongo(self._cursor.next())
return self._document._from_son(self._cursor.next())
except StopIteration, e:
self.rewind()
@@ -969,6 +1022,7 @@ class QuerySet(object):
.. versionadded:: 0.3
"""
self._iter = False
self._cursor.rewind()
def count(self):
@@ -997,6 +1051,8 @@ class QuerySet(object):
:class:`~bson.code.Code` or string
:param output: output collection name, if set to 'inline' will try to
use :class:`~pymongo.collection.Collection.inline_map_reduce`
This can also be a dictionary containing output options
see: http://docs.mongodb.org/manual/reference/commands/#mapReduce
:param finalize_f: finalize function, an optional function that
performs any post-reduction processing.
:param scope: values to insert into map/reduce global scope. Optional.
@@ -1138,6 +1194,8 @@ class QuerySet(object):
if self._scalar:
return self._get_scalar(self._document._from_son(
self._cursor[key]))
if self._as_pymongo:
return self._get_as_pymongo(self._cursor.next())
return self._document._from_son(self._cursor[key])
raise AttributeError
@@ -1148,9 +1206,10 @@ class QuerySet(object):
.. versionadded:: 0.4
.. versionchanged:: 0.5 - Fixed handling references
.. versionchanged:: 0.6 - Improved db_field reference handling
"""
from dereference import DeReference
return DeReference()(self._cursor.distinct(field), 1)
return self._dereference(self._cursor.distinct(field), 1,
name=field, instance=self._document)
def only(self, *fields):
"""Load only a subset of this document's fields. ::
@@ -1255,7 +1314,7 @@ class QuerySet(object):
key_list.append((key, direction))
self._ordering = key_list
self._cursor.sort(key_list)
return self
def explain(self, format=False):
@@ -1305,9 +1364,16 @@ class QuerySet(object):
"""
doc = self._document
# Handle deletes where skips or limits have been applied
if self._skip or self._limit:
for doc in self:
doc.delete()
return
delete_rules = doc._meta.get('delete_rules') or {}
# Check for DENY rules before actually deleting/nullifying any other
# references
for rule_entry in doc._meta['delete_rules']:
for rule_entry in delete_rules:
document_cls, field_name = rule_entry
rule = doc._meta['delete_rules'][rule_entry]
if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0:
@@ -1315,12 +1381,14 @@ class QuerySet(object):
(document_cls.__name__, field_name)
raise OperationError(msg)
for rule_entry in doc._meta['delete_rules']:
for rule_entry in delete_rules:
document_cls, field_name = rule_entry
rule = doc._meta['delete_rules'][rule_entry]
if rule == CASCADE:
ref_q = document_cls.objects(**{field_name + '__in': self})
if doc != document_cls or (doc == document_cls and ref_q.count() > 0):
ref_q_count = ref_q.count()
if (doc != document_cls and ref_q_count > 0
or (doc == document_cls and ref_q_count > 0)):
ref_q.delete(safe=safe)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
@@ -1339,6 +1407,8 @@ class QuerySet(object):
"""
operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all',
'pull', 'pull_all', 'add_to_set']
match_operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not']
mongo_update = {}
for key, value in update.items():
@@ -1362,6 +1432,10 @@ class QuerySet(object):
elif op == 'add_to_set':
op = op.replace('_to_set', 'ToSet')
match = None
if parts[-1] in match_operators:
match = parts.pop()
if _doc_cls:
# Switch field names to proper names [set in Field(name='foo')]
fields = QuerySet._lookup_field(_doc_cls, parts)
@@ -1370,7 +1444,7 @@ class QuerySet(object):
cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, str):
if isinstance(field, basestring):
# Convert the S operator to $
if field == 'S':
field = '$'
@@ -1384,26 +1458,39 @@ class QuerySet(object):
# Convert value to proper value
field = cleaned_fields[-1]
if op in (None, 'set', 'push', 'pull', 'addToSet'):
if op in (None, 'set', 'push', 'pull'):
if field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'):
value = [field.prepare_query_value(op, v) for v in value]
elif op == 'addToSet':
if isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif field.required or value is not None:
value = field.prepare_query_value(op, value)
if match:
match = '$' + match
value = {match: value}
key = '.'.join(parts)
if not op:
raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value")
raise InvalidQueryError("Updates must supply an operation "
"eg: set__FIELD=value")
if 'pull' in op and '.' in key:
# Dot operators don't work on pull operations
# it uses nested dict syntax
if op == 'pullAll':
raise InvalidQueryError("pullAll operations only support a single field depth")
raise InvalidQueryError("pullAll operations only support "
"a single field depth")
parts.reverse()
for key in parts:
value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {"$each": value}}
else:
value = {key: value}
key = '$' + op
@@ -1496,8 +1583,6 @@ class QuerySet(object):
def lookup(obj, name):
chunks = name.split('__')
for chunk in chunks:
if hasattr(obj, '_db_field_map'):
chunk = obj._db_field_map.get(chunk, chunk)
obj = getattr(obj, chunk)
return obj
@@ -1507,6 +1592,48 @@ class QuerySet(object):
return tuple(data)
def _get_as_pymongo(self, row):
# Extract which fields paths we should follow if .fields(...) was
# used. If not, handle all fields.
if not getattr(self, '__as_pymongo_fields', None):
self.__as_pymongo_fields = []
for field in self._loaded_fields.fields - set(['_cls', '_id', '_types']):
self.__as_pymongo_fields.append(field)
while '.' in field:
field, _ = field.rsplit('.', 1)
self.__as_pymongo_fields.append(field)
all_fields = not self.__as_pymongo_fields
def clean(data, path=None):
path = path or ''
if isinstance(data, dict):
new_data = {}
for key, value in data.iteritems():
new_path = '%s.%s' % (path, key) if path else key
if all_fields or new_path in self.__as_pymongo_fields:
new_data[key] = clean(value, path=new_path)
data = new_data
elif isinstance(data, list):
data = [clean(d, path=path) for d in data]
else:
if self._as_pymongo_coerce:
# If we need to coerce types, we need to determine the
# type of this field and use the corresponding .to_python(...)
from mongoengine.fields import EmbeddedDocumentField
obj = self._document
for chunk in path.split('.'):
obj = getattr(obj, chunk, None)
if obj is None:
break
elif isinstance(obj, EmbeddedDocumentField):
obj = obj.document_type
if obj and data is not None:
data = obj.to_python(data)
return data
return clean(row)
def scalar(self, *fields):
"""Instead of returning Document instances, return either a specific
value or a tuple of values in order.
@@ -1529,6 +1656,16 @@ class QuerySet(object):
"""An alias for scalar"""
return self.scalar(*fields)
def as_pymongo(self, coerce_types=False):
"""Instead of returning Document instances, return raw values from
pymongo.
:param coerce_types: Field types (if applicable) will be used to coerce the raw values.
"""
self._as_pymongo = True
self._as_pymongo_coerce = coerce_types
return self
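
A brief usage sketch of as_pymongo(); Page and its fields are made up for illustration:

from mongoengine import Document, StringField

class Page(Document):
    title = StringField()

raw = list(Page.objects.only('title').as_pymongo())
# -> [{'title': u'Hello'}, ...] plain dicts straight from pymongo, no Document wrapping
typed = list(Page.objects.as_pymongo(coerce_types=True))  # raw values run through to_python()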
def _sub_js_fields(self, code):
"""When fields are specified with [~fieldname] syntax, where
*fieldname* is the Python name of a field, *fieldname* will be
@@ -1709,10 +1846,11 @@ class QuerySet(object):
def _item_frequencies_map_reduce(self, field, normalize=False):
map_func = """
function() {
path = '{{~%(field)s}}'.split('.');
field = this;
var path = '{{~%(field)s}}'.split('.');
var field = this;
for (p in path) {
if (field)
if (typeof field != 'undefined')
field = field[path[p]];
else
break;
@@ -1721,7 +1859,7 @@ class QuerySet(object):
field.forEach(function(item) {
emit(item, 1);
});
} else if (field) {
} else if (typeof field != 'undefined') {
emit(field, 1);
} else {
emit(null, 1);
@@ -1745,12 +1883,12 @@ class QuerySet(object):
if isinstance(key, float):
if int(key) == key:
key = int(key)
key = str(key)
frequencies[key] = f.value
frequencies[key] = int(f.value)
if normalize:
count = sum(frequencies.values())
frequencies = dict([(k, v / count) for k, v in frequencies.items()])
frequencies = dict([(k, float(v) / count)
for k, v in frequencies.items()])
return frequencies
@@ -1758,31 +1896,28 @@ class QuerySet(object):
"""Uses exec_js to execute"""
freq_func = """
function(path) {
path = path.split('.');
var path = path.split('.');
if (options.normalize) {
var total = 0.0;
db[collection].find(query).forEach(function(doc) {
field = doc;
for (p in path) {
if (field)
field = field[path[p]];
else
break;
}
if (field && field.constructor == Array) {
total += field.length;
} else {
total++;
}
});
}
var total = 0.0;
db[collection].find(query).forEach(function(doc) {
var field = doc;
for (p in path) {
if (field)
field = field[path[p]];
else
break;
}
if (field && field.constructor == Array) {
total += field.length;
} else {
total++;
}
});
var frequencies = {};
var types = {};
var inc = 1.0;
if (options.normalize) {
inc /= total;
}
db[collection].find(query).forEach(function(doc) {
field = doc;
for (p in path) {
@@ -1797,34 +1932,48 @@ class QuerySet(object):
});
} else {
var item = field;
types[item] = item;
frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
}
});
return frequencies;
return [total, frequencies, types];
}
"""
data = self.exec_js(freq_func, field, normalize=normalize)
if 'undefined' in data:
data[None] = data['undefined']
del(data['undefined'])
return data
total, data, types = self.exec_js(freq_func, field)
values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
if normalize:
values = dict([(k, float(v) / total) for k, v in values.items()])
frequencies = {}
for k, v in values.iteritems():
if isinstance(k, float):
if int(k) == k:
k = int(k)
frequencies[k] = v
return frequencies
def __repr__(self):
limit = REPR_OUTPUT_SIZE + 1
start = (0 if self._skip is None else self._skip)
if self._limit is None:
stop = start + limit
if self._limit is not None:
if self._limit - start > limit:
stop = start + limit
else:
stop = self._limit
try:
data = list(self[start:stop])
except pymongo.errors.InvalidOperation:
return ".. queryset mid-iteration .."
"""Provides the string representation of the QuerySet
.. versionchanged:: 0.6.13 Now doesn't modify the cursor
"""
if self._iter:
return '.. queryset mid-iteration ..'
data = []
for i in xrange(REPR_OUTPUT_SIZE + 1):
try:
data.append(self.next())
except StopIteration:
break
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
self.rewind()
return repr(data)
def select_related(self, max_depth=1):
@@ -1833,13 +1982,30 @@ class QuerySet(object):
.. versionadded:: 0.5
"""
from dereference import DeReference
# Make select related work the same for querysets
max_depth += 1
return DeReference()(self, max_depth=max_depth)
return self._dereference(self, max_depth=max_depth)
@property
def _dereference(self):
if not self.__dereference:
from dereference import DeReference
self.__dereference = DeReference() # Cached
return self.__dereference
class QuerySetManager(object):
"""
The default QuerySet Manager.
Custom QuerySet Manager functions can extend this class and users can
add extra queryset functionality. Any custom manager methods must accept a
:class:`~mongoengine.Document` class as its first argument, and a
:class:`~mongoengine.queryset.QuerySet` as its second argument.
The method function should return a :class:`~mongoengine.queryset.QuerySet`
, probably the same one that was passed in, but modified in some way.
"""
get_queryset = None
@@ -1857,13 +2023,16 @@ class QuerySetManager(object):
return self
# owner is the document that contains the QuerySetManager
queryset_class = owner._meta['queryset_class'] or QuerySet
queryset_class = owner._meta.get('queryset_class') or QuerySet
queryset = queryset_class(owner, owner._get_collection())
if self.get_queryset:
if self.get_queryset.func_code.co_argcount == 1:
arg_count = self.get_queryset.func_code.co_argcount
if arg_count == 1:
queryset = self.get_queryset(queryset)
else:
elif arg_count == 2:
queryset = self.get_queryset(owner, queryset)
else:
queryset = partial(self.get_queryset, owner, queryset)
return queryset
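
A sketch of a custom manager matching the documented two-argument signature; Post and its fields are illustrative:

from mongoengine import Document, StringField, BooleanField, queryset_manager

class Post(Document):
    title = StringField()
    published = BooleanField(default=False)

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # two-argument form: receives the owning class and the base queryset
        return queryset.filter(published=True)

print Post.live_posts.count()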

@@ -5,7 +5,7 @@
%define srcname mongoengine
Name: python-%{srcname}
Version: 0.6.11
Version: 0.7.9
Release: 1%{?dist}
Summary: A Python Document-Object Mapper for working with MongoDB


@@ -1,13 +1,11 @@
[aliases]
test = nosetests
[nosetests]
verbosity = 2
verbosity = 3
detailed-errors = 1
#with-coverage = 1
cover-html = 1
cover-html-dir = ../htmlcov
cover-package = mongoengine
cover-erase = 1
#cover-erase = 1
#cover-html = 1
#cover-html-dir = ../htmlcov
#cover-package = mongoengine
py3where = build
where = tests
#tests = test_bugfix.py
#tests = test_bugfix.py


@@ -1,27 +1,35 @@
from setuptools import setup, find_packages
import os
import sys
from setuptools import setup, find_packages
DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"
# Hack to silence atexit traceback in newer python versions
try:
import multiprocessing
except ImportError:
pass
DESCRIPTION = """MongoEngine is a Python Object-Document
Mapper for working with MongoDB."""
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
if not isinstance(version_tuple[-1], int):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
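# Illustrative results for the rewritten helper (examples added for clarity, not in the original file):
#   get_version((0, 7, 9))        -> '0.7.9'
#   get_version((0, 8, 0, 'RC1')) -> '0.8.0RC1'  (a non-integer final element is appended verbatim)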
# Dirty hack to get version number from mongoengine/__init__.py - we can't
# import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
print VERSION
print(VERSION)
CLASSIFIERS = [
'Development Status :: 4 - Beta',
@@ -29,18 +37,38 @@ CLASSIFIERS = [
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: Implementation :: CPython",
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
extra_opts = {}
if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker']
extra_opts['packages'] = find_packages(exclude=('tests',))
if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'].append("tests")
extra_opts['package_data'] = {"tests": ["mongoengine.png"]}
else:
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
extra_opts['packages'] = find_packages(exclude=('tests',))
setup(name='mongoengine',
version=VERSION,
packages=find_packages(),
author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com',
maintainer="Ross Lawley",
maintainer_email="ross.lawley@{nospam}gmail.com",
url='http://mongoengine.org/',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
license='MIT',
include_package_data=True,
description=DESCRIPTION,
@@ -48,5 +76,6 @@ setup(name='mongoengine',
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo'],
tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
test_suite='nose.collector',
**extra_opts
)


@@ -0,0 +1,98 @@
import unittest
import warnings
from mongoengine import *
from mongoengine.tests import query_counter
class TestWarnings(unittest.TestCase):
def setUp(self):
conn = connect(db='mongoenginetest')
self.warning_list = []
self.showwarning_default = warnings.showwarning
warnings.showwarning = self.append_to_warning_list
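# route warnings into a list instead of stderr so the tests below can inspect them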
def append_to_warning_list(self, message, category, *args):
self.warning_list.append({"message": message,
"category": category})
def tearDown(self):
# restore default handling of warnings
warnings.showwarning = self.showwarning_default
def test_allow_inheritance_future_warning(self):
"""Add FutureWarning for future allow_inhertiance default change.
"""
class SimpleBase(Document):
a = IntField()
class InheritedClass(SimpleBase):
b = IntField()
InheritedClass()
self.assertEqual(len(self.warning_list), 1)
warning = self.warning_list[0]
self.assertEqual(FutureWarning, warning["category"])
self.assertTrue("InheritedClass" in str(warning["message"]))
def test_dbref_reference_field_future_warning(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self')
Person.drop_collection()
p1 = Person()
p1.parent = None
p1.save()
p2 = Person(name="Wilson Jr")
p2.parent = p1
p2.save(cascade=False)
self.assertTrue(len(self.warning_list) > 0)
warning = self.warning_list[0]
self.assertEqual(FutureWarning, warning["category"])
self.assertTrue("ReferenceFields will default to using ObjectId"
in str(warning["message"]))
def test_document_save_cascade_future_warning(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self')
Person.drop_collection()
p1 = Person(name="Wilson Snr")
p1.parent = None
p1.save()
p2 = Person(name="Wilson Jr")
p2.parent = p1
p2.parent.name = "Poppa Wilson"
p2.save()
self.assertEqual(len(self.warning_list), 1)
if len(self.warning_list) > 1:
print self.warning_list
warning = self.warning_list[0]
self.assertEqual(FutureWarning, warning["category"])
self.assertTrue("Cascading saves will default to off in 0.8"
in str(warning["message"]))
def test_document_collection_syntax_warning(self):
class NonAbstractBase(Document):
pass
class InheritedDocumentFailTest(NonAbstractBase):
meta = {'collection': 'fail'}
warning = self.warning_list[0]
self.assertEqual(SyntaxWarning, warning["category"])
self.assertEqual('non_abstract_base',
InheritedDocumentFailTest._get_collection_name())


@@ -1,5 +1,8 @@
from __future__ import with_statement
import unittest
from bson import DBRef, ObjectId
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.tests import query_counter
@@ -39,6 +42,12 @@ class FieldTest(unittest.TestCase):
group_obj = Group.objects.first()
self.assertEqual(q, 1)
len(group_obj._data['members'])
self.assertEqual(q, 1)
len(group_obj.members)
self.assertEqual(q, 2)
[m for m in group_obj.members]
self.assertEqual(q, 2)
@@ -63,6 +72,131 @@ class FieldTest(unittest.TestCase):
User.drop_collection()
Group.drop_collection()
def test_list_item_dereference_dref_false(self):
"""Ensure that DBRef items in ListFields are dereferenced.
"""
class User(Document):
name = StringField()
class Group(Document):
members = ListField(ReferenceField(User, dbref=False))
User.drop_collection()
Group.drop_collection()
for i in xrange(1, 51):
user = User(name='user %s' % i)
user.save()
group = Group(members=User.objects)
group.save()
group.reload() # Confirm reload works
with query_counter() as q:
self.assertEqual(q, 0)
group_obj = Group.objects.first()
self.assertEqual(q, 1)
[m for m in group_obj.members]
self.assertEqual(q, 2)
# Document select_related
with query_counter() as q:
self.assertEqual(q, 0)
group_obj = Group.objects.first().select_related()
self.assertEqual(q, 2)
[m for m in group_obj.members]
self.assertEqual(q, 2)
# Queryset select_related
with query_counter() as q:
self.assertEqual(q, 0)
group_objs = Group.objects.select_related()
self.assertEqual(q, 2)
for group_obj in group_objs:
[m for m in group_obj.members]
self.assertEqual(q, 2)
User.drop_collection()
Group.drop_collection()
def test_handle_old_style_references(self):
"""Ensure that DBRef items in ListFields are dereferenced.
"""
class User(Document):
name = StringField()
class Group(Document):
members = ListField(ReferenceField(User, dbref=True))
User.drop_collection()
Group.drop_collection()
for i in xrange(1, 26):
user = User(name='user %s' % i)
user.save()
group = Group(members=User.objects)
group.save()
group = Group._get_collection().find_one()
# Update the model to change the reference
class Group(Document):
members = ListField(ReferenceField(User, dbref=False))
group = Group.objects.first()
group.members.append(User(name="String!").save())
group.save()
group = Group.objects.first()
self.assertEqual(group.members[0].name, 'user 1')
self.assertEqual(group.members[-1].name, 'String!')
def test_migrate_references(self):
"""Example of migrating ReferenceField storage
"""
# Create some sample data
class User(Document):
name = StringField()
class Group(Document):
author = ReferenceField(User, dbref=True)
members = ListField(ReferenceField(User, dbref=True))
User.drop_collection()
Group.drop_collection()
user = User(name="Ross").save()
group = Group(author=user, members=[user]).save()
raw_data = Group._get_collection().find_one()
self.assertTrue(isinstance(raw_data['author'], DBRef))
self.assertTrue(isinstance(raw_data['members'][0], DBRef))
# Migrate the model definition
class Group(Document):
author = ReferenceField(User, dbref=False)
members = ListField(ReferenceField(User, dbref=False))
# Migrate the data
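# Re-assigning each field marks it as changed, so save() rewrites the references in the new ObjectId (dbref=False) format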
for g in Group.objects():
g.author = g.author
g.members = g.members
g.save()
group = Group.objects.first()
self.assertEqual(group.author, user)
self.assertEqual(group.members, [user])
raw_data = Group._get_collection().find_one()
self.assertTrue(isinstance(raw_data['author'], ObjectId))
self.assertTrue(isinstance(raw_data['members'][0], ObjectId))
def test_recursive_reference(self):
"""Ensure that ReferenceFields can reference their own documents.
"""
@@ -109,10 +243,10 @@ class FieldTest(unittest.TestCase):
peter = Employee.objects.with_id(peter.id).select_related()
self.assertEqual(q, 2)
self.assertEquals(peter.boss, bill)
self.assertEqual(peter.boss, bill)
self.assertEqual(q, 2)
self.assertEquals(peter.friends, friends)
self.assertEqual(peter.friends, friends)
self.assertEqual(q, 2)
# Queryset select_related
@@ -123,10 +257,10 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)
for employee in employees:
self.assertEquals(employee.boss, bill)
self.assertEqual(employee.boss, bill)
self.assertEqual(q, 2)
self.assertEquals(employee.friends, friends)
self.assertEqual(employee.friends, friends)
self.assertEqual(q, 2)
def test_circular_reference(self):
@@ -160,7 +294,7 @@ class FieldTest(unittest.TestCase):
daughter.relations.append(self_rel)
daughter.save()
self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
def test_circular_reference_on_self(self):
"""Ensure you can handle circular references
@@ -186,7 +320,7 @@ class FieldTest(unittest.TestCase):
daughter.relations.append(daughter)
daughter.save()
self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
def test_circular_tree_reference(self):
"""Ensure you can handle circular references with more than one level
@@ -228,7 +362,7 @@ class FieldTest(unittest.TestCase):
anna.other.name = "Anna's friends"
anna.save()
self.assertEquals(
self.assertEqual(
"[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]",
"%s" % Person.objects()
)
@@ -781,8 +915,8 @@ class FieldTest(unittest.TestCase):
root.save()
root = root.reload()
self.assertEquals(root.children, [company])
self.assertEquals(company.parents, [root])
self.assertEqual(root.children, [company])
self.assertEqual(company.parents, [root])
def test_dict_in_dbref_instance(self):
@@ -808,9 +942,9 @@ class FieldTest(unittest.TestCase):
room_101.save()
room = Room.objects.first().select_related()
self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
self.assertEquals(room.staffs_with_position[1]['staff'], bob)
self.assertEqual(room.staffs_with_position[0]['staff'], sarah)
self.assertEqual(room.staffs_with_position[1]['staff'], bob)
def test_document_reload_no_inheritance(self):
class Foo(Document):
meta = {'allow_inheritance': False}
@@ -839,5 +973,27 @@ class FieldTest(unittest.TestCase):
foo.save()
foo.reload()
self.assertEquals(type(foo.bar), Bar)
self.assertEquals(type(foo.baz), Baz)
self.assertEqual(type(foo.bar), Bar)
self.assertEqual(type(foo.baz), Baz)
def test_list_lookup_not_checked_in_map(self):
"""Ensure we dereference list data correctly
"""
class Comment(Document):
id = IntField(primary_key=True)
text = StringField()
class Message(Document):
id = IntField(primary_key=True)
comments = ListField(ReferenceField(Comment))
Comment.drop_collection()
Message.drop_collection()
c1 = Comment(id=0, text='zero').save()
c2 = Comment(id=1, text='one').save()
Message(id=1, comments=[c1, c2]).save()
msg = Message.objects.get(id=1)
self.assertEqual(0, msg.comments[0].id)
self.assertEqual(1, msg.comments[1].id)


@@ -1,24 +1,34 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import unittest
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator
try:
from mongoengine.django.shortcuts import get_document_or_404
settings.configure()
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
settings.configure()
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
except Exception, err:
if PY3:
SessionTestsMixin = type # dummy value so no error
SessionStore = None # dummy value so no error
else:
raise err
class QuerySetTest(unittest.TestCase):
def setUp(self):
if PY3:
raise SkipTest('django does not have Python 3 support')
connect(db='mongoenginetest')
class Person(Document):
@@ -99,6 +109,8 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
backend = SessionStore
def setUp(self):
if PY3:
raise SkipTest('django does not have Python 3 support')
connect(db='mongoenginetest')
MongoSession.drop_collection()
super(MongoDBSessionTest, self).setUp()

File diff suppressed because it is too large


@@ -25,14 +25,14 @@ class DynamicDocTest(unittest.TestCase):
p.name = "James"
p.age = 34
self.assertEquals(p.to_mongo(),
self.assertEqual(p.to_mongo(),
{"_types": ["Person"], "_cls": "Person",
"name": "James", "age": 34}
)
p.save()
self.assertEquals(self.Person.objects.first().age, 34)
self.assertEqual(self.Person.objects.first().age, 34)
# Confirm no changes to self.Person
self.assertFalse(hasattr(self.Person, 'age'))
@@ -40,11 +40,11 @@ class DynamicDocTest(unittest.TestCase):
def test_dynamic_document_delta(self):
"""Ensures simple dynamic documents can delta correctly"""
p = self.Person(name="James", age=34)
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))
self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))
p.doc = 123
del(p.doc)
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))
self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))
def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
@@ -58,7 +58,7 @@ class DynamicDocTest(unittest.TestCase):
p.save()
p = self.Person.objects.get()
self.assertEquals(p.misc, {'hello': 'world'})
self.assertEqual(p.misc, {'hello': 'world'})
def test_delete_dynamic_field(self):
"""Test deleting a dynamic field works"""
@@ -73,10 +73,10 @@ class DynamicDocTest(unittest.TestCase):
p.save()
p = self.Person.objects.get()
self.assertEquals(p.misc, {'hello': 'world'})
self.assertEqual(p.misc, {'hello': 'world'})
collection = self.db[self.Person._get_collection_name()]
obj = collection.find_one()
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])
del(p.misc)
p.save()
@@ -85,7 +85,7 @@ class DynamicDocTest(unittest.TestCase):
self.assertFalse(hasattr(p, 'misc'))
obj = collection.find_one()
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])
def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields"""
@@ -94,10 +94,10 @@ class DynamicDocTest(unittest.TestCase):
p.age = 22
p.save()
self.assertEquals(1, self.Person.objects(age=22).count())
self.assertEqual(1, self.Person.objects(age=22).count())
p = self.Person.objects(age=22)
p = p.get()
self.assertEquals(22, p.age)
self.assertEqual(22, p.age)
def test_complex_dynamic_document_queries(self):
class Person(DynamicDocument):
@@ -117,8 +117,8 @@ class DynamicDocTest(unittest.TestCase):
p2.age = 10
p2.save()
self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
self.assertEquals(Person.objects(age__gte=10).count(), 1)
self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
self.assertEqual(Person.objects(age__gte=10).count(), 1)
def test_complex_data_lookups(self):
"""Ensure you can query dynamic document dynamic fields"""
@@ -126,7 +126,7 @@ class DynamicDocTest(unittest.TestCase):
p.misc = {'hello': 'world'}
p.save()
self.assertEquals(1, self.Person.objects(misc__hello='world').count())
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
def test_inheritance(self):
"""Ensure that dynamic document plays nice with inheritance"""
@@ -146,8 +146,8 @@ class DynamicDocTest(unittest.TestCase):
joe_bloggs.age = 20
joe_bloggs.save()
self.assertEquals(1, self.Person.objects(age=20).count())
self.assertEquals(1, Employee.objects(age=20).count())
self.assertEqual(1, self.Person.objects(age=20).count())
self.assertEqual(1, Employee.objects(age=20).count())
joe_bloggs = self.Person.objects.first()
self.assertTrue(isinstance(joe_bloggs, Employee))
@@ -170,7 +170,7 @@ class DynamicDocTest(unittest.TestCase):
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
"embedded_field": {
"_types": ['Embedded'], "_cls": "Embedded",
"string_field": "hello",
@@ -182,11 +182,11 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc.embedded_field.__class__, Embedded)
self.assertEquals(doc.embedded_field.string_field, "hello")
self.assertEquals(doc.embedded_field.int_field, 1)
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])
def test_complex_embedded_documents(self):
"""Test complex dynamic embedded documents setups"""
@@ -213,7 +213,7 @@ class DynamicDocTest(unittest.TestCase):
embedded_1.list_field = ['1', 2, embedded_2]
doc.embedded_field = embedded_1
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
"embedded_field": {
"_types": ['Embedded'], "_cls": "Embedded",
"string_field": "hello",
@@ -230,20 +230,20 @@ class DynamicDocTest(unittest.TestCase):
})
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc.embedded_field.__class__, Embedded)
self.assertEquals(doc.embedded_field.string_field, "hello")
self.assertEquals(doc.embedded_field.int_field, 1)
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(doc.embedded_field.list_field[0], '1')
self.assertEquals(doc.embedded_field.list_field[1], 2)
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.list_field[1], 2)
embedded_field = doc.embedded_field.list_field[2]
self.assertEquals(embedded_field.__class__, Embedded)
self.assertEquals(embedded_field.string_field, "hello")
self.assertEquals(embedded_field.int_field, 1)
self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])
self.assertEqual(embedded_field.__class__, Embedded)
self.assertEqual(embedded_field.string_field, "hello")
self.assertEqual(embedded_field.int_field, 1)
self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}])
def test_delta_for_dynamic_documents(self):
p = self.Person()
@@ -252,18 +252,18 @@ class DynamicDocTest(unittest.TestCase):
p.save()
p.age = 24
self.assertEquals(p.age, 24)
self.assertEquals(p._get_changed_fields(), ['age'])
self.assertEquals(p._delta(), ({'age': 24}, {}))
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ['age'])
self.assertEqual(p._delta(), ({'age': 24}, {}))
p = self.Person.objects(age=22).get()
p.age = 24
self.assertEquals(p.age, 24)
self.assertEquals(p._get_changed_fields(), ['age'])
self.assertEquals(p._delta(), ({'age': 24}, {}))
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ['age'])
self.assertEqual(p._delta(), ({'age': 24}, {}))
p.save()
self.assertEquals(1, self.Person.objects(age=24).count())
self.assertEqual(1, self.Person.objects(age=24).count())
def test_delta(self):
@@ -275,40 +275,40 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc._get_changed_fields(), [])
self.assertEquals(doc._delta(), ({}, {}))
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
doc.string_field = 'hello'
self.assertEquals(doc._get_changed_fields(), ['string_field'])
self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))
self.assertEqual(doc._get_changed_fields(), ['string_field'])
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
doc._changed_fields = []
doc.int_field = 1
self.assertEquals(doc._get_changed_fields(), ['int_field'])
self.assertEquals(doc._delta(), ({'int_field': 1}, {}))
self.assertEqual(doc._get_changed_fields(), ['int_field'])
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
doc._changed_fields = []
dict_value = {'hello': 'world', 'ping': 'pong'}
doc.dict_field = dict_value
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
doc._changed_fields = []
list_value = ['1', 2, {'hello': 'world'}]
doc.list_field = list_value
self.assertEquals(doc._get_changed_fields(), ['list_field'])
self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
# Test unsetting
doc._changed_fields = []
doc.dict_field = {}
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
doc._changed_fields = []
doc.list_field = []
self.assertEquals(doc._get_changed_fields(), ['list_field'])
self.assertEquals(doc._delta(), ({}, {'list_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
def test_delta_recursive(self):
"""Testing deltaing works with dynamic documents"""
@@ -323,8 +323,8 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc._get_changed_fields(), [])
self.assertEquals(doc._delta(), ({}, {}))
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
embedded_1 = Embedded()
embedded_1.string_field = 'hello'
@@ -333,7 +333,7 @@ class DynamicDocTest(unittest.TestCase):
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEquals(doc._get_changed_fields(), ['embedded_field'])
self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
embedded_delta = {
'string_field': 'hello',
@@ -341,28 +341,28 @@ class DynamicDocTest(unittest.TestCase):
'dict_field': {'hello': 'world'},
'list_field': ['1', 2, {'hello': 'world'}]
}
self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
embedded_delta.update({
'_types': ['Embedded'],
'_cls': 'Embedded',
})
self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))
self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {}))
doc.save()
doc.reload()
doc.embedded_field.dict_field = {}
self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field'])
self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
doc.save()
doc.reload()
doc.embedded_field.list_field = []
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
doc.save()
doc.reload()
@@ -373,8 +373,8 @@ class DynamicDocTest(unittest.TestCase):
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field.list_field = ['1', 2, embedded_2]
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEquals(doc.embedded_field._delta(), ({
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'list_field': ['1', 2, {
'_cls': 'Embedded',
'_types': ['Embedded'],
@@ -385,7 +385,7 @@ class DynamicDocTest(unittest.TestCase):
}]
}, {}))
self.assertEquals(doc._delta(), ({
self.assertEqual(doc._delta(), ({
'embedded_field.list_field': ['1', 2, {
'_cls': 'Embedded',
'_types': ['Embedded'],
@@ -398,25 +398,25 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
self.assertEquals(doc.embedded_field.list_field[0], '1')
self.assertEquals(doc.embedded_field.list_field[1], 2)
self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, [])
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.list_field[1], 2)
for k in doc.embedded_field.list_field[2]._fields:
self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])
self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k])
doc.embedded_field.list_field[2].string_field = 'world'
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')
self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world')
# Test multiple assignments
doc.embedded_field.list_field[2].string_field = 'hello world'
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEquals(doc.embedded_field._delta(), ({
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'list_field': ['1', 2, {
'_types': ['Embedded'],
'_cls': 'Embedded',
@@ -424,7 +424,7 @@ class DynamicDocTest(unittest.TestCase):
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
'dict_field': {'hello': 'world'}}]}, {}))
self.assertEquals(doc._delta(), ({
self.assertEqual(doc._delta(), ({
'embedded_field.list_field': ['1', 2, {
'_types': ['Embedded'],
'_cls': 'Embedded',
@@ -435,32 +435,32 @@ class DynamicDocTest(unittest.TestCase):
]}, {}))
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')
self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world')
# Test list native methods
doc.embedded_field.list_field[2].list_field.pop(0)
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
doc.save()
doc.reload()
doc.embedded_field.list_field[2].list_field.append(1)
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])
self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])
doc.embedded_field.list_field[2].list_field.sort()
doc.embedded_field.list_field[2].list_field.sort(key=str)  # use str as a key to allow comparing otherwise incomparable types
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])
self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
doc.save()
doc.reload()
del(doc.embedded_field.list_field[2].list_field)
self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))
doc.save()
doc.reload()
@@ -470,8 +470,8 @@ class DynamicDocTest(unittest.TestCase):
doc.reload()
doc.dict_field['embedded'].string_field = 'Hello World'
self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))
self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))
def test_indexes(self):
"""Ensure that indexes are used when meta[indexes] is specified.
@@ -500,3 +500,34 @@ class DynamicDocTest(unittest.TestCase):
self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
in info)
self.assertTrue([('_types', 1), ('date', -1)] in info)
def test_dynamic_and_embedded(self):
"""Ensure embedded documents play nicely"""
class Address(EmbeddedDocument):
city = StringField()
class Person(DynamicDocument):
name = StringField()
meta = {'allow_inheritance': True}
Person.drop_collection()
Person(name="Ross", address=Address(city="London")).save()
person = Person.objects.first()
person.address.city = "Lundenne"
person.save()
self.assertEqual(Person.objects.first().address.city, "Lundenne")
person = Person.objects.first()
person.address = Address(city="Londinium")
person.save()
self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first()
person.age = 35
person.save()
self.assertEqual(Person.objects.first().age, 35)


@@ -1,20 +1,24 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import datetime
import os
import unittest
import uuid
import StringIO
import tempfile
import gridfs
from decimal import Decimal
from bson import Binary, DBRef, ObjectId
import gridfs
from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.base import _document_registry, NotRegistered
from mongoengine.python_support import PY3, b, StringIO, bin_type
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
class FieldTest(unittest.TestCase):
def setUp(self):
@@ -123,10 +127,23 @@ class FieldTest(unittest.TestCase):
self.assertEqual(ret.int_fld, None)
self.assertEqual(ret.flt_fld, None)
# Return current time if retrieved value is None.
self.assert_(isinstance(ret.comp_dt_fld, datetime.datetime))
self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime))
self.assertRaises(ValidationError, ret.validate)
def test_int_and_float_ne_operator(self):
class TestDocument(Document):
int_fld = IntField()
float_fld = FloatField()
TestDocument.drop_collection()
TestDocument(int_fld=None, float_fld=None).save()
TestDocument(int_fld=1, float_fld=1).save()
self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
def test_object_id_validation(self):
"""Ensure that invalid values cannot be assigned to string fields.
"""
@@ -258,25 +275,56 @@ class FieldTest(unittest.TestCase):
person.admin = 'Yes'
self.assertRaises(ValidationError, person.validate)
def test_uuid_validation(self):
"""Ensure that invalid values cannot be assigned to UUID fields.
def test_uuid_field_string(self):
"""Test UUID fields storing as String
"""
class Person(Document):
api_key = UUIDField()
api_key = UUIDField(binary=False)
Person.drop_collection()
uu = uuid.uuid4()
Person(api_key=uu).save()
self.assertEqual(1, Person.objects(api_key=uu).count())
self.assertEqual(uu, Person.objects.first().api_key)
person = Person()
# any uuid type is valid
person.api_key = uuid.uuid4()
person.validate()
person.api_key = uuid.uuid1()
person.validate()
valid = (uuid.uuid4(), uuid.uuid1())
for api_key in valid:
person.api_key = api_key
person.validate()
invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
'9d159858-549b-4975-9f98-dd2f987c113')
for api_key in invalid:
person.api_key = api_key
self.assertRaises(ValidationError, person.validate)
def test_uuid_field_binary(self):
"""Test UUID fields storing as Binary object
"""
class Person(Document):
api_key = UUIDField(binary=True)
Person.drop_collection()
uu = uuid.uuid4()
Person(api_key=uu).save()
self.assertEqual(1, Person.objects(api_key=uu).count())
self.assertEqual(uu, Person.objects.first().api_key)
person = Person()
valid = (uuid.uuid4(), uuid.uuid1())
for api_key in valid:
person.api_key = api_key
person.validate()
invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
'9d159858-549b-4975-9f98-dd2f987c113')
for api_key in invalid:
person.api_key = api_key
self.assertRaises(ValidationError, person.validate)
# last g cannot belong to a hex number
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g'
self.assertRaises(ValidationError, person.validate)
# short strings don't validate
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113'
self.assertRaises(ValidationError, person.validate)
def test_datetime_validation(self):
"""Ensure that invalid values cannot be assigned to datetime fields.
@@ -313,7 +361,7 @@ class FieldTest(unittest.TestCase):
log.date = datetime.date.today()
log.save()
log.reload()
self.assertEquals(log.date.date(), datetime.date.today())
self.assertEqual(log.date.date(), datetime.date.today())
LogEntry.drop_collection()
@@ -324,8 +372,8 @@ class FieldTest(unittest.TestCase):
log.date = d1
log.save()
log.reload()
self.assertNotEquals(log.date, d1)
self.assertEquals(log.date, d2)
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)
# Post UTC - microseconds are rounded (down) to the nearest millisecond
d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999)
@@ -333,35 +381,19 @@ class FieldTest(unittest.TestCase):
log.date = d1
log.save()
log.reload()
self.assertNotEquals(log.date, d1)
self.assertEquals(log.date, d2)
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)
# Pre UTC dates microseconds below 1000 are dropped
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
self.assertNotEquals(log.date, d1)
self.assertEquals(log.date, d2)
# Pre UTC microseconds above 1000 is wonky.
# log.date has an invalid microsecond value so I can't construct
# a date to compare.
#
# However, the timedelta is predictable with pre UTC timestamps
# It always adds 16 seconds and [777216-776217] microseconds
for i in xrange(1001, 3113, 33):
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
if not PY3:
# Pre UTC dates microseconds below 1000 are dropped
# This does not seem to be true in PY3
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
self.assertNotEquals(log.date, d1)
delta = log.date - d1
self.assertEquals(delta.seconds, 16)
microseconds = 777216 - (i % 1000)
self.assertEquals(delta.microseconds, microseconds)
self.assertNotEqual(log.date, d1)
self.assertEqual(log.date, d2)
LogEntry.drop_collection()
@@ -380,21 +412,21 @@ class FieldTest(unittest.TestCase):
log.date = d1
log.save()
log.reload()
self.assertEquals(log.date, d1)
self.assertEqual(log.date, d1)
# Post UTC - microseconds are rounded (down) to the nearest millisecond - with default datetimefields
d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999)
log.date = d1
log.save()
log.reload()
self.assertEquals(log.date, d1)
self.assertEqual(log.date, d1)
# Pre UTC dates microseconds below 1000 are dropped - with default datetimefields
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
log.date = d1
log.save()
log.reload()
self.assertEquals(log.date, d1)
self.assertEqual(log.date, d1)
# Pre UTC microseconds above 1000 is wonky - with default datetimefields
# log.date has an invalid microsecond value so I can't construct
@@ -404,7 +436,7 @@ class FieldTest(unittest.TestCase):
log.date = d1
log.save()
log.reload()
self.assertEquals(log.date, d1)
self.assertEqual(log.date, d1)
log1 = LogEntry.objects.get(date=d1)
self.assertEqual(log, log1)
@@ -425,7 +457,7 @@ class FieldTest(unittest.TestCase):
log.save()
log1 = LogEntry.objects.get(date=d1)
self.assertEquals(log, log1)
self.assertEqual(log, log1)
LogEntry.drop_collection()
@@ -613,13 +645,13 @@ class FieldTest(unittest.TestCase):
post.info = [{'test': 3}]
post.save()
self.assertEquals(BlogPost.objects.count(), 3)
self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1)
self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1)
self.assertEqual(BlogPost.objects.count(), 3)
self.assertEqual(BlogPost.objects.filter(info__exact='test').count(), 1)
self.assertEqual(BlogPost.objects.filter(info__0__test='test').count(), 1)
# Confirm handles non strings or non existing keys
self.assertEquals(BlogPost.objects.filter(info__0__test__exact='5').count(), 0)
self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
self.assertEqual(BlogPost.objects.filter(info__0__test__exact='5').count(), 0)
self.assertEqual(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
BlogPost.drop_collection()
def test_list_field_passed_in_value(self):
@@ -634,7 +666,7 @@ class FieldTest(unittest.TestCase):
foo = Foo(bars=[])
foo.bars.append(bar)
self.assertEquals(repr(foo.bars), '[<Bar: Bar object>]')
self.assertEqual(repr(foo.bars), '[<Bar: Bar object>]')
def test_list_field_strict(self):
@@ -719,20 +751,20 @@ class FieldTest(unittest.TestCase):
self.assertTrue(isinstance(e2.mapping[1], IntegerSetting))
# Test querying
self.assertEquals(Simple.objects.filter(mapping__1__value=42).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__2__number=1).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__2__complex__value=42).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1)
self.assertEqual(Simple.objects.filter(mapping__1__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__number=1).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__complex__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1)
# Confirm can update
Simple.objects().update(set__mapping__1=IntegerSetting(value=10))
self.assertEquals(Simple.objects.filter(mapping__1__value=10).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__1__value=10).count(), 1)
Simple.objects().update(
set__mapping__2__list__1=StringSetting(value='Boo'))
self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0)
self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1)
self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0)
self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1)
Simple.drop_collection()
@@ -771,19 +803,19 @@ class FieldTest(unittest.TestCase):
post.info = {'details': {'test': 3}}
post.save()
self.assertEquals(BlogPost.objects.count(), 3)
self.assertEquals(BlogPost.objects.filter(info__title__exact='test').count(), 1)
self.assertEquals(BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
self.assertEqual(BlogPost.objects.count(), 3)
self.assertEqual(BlogPost.objects.filter(info__title__exact='test').count(), 1)
self.assertEqual(BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
# Confirm handles non strings or non existing keys
self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
self.assertEqual(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
self.assertEqual(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
post = BlogPost.objects.create(info={'title': 'original'})
post.info.update({'title': 'updated'})
post.save()
post.reload()
self.assertEquals('updated', post.info['title'])
self.assertEqual('updated', post.info['title'])
BlogPost.drop_collection()
@@ -836,19 +868,19 @@ class FieldTest(unittest.TestCase):
self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting))
# Test querying
self.assertEquals(Simple.objects.filter(mapping__someint__value=42).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
# Confirm can update
Simple.objects().update(
set__mapping={"someint": IntegerSetting(value=10)})
Simple.objects().update(
set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
Simple.drop_collection()
@@ -933,6 +965,19 @@ class FieldTest(unittest.TestCase):
doc = self.db.test.find_one()
self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
def test_map_field_lookup(self):
"""Ensure MapField lookups succeed on Fields without a lookup method"""
class Log(Document):
name = StringField()
visited = MapField(DateTimeField())
Log.drop_collection()
Log(name="wilson", visited={'friends': datetime.datetime.now()}).save()
self.assertEqual(1, Log.objects(
visited__friends__exists=True).count())
def test_embedded_db_field(self):
class Embedded(EmbeddedDocument):
@@ -1042,6 +1087,52 @@ class FieldTest(unittest.TestCase):
User.drop_collection()
BlogPost.drop_collection()
def test_dbref_reference_fields(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self', dbref=True)
Person.drop_collection()
p1 = Person(name="John").save()
Person(name="Ross", parent=p1).save()
col = Person._get_collection()
data = col.find_one({'name': 'Ross'})
self.assertEqual(data['parent'], DBRef('person', p1.pk))
p = Person.objects.get(name="Ross")
self.assertEqual(p.parent, p1)
def test_dbref_to_mongo(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self', dbref=False)
p1 = Person._from_son({'name': "Yakxxx",
'parent': "50a234ea469ac1eda42d347d"})
mongoed = p1.to_mongo()
self.assertTrue(isinstance(mongoed['parent'], ObjectId))
def test_objectid_reference_fields(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self', dbref=False)
Person.drop_collection()
p1 = Person(name="John").save()
Person(name="Ross", parent=p1).save()
col = Person._get_collection()
data = col.find_one({'name': 'Ross'})
self.assertEqual(data['parent'], p1.pk)
p = Person.objects.get(name="Ross")
self.assertEqual(p.parent, p1)
def test_list_item_dereference(self):
"""Ensure that DBRef items in ListFields are dereferenced.
"""
@@ -1078,6 +1169,7 @@ class FieldTest(unittest.TestCase):
boss = ReferenceField('self')
friends = ListField(ReferenceField('self'))
Employee.drop_collection()
bill = Employee(name='Bill Lumbergh')
bill.save()
@@ -1201,7 +1293,41 @@ class FieldTest(unittest.TestCase):
class BlogPost(Document):
title = StringField()
author = ReferenceField(Member)
author = ReferenceField(Member, dbref=False)
Member.drop_collection()
BlogPost.drop_collection()
m1 = Member(user_num=1)
m1.save()
m2 = Member(user_num=2)
m2.save()
post1 = BlogPost(title='post 1', author=m1)
post1.save()
post2 = BlogPost(title='post 2', author=m2)
post2.save()
post = BlogPost.objects(author=m1).first()
self.assertEqual(post.id, post1.id)
post = BlogPost.objects(author=m2).first()
self.assertEqual(post.id, post2.id)
Member.drop_collection()
BlogPost.drop_collection()
def test_reference_query_conversion_dbref(self):
"""Ensure that ReferenceFields can be queried using objects and values
of the type of the primary key of the referenced object.
"""
class Member(Document):
user_num = IntField(primary_key=True)
class BlogPost(Document):
title = StringField()
author = ReferenceField(Member, dbref=True)
Member.drop_collection()
BlogPost.drop_collection()
@@ -1345,7 +1471,7 @@ class FieldTest(unittest.TestCase):
Person.drop_collection()
Person(name="Wilson Jr").save()
self.assertEquals(repr(Person.objects(city=None)),
self.assertEqual(repr(Person.objects(city=None)),
"[<Person: Person object>]")
@@ -1423,7 +1549,7 @@ class FieldTest(unittest.TestCase):
content_type = StringField()
blob = BinaryField()
BLOB = '\xe6\x00\xc4\xff\x07'
BLOB = b('\xe6\x00\xc4\xff\x07')
MIME_TYPE = 'application/octet-stream'
Attachment.drop_collection()
@@ -1433,7 +1559,7 @@ class FieldTest(unittest.TestCase):
attachment_1 = Attachment.objects().first()
self.assertEqual(MIME_TYPE, attachment_1.content_type)
self.assertEqual(BLOB, attachment_1.blob)
self.assertEqual(BLOB, bin_type(attachment_1.blob))
Attachment.drop_collection()
@@ -1460,18 +1586,30 @@ class FieldTest(unittest.TestCase):
attachment_required = AttachmentRequired()
self.assertRaises(ValidationError, attachment_required.validate)
attachment_required.blob = '\xe6\x00\xc4\xff\x07'
attachment_required.blob = Binary(b('\xe6\x00\xc4\xff\x07'))
attachment_required.validate()
attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07')
attachment_size_limit = AttachmentSizeLimit(blob=b('\xe6\x00\xc4\xff\x07'))
self.assertRaises(ValidationError, attachment_size_limit.validate)
attachment_size_limit.blob = '\xe6\x00\xc4\xff'
attachment_size_limit.blob = b('\xe6\x00\xc4\xff')
attachment_size_limit.validate()
Attachment.drop_collection()
AttachmentRequired.drop_collection()
AttachmentSizeLimit.drop_collection()
def test_binary_field_primary(self):
class Attachment(Document):
id = BinaryField(primary_key=True)
Attachment.drop_collection()
att = Attachment(id=uuid.uuid4().bytes).save()
att.delete()
self.assertEqual(0, Attachment.objects.count())
def test_choices_validation(self):
"""Ensure that value is in a container of allowed values.
"""
@@ -1572,16 +1710,16 @@ class FieldTest(unittest.TestCase):
"""Ensure that file fields can be written to and their data retrieved
"""
class PutFile(Document):
file = FileField()
the_file = FileField()
class StreamFile(Document):
file = FileField()
the_file = FileField()
class SetFile(Document):
file = FileField()
the_file = FileField()
text = 'Hello, World!'
more_text = 'Foo Bar'
text = b('Hello, World!')
more_text = b('Foo Bar')
content_type = 'text/plain'
PutFile.drop_collection()
@@ -1589,68 +1727,68 @@ class FieldTest(unittest.TestCase):
SetFile.drop_collection()
putfile = PutFile()
putfile.file.put(text, content_type=content_type)
putfile.the_file.put(text, content_type=content_type)
putfile.save()
putfile.validate()
result = PutFile.objects.first()
self.assertTrue(putfile == result)
self.assertEquals(result.file.read(), text)
self.assertEquals(result.file.content_type, content_type)
result.file.delete() # Remove file from GridFS
self.assertEqual(result.the_file.read(), text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.delete() # Remove file from GridFS
PutFile.objects.delete()
# Ensure file-like objects are stored
putfile = PutFile()
putstring = StringIO.StringIO()
putstring = StringIO()
putstring.write(text)
putstring.seek(0)
putfile.file.put(putstring, content_type=content_type)
putfile.the_file.put(putstring, content_type=content_type)
putfile.save()
putfile.validate()
result = PutFile.objects.first()
self.assertTrue(putfile == result)
self.assertEquals(result.file.read(), text)
self.assertEquals(result.file.content_type, content_type)
result.file.delete()
self.assertEqual(result.the_file.read(), text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.delete()
streamfile = StreamFile()
streamfile.file.new_file(content_type=content_type)
streamfile.file.write(text)
streamfile.file.write(more_text)
streamfile.file.close()
streamfile.the_file.new_file(content_type=content_type)
streamfile.the_file.write(text)
streamfile.the_file.write(more_text)
streamfile.the_file.close()
streamfile.save()
streamfile.validate()
result = StreamFile.objects.first()
self.assertTrue(streamfile == result)
self.assertEquals(result.file.read(), text + more_text)
self.assertEquals(result.file.content_type, content_type)
result.file.seek(0)
self.assertEquals(result.file.tell(), 0)
self.assertEquals(result.file.read(len(text)), text)
self.assertEquals(result.file.tell(), len(text))
self.assertEquals(result.file.read(len(more_text)), more_text)
self.assertEquals(result.file.tell(), len(text + more_text))
result.file.delete()
self.assertEqual(result.the_file.read(), text + more_text)
self.assertEqual(result.the_file.content_type, content_type)
result.the_file.seek(0)
self.assertEqual(result.the_file.tell(), 0)
self.assertEqual(result.the_file.read(len(text)), text)
self.assertEqual(result.the_file.tell(), len(text))
self.assertEqual(result.the_file.read(len(more_text)), more_text)
self.assertEqual(result.the_file.tell(), len(text + more_text))
result.the_file.delete()
# Ensure deleted file returns None
self.assertTrue(result.file.read() == None)
self.assertTrue(result.the_file.read() == None)
setfile = SetFile()
setfile.file = text
setfile.the_file = text
setfile.save()
setfile.validate()
result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEquals(result.file.read(), text)
self.assertEqual(result.the_file.read(), text)
# Try replacing file with new one
result.file.replace(more_text)
result.the_file.replace(more_text)
result.save()
result.validate()
result = SetFile.objects.first()
self.assertTrue(setfile == result)
self.assertEquals(result.file.read(), more_text)
result.file.delete()
self.assertEqual(result.the_file.read(), more_text)
result.the_file.delete()
PutFile.drop_collection()
StreamFile.drop_collection()
@@ -1658,7 +1796,7 @@ class FieldTest(unittest.TestCase):
# Make sure FileField is optional and not required
class DemoFile(Document):
file = FileField()
the_file = FileField()
DemoFile.objects.create()
@@ -1670,7 +1808,7 @@ class FieldTest(unittest.TestCase):
GridDocument.drop_collection()
with tempfile.TemporaryFile() as f:
f.write("Hello World!")
f.write(b("Hello World!"))
f.flush()
# Test without default
@@ -1681,48 +1819,48 @@ class FieldTest(unittest.TestCase):
doc_b = GridDocument.objects.with_id(doc_a.id)
doc_b.the_file.replace(f, filename='doc_b')
doc_b.save()
self.assertNotEquals(doc_b.the_file.grid_id, None)
self.assertNotEqual(doc_b.the_file.grid_id, None)
# Test it matches
doc_c = GridDocument.objects.with_id(doc_b.id)
self.assertEquals(doc_b.the_file.grid_id, doc_c.the_file.grid_id)
self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)
# Test with default
doc_d = GridDocument(the_file='')
doc_d = GridDocument(the_file=b(''))
doc_d.save()
doc_e = GridDocument.objects.with_id(doc_d.id)
self.assertEquals(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
doc_e.the_file.replace(f, filename='doc_e')
doc_e.save()
doc_f = GridDocument.objects.with_id(doc_e.id)
self.assertEquals(doc_e.the_file.grid_id, doc_f.the_file.grid_id)
self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)
db = GridDocument._get_db()
grid_fs = gridfs.GridFS(db)
self.assertEquals(['doc_b', 'doc_e'], grid_fs.list())
self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())
def test_file_uniqueness(self):
"""Ensure that each instance of a FileField is unique
"""
class TestFile(Document):
name = StringField()
file = FileField()
the_file = FileField()
# First instance
testfile = TestFile()
testfile.name = "Hello, World!"
testfile.file.put('Hello, World!')
testfile.save()
test_file = TestFile()
test_file.name = "Hello, World!"
test_file.the_file.put(b('Hello, World!'))
test_file.save()
# Second instance
testfiledupe = TestFile()
data = testfiledupe.file.read() # Should be None
test_file_dupe = TestFile()
data = test_file_dupe.the_file.read() # Should be None
self.assertTrue(testfile.name != testfiledupe.name)
self.assertTrue(testfile.file.read() != data)
self.assertTrue(test_file.name != test_file_dupe.name)
self.assertTrue(test_file.the_file.read() != data)
TestFile.drop_collection()
@@ -1730,18 +1868,28 @@ class FieldTest(unittest.TestCase):
"""Ensure that a boolean test of a FileField indicates its presence
"""
class TestFile(Document):
file = FileField()
the_file = FileField()
testfile = TestFile()
self.assertFalse(bool(testfile.file))
testfile.file = 'Hello, World!'
testfile.file.content_type = 'text/plain'
testfile.save()
self.assertTrue(bool(testfile.file))
test_file = TestFile()
self.assertFalse(bool(test_file.the_file))
test_file.the_file = b('Hello, World!')
test_file.the_file.content_type = 'text/plain'
test_file.save()
self.assertTrue(bool(test_file.the_file))
TestFile.drop_collection()
def test_file_cmp(self):
"""Test comparing against other types"""
class TestFile(Document):
the_file = FileField()
test_file = TestFile()
self.assertFalse(test_file.the_file in [{"test": 1}])
def test_image_field(self):
if PY3:
raise SkipTest('PIL does not have Python 3 support')
class TestImage(Document):
image = ImageField()
@@ -1754,15 +1902,17 @@ class FieldTest(unittest.TestCase):
t = TestImage.objects.first()
self.assertEquals(t.image.format, 'PNG')
self.assertEqual(t.image.format, 'PNG')
w, h = t.image.size
self.assertEquals(w, 371)
self.assertEquals(h, 76)
self.assertEqual(w, 371)
self.assertEqual(h, 76)
t.image.delete()
def test_image_field_resize(self):
if PY3:
raise SkipTest('PIL does not have Python 3 support')
class TestImage(Document):
image = ImageField(size=(185, 37))
@@ -1775,15 +1925,40 @@ class FieldTest(unittest.TestCase):
t = TestImage.objects.first()
self.assertEquals(t.image.format, 'PNG')
self.assertEqual(t.image.format, 'PNG')
w, h = t.image.size
self.assertEquals(w, 185)
self.assertEquals(h, 37)
self.assertEqual(w, 185)
self.assertEqual(h, 37)
t.image.delete()
def test_image_field_resize_force(self):
if PY3:
raise SkipTest('PIL does not have Python 3 support')
class TestImage(Document):
image = ImageField(size=(185, 37, True))
TestImage.drop_collection()
t = TestImage()
t.image.put(open(TEST_IMAGE_PATH, 'r'))
t.save()
t = TestImage.objects.first()
self.assertEqual(t.image.format, 'PNG')
w, h = t.image.size
self.assertEqual(w, 185)
self.assertEqual(h, 37)
t.image.delete()
def test_image_field_thumbnail(self):
if PY3:
raise SkipTest('PIL does not have Python 3 support')
class TestImage(Document):
image = ImageField(thumbnail_size=(92, 18))
@@ -1796,39 +1971,38 @@ class FieldTest(unittest.TestCase):
t = TestImage.objects.first()
self.assertEquals(t.image.thumbnail.format, 'PNG')
self.assertEquals(t.image.thumbnail.width, 92)
self.assertEquals(t.image.thumbnail.height, 18)
self.assertEqual(t.image.thumbnail.format, 'PNG')
self.assertEqual(t.image.thumbnail.width, 92)
self.assertEqual(t.image.thumbnail.height, 18)
t.image.delete()
def test_file_multidb(self):
register_connection('testfiles', 'testfiles')
register_connection('test_files', 'test_files')
class TestFile(Document):
name = StringField()
file = FileField(db_alias="testfiles",
collection_name="macumba")
the_file = FileField(db_alias="test_files",
collection_name="macumba")
TestFile.drop_collection()
# delete old filesystem
get_db("testfiles").macumba.files.drop()
get_db("testfiles").macumba.chunks.drop()
get_db("test_files").macumba.files.drop()
get_db("test_files").macumba.chunks.drop()
# First instance
testfile = TestFile()
testfile.name = "Hello, World!"
testfile.file.put('Hello, World!',
test_file = TestFile()
test_file.name = "Hello, World!"
test_file.the_file.put(b('Hello, World!'),
name="hello.txt")
testfile.save()
test_file.save()
data = get_db("testfiles").macumba.files.find_one()
self.assertEquals(data.get('name'), 'hello.txt')
data = get_db("test_files").macumba.files.find_one()
self.assertEqual(data.get('name'), 'hello.txt')
testfile = TestFile.objects.first()
self.assertEquals(testfile.file.read(),
'Hello, World!')
test_file = TestFile.objects.first()
self.assertEqual(test_file.the_file.read(),
b('Hello, World!'))
def test_geo_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields.
@@ -1903,6 +2077,27 @@ class FieldTest(unittest.TestCase):
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
self.assertEqual(c['next'], 10)
def test_sequence_field_sequence_name(self):
class Person(Document):
id = SequenceField(primary_key=True, sequence_name='jelly')
name = StringField()
self.db['mongoengine.counters'].drop()
Person.drop_collection()
for x in xrange(10):
p = Person(name="Person %s" % x)
p.save()
c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
self.assertEqual(c['next'], 10)
ids = [i.id for i in Person.objects]
self.assertEqual(ids, range(1, 11))
c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
self.assertEqual(c['next'], 10)
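# A rough sketch of the counter document driving the SequenceField above
# (field names come from the assertions; the find_and_modify call is an
# assumed implementation detail):
#
#     db['mongoengine.counters'].find_and_modify(
#         query={'_id': 'jelly.id'},
#         update={'$inc': {'next': 1}},
#         new=True, upsert=True)   # -> {'_id': 'jelly.id', 'next': <n>}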
def test_multiple_sequence_fields(self):
class Person(Document):
id = SequenceField(primary_key=True)
@@ -1990,6 +2185,28 @@ class FieldTest(unittest.TestCase):
c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
self.assertEqual(c['next'], 10)
def test_embedded_sequence_field(self):
class Comment(EmbeddedDocument):
id = SequenceField()
content = StringField(required=True)
class Post(Document):
title = StringField(required=True)
comments = ListField(EmbeddedDocumentField(Comment))
self.db['mongoengine.counters'].drop()
Post.drop_collection()
Post(title="MongoEngine",
comments=[Comment(content="NoSQL Rocks"),
Comment(content="MongoEngine Rocks")]).save()
c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'})
self.assertEqual(c['next'], 2)
post = Post.objects.first()
self.assertEqual(1, post.comments[0].id)
self.assertEqual(2, post.comments[1].id)
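# Note on the assertions above: the embedded SequenceField keys its counter
# off the embedded class alone ('comment.id'), so every Comment, whichever
# Post owns it, draws from the same sequence -- hence 'next' is 2 after two
# comments are saved.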
def test_generic_embedded_document(self):
class Car(EmbeddedDocument):
name = StringField()
@@ -2089,31 +2306,43 @@ class FieldTest(unittest.TestCase):
post.comments.append(Comment(content='hello', author=bob))
post.comments.append(Comment(author=bob))
self.assertRaises(ValidationError, post.validate)
try:
post.validate()
except ValidationError, error:
pass
# ValidationError.errors property
self.assertTrue(hasattr(error, 'errors'))
self.assertTrue(isinstance(error.errors, dict))
self.assertTrue('comments' in error.errors)
self.assertTrue(1 in error.errors['comments'])
self.assertTrue(isinstance(error.errors['comments'][1]['content'],
ValidationError))
# ValidationError.errors property
self.assertTrue(hasattr(error, 'errors'))
self.assertTrue(isinstance(error.errors, dict))
self.assertTrue('comments' in error.errors)
self.assertTrue(1 in error.errors['comments'])
self.assertTrue(isinstance(error.errors['comments'][1]['content'],
ValidationError))
# ValidationError.schema property
error_dict = error.to_dict()
self.assertTrue(isinstance(error_dict, dict))
self.assertTrue('comments' in error_dict)
self.assertTrue(1 in error_dict['comments'])
self.assertTrue('content' in error_dict['comments'][1])
self.assertEqual(error_dict['comments'][1]['content'],
u'Field is required')
# ValidationError.schema property
error_dict = error.to_dict()
self.assertTrue(isinstance(error_dict, dict))
self.assertTrue('comments' in error_dict)
self.assertTrue(1 in error_dict['comments'])
self.assertTrue('content' in error_dict['comments'][1])
self.assertEquals(error_dict['comments'][1]['content'],
u'Field is required ("content")')
post.comments[1].content = 'here we go'
post.validate()
def test_email_field_honors_regex(self):
class User(Document):
email = EmailField(regex=r'\w+@example.com')
# Fails regex validation
user = User(email='me@foo.com')
self.assertRaises(ValidationError, user.validate)
# Passes regex validation
user = User(email='me@example.com')
self.assertTrue(user.validate() is None)
if __name__ == '__main__':
unittest.main()


@@ -1,16 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import unittest
import pymongo
from bson import ObjectId
from datetime import datetime, timedelta
import pymongo
from bson import ObjectId
from mongoengine import *
from mongoengine.connection import get_connection
from mongoengine.python_support import PY3
from mongoengine.tests import query_counter
from mongoengine.queryset import (QuerySet, QuerySetManager,
MultipleObjectsReturned, DoesNotExist,
QueryFieldList)
from mongoengine import *
from mongoengine.connection import get_connection
from mongoengine.tests import query_counter
class QuerySetTest(unittest.TestCase):
@@ -21,6 +24,8 @@ class QuerySetTest(unittest.TestCase):
name = StringField()
age = IntField()
meta = {'allow_inheritance': True}
Person.drop_collection()
self.Person = Person
def test_initialisation(self):
@@ -225,6 +230,30 @@ class QuerySetTest(unittest.TestCase):
Blog.drop_collection()
def test_chaining(self):
class A(Document):
pass
class B(Document):
a = ReferenceField(A)
A.drop_collection()
B.drop_collection()
a1 = A().save()
a2 = A().save()
B(a=a1).save()
# Works
q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query
# Doesn't work
q2 = B.objects.filter(a__in=[a1, a2])
q2 = q2.filter(a=a1)._query
self.assertEqual(q1, q2)
def test_update_write_options(self):
"""Test that passing write_options works"""
@@ -239,11 +268,11 @@ class QuerySetTest(unittest.TestCase):
self.Person.objects.update(set__name='Ross', write_options=write_options)
author = self.Person.objects.first()
self.assertEquals(author.name, 'Ross')
self.assertEqual(author.name, 'Ross')
self.Person.objects.update_one(set__name='Test User', write_options=write_options)
author = self.Person.objects.first()
self.assertEquals(author.name, 'Test User')
self.assertEqual(author.name, 'Test User')
def test_update_update_has_a_value(self):
"""Test to ensure that update is passed a value to update to"""
@@ -332,8 +361,8 @@ class QuerySetTest(unittest.TestCase):
BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1)
post = BlogPost.objects.first()
self.assertEquals(post.comments[1].by, 'jane')
self.assertEquals(post.comments[1].votes, 8)
self.assertEqual(post.comments[1].by, 'jane')
self.assertEqual(post.comments[1].votes, 8)
# Currently the $ operator only applies to the first matched item in
# the query
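# A minimal sketch of the raw update the inc__comments__S__votes call above
# is expected to issue (the collection name is an assumption):
#
#     db.blog_post.update({'comments.by': 'jane'},
#                         {'$inc': {'comments.$.votes': 1}})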
@@ -346,7 +375,7 @@ class QuerySetTest(unittest.TestCase):
Simple.objects(x=2).update(inc__x__S=1)
simple = Simple.objects.first()
self.assertEquals(simple.x, [1, 3, 3, 2])
self.assertEqual(simple.x, [1, 3, 3, 2])
Simple.drop_collection()
# You can set multiples
@@ -358,10 +387,10 @@ class QuerySetTest(unittest.TestCase):
Simple.objects(x=3).update(set__x__S=0)
s = Simple.objects()
self.assertEquals(s[0].x, [1, 2, 0, 4])
self.assertEquals(s[1].x, [2, 0, 4, 5])
self.assertEquals(s[2].x, [0, 4, 5, 6])
self.assertEquals(s[3].x, [4, 5, 6, 7])
self.assertEqual(s[0].x, [1, 2, 0, 4])
self.assertEqual(s[1].x, [2, 0, 4, 5])
self.assertEqual(s[2].x, [0, 4, 5, 6])
self.assertEqual(s[3].x, [4, 5, 6, 7])
# Using "$unset" with an expression like this "array.$" will result in
# the array item becoming None, not being removed.
@@ -369,14 +398,14 @@ class QuerySetTest(unittest.TestCase):
Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save()
Simple.objects(x=3).update(unset__x__S=1)
simple = Simple.objects.first()
self.assertEquals(simple.x, [1, 2, None, 4, 3, 2, 3, 4])
self.assertEqual(simple.x, [1, 2, None, 4, 3, 2, 3, 4])
# Nested updates aren't supported yet.
def update_nested():
Simple.drop_collection()
Simple(x=[{'test': [1, 2, 3, 4]}]).save()
Simple.objects(x__test=2).update(set__x__S__test__S=3)
self.assertEquals(simple.x, [1, 2, 3, 4])
self.assertEqual(simple.x, [1, 2, 3, 4])
self.assertRaises(OperationError, update_nested)
Simple.drop_collection()
@@ -406,8 +435,32 @@ class QuerySetTest(unittest.TestCase):
BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4))
post = BlogPost.objects.first()
self.assertEquals(post.comments[0].by, 'joe')
self.assertEquals(post.comments[0].votes.score, 4)
self.assertEqual(post.comments[0].by, 'joe')
self.assertEqual(post.comments[0].votes.score, 4)
def test_updates_can_have_match_operators(self):
class Post(Document):
title = StringField(required=True)
tags = ListField(StringField())
comments = ListField(EmbeddedDocumentField("Comment"))
class Comment(EmbeddedDocument):
content = StringField()
name = StringField(max_length=120)
vote = IntField()
Post.drop_collection()
comm1 = Comment(content="very funny indeed", name="John S", vote=1)
comm2 = Comment(content="kind of funny", name="Mark P", vote=0)
Post(title='Fun with MongoEngine', tags=['mongodb', 'mongoengine'],
comments=[comm1, comm2]).save()
Post.objects().update_one(pull__comments__vote__lt=1)
self.assertEqual(1, len(Post.objects.first().comments))
def test_mapfield_update(self):
"""Ensure that the MapField can be updated."""
@@ -496,7 +549,7 @@ class QuerySetTest(unittest.TestCase):
Blog.drop_collection()
# Recreates the collection
self.assertEqual(0, Blog.objects.count())
with query_counter() as q:
@@ -538,6 +591,10 @@ class QuerySetTest(unittest.TestCase):
self.assertRaises(OperationError, throw_operation_error)
# Test can insert new doc
new_post = Blog(title="code", id=ObjectId())
Blog.objects.insert(new_post)
# test handles other classes being inserted
def throw_operation_error_wrong_doc():
class Author(Document):
@@ -561,7 +618,7 @@ class QuerySetTest(unittest.TestCase):
Blog.drop_collection()
blog1 = Blog(title="code", posts=[post1, post2])
obj_id = Blog.objects.insert(blog1, load_bulk=False)
self.assertEquals(obj_id.__class__.__name__, 'ObjectId')
self.assertEqual(obj_id.__class__.__name__, 'ObjectId')
Blog.drop_collection()
post3 = Post(comments=[comment1, comment1])
@@ -573,12 +630,70 @@ class QuerySetTest(unittest.TestCase):
def throw_operation_error_not_unique():
Blog.objects.insert([blog2, blog3], safe=True)
self.assertRaises(OperationError, throw_operation_error_not_unique)
self.assertRaises(NotUniqueError, throw_operation_error_not_unique)
self.assertEqual(Blog.objects.count(), 2)
Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
self.assertEqual(Blog.objects.count(), 3)
def test_get_changed_fields_query_count(self):
class Person(Document):
name = StringField()
owns = ListField(ReferenceField('Organization'))
projects = ListField(ReferenceField('Project'))
class Organization(Document):
name = StringField()
owner = ReferenceField('Person')
employees = ListField(ReferenceField('Person'))
class Project(Document):
name = StringField()
Person.drop_collection()
Organization.drop_collection()
Project.drop_collection()
r1 = Project(name="r1").save()
r2 = Project(name="r2").save()
r3 = Project(name="r3").save()
p1 = Person(name="p1", projects=[r1, r2]).save()
p2 = Person(name="p2", projects=[r2]).save()
o1 = Organization(name="o1", employees=[p1]).save()
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
self.assertEqual(1, q)
fresh_o1._get_changed_fields()
self.assertEqual(1, q)
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
fresh_o1.save()
self.assertEqual(q, 2)
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
fresh_o1.save(cascade=False)
self.assertEqual(q, 2)
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
fresh_o1.employees.append(p2)
fresh_o1.save(cascade=False)
self.assertEqual(q, 3)
def test_slave_okay(self):
"""Ensures that a query can take slave_okay syntax
@@ -636,17 +751,38 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(people1, people2)
self.assertEqual(people1, people3)
def test_repr_iteration(self):
"""Ensure that QuerySet __repr__ can handle loops
"""
self.Person(name='Person 1').save()
self.Person(name='Person 2').save()
def test_repr(self):
"""Test repr behavior isnt destructive"""
queryset = self.Person.objects
self.assertEquals('[<Person: Person object>, <Person: Person object>]', repr(queryset))
for person in queryset:
self.assertEquals('.. queryset mid-iteration ..', repr(queryset))
class Doc(Document):
number = IntField()
def __repr__(self):
return "<Doc: %s>" % self.number
Doc.drop_collection()
for i in xrange(1000):
Doc(number=i).save()
docs = Doc.objects.order_by('number')
self.assertEqual(docs.count(), 1000)
self.assertEqual(len(docs), 1000)
docs_string = "%s" % docs
self.assertTrue("Doc: 0" in docs_string)
self.assertEqual(docs.count(), 1000)
self.assertEqual(len(docs), 1000)
# Limit and skip
self.assertEqual('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4])
self.assertEqual(docs.count(), 3)
self.assertEqual(len(docs), 3)
for doc in docs:
self.assertEqual('.. queryset mid-iteration ..', repr(docs))
def test_regex_query_shortcuts(self):
"""Ensure that contains, startswith, endswith, etc work.
@@ -748,7 +884,11 @@ class QuerySetTest(unittest.TestCase):
def test_filter_chaining(self):
"""Ensure filters can be chained together.
"""
class Blog(Document):
id = StringField(unique=True, primary_key=True)
class BlogPost(Document):
blog = ReferenceField(Blog)
title = StringField()
is_published = BooleanField()
published_date = DateTimeField()
@@ -757,13 +897,24 @@ class QuerySetTest(unittest.TestCase):
def published(doc_cls, queryset):
return queryset(is_published=True)
blog_post_1 = BlogPost(title="Blog Post #1",
Blog.drop_collection()
BlogPost.drop_collection()
blog_1 = Blog(id="1")
blog_2 = Blog(id="2")
blog_3 = Blog(id="3")
blog_1.save()
blog_2.save()
blog_3.save()
blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1",
is_published = True,
published_date=datetime(2010, 1, 5, 0, 0, 0))
blog_post_2 = BlogPost(title="Blog Post #2",
blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2",
is_published = True,
published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_3 = BlogPost(title="Blog Post #3",
blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3",
is_published = True,
published_date=datetime(2010, 1, 7, 0, 0, 0))
@@ -777,7 +928,29 @@ class QuerySetTest(unittest.TestCase):
published_date__lt=datetime(2010, 1, 7, 0, 0, 0))
self.assertEqual(published_posts.count(), 2)
blog_posts = BlogPost.objects
blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2])
blog_posts = blog_posts.filter(blog=blog_3)
self.assertEqual(blog_posts.count(), 0)
BlogPost.drop_collection()
Blog.drop_collection()
def test_raw_and_merging(self):
class Doc(Document):
pass
raw_query = Doc.objects(__raw__={'deleted': False,
'scraped': 'yes',
'$nor': [{'views.extracted': 'no'},
{'attachments.views.extracted':'no'}]
})._query
expected = {'deleted': False, '_types': 'Doc', 'scraped': 'yes',
'$nor': [{'views.extracted': 'no'},
{'attachments.views.extracted': 'no'}]}
self.assertEqual(expected, raw_query)
def test_ordering(self):
"""Ensure default ordering is applied and can be overridden.
@@ -1003,27 +1176,27 @@ class QuerySetTest(unittest.TestCase):
# first three
numbers = Numbers.objects.fields(slice__n=3).get()
self.assertEquals(numbers.n, [0, 1, 2])
self.assertEqual(numbers.n, [0, 1, 2])
# last three
numbers = Numbers.objects.fields(slice__n=-3).get()
self.assertEquals(numbers.n, [-3, -2, -1])
self.assertEqual(numbers.n, [-3, -2, -1])
# skip 2, limit 3
numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
self.assertEquals(numbers.n, [2, 3, 4])
self.assertEqual(numbers.n, [2, 3, 4])
# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
self.assertEquals(numbers.n, [-5, -4, -3, -2])
self.assertEqual(numbers.n, [-5, -4, -3, -2])
# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
self.assertEquals(numbers.n, [-5, -4, -3, -2, -1])
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])
# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
self.assertEquals(numbers.n, [-5, -4, -3, -2, -1])
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])
def test_slicing_nested_fields(self):
"""Ensure that query slicing an embedded array works.
@@ -1043,27 +1216,27 @@ class QuerySetTest(unittest.TestCase):
# first three
numbers = Numbers.objects.fields(slice__embedded__n=3).get()
self.assertEquals(numbers.embedded.n, [0, 1, 2])
self.assertEqual(numbers.embedded.n, [0, 1, 2])
# last three
numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
self.assertEquals(numbers.embedded.n, [-3, -2, -1])
self.assertEqual(numbers.embedded.n, [-3, -2, -1])
# skip 2, limit 3
numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
self.assertEquals(numbers.embedded.n, [2, 3, 4])
self.assertEqual(numbers.embedded.n, [2, 3, 4])
# skip to fifth from last, limit 4
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2])
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])
# skip to fifth from last, limit 10
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1])
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
# skip to fifth from last, limit 10 dict method
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1])
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
def test_find_embedded(self):
"""Ensure that an embedded document is properly returned from a query.
@@ -1147,7 +1320,6 @@ class QuerySetTest(unittest.TestCase):
published_posts = (post1, post2, post3, post5, post6)
self.assertTrue(all(obj.id in posts for obj in published_posts))
# Check Q object combination
date = datetime(2010, 1, 10)
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
@@ -1206,6 +1378,42 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
def test_raw_query_and_Q_objects(self):
"""
Test that __raw__ queries play nicely with Q objects.
"""
class Foo(Document):
name = StringField()
a = StringField()
b = StringField()
c = StringField()
meta = {
'allow_inheritance': False
}
query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query
self.assertEqual(query, {'$nor': [{'name': 'bar'}]})
q1 = {'$or': [{'a': 1}, {'b': 1}]}
query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})
def test_q_merge_queries_edge_case(self):
class User(Document):
email = EmailField(required=False)
name = StringField()
User.drop_collection()
pk = ObjectId()
User(email='example@example.com', pk=pk).save()
self.assertEqual(1, User.objects.filter(
Q(email='example@example.com') |
Q(name='John Doe')
).limit(2).filter(pk=pk).count())
def test_exec_js_query(self):
"""Ensure that queries are properly formed for use in exec_js.
"""
@@ -1304,7 +1512,7 @@ class QuerySetTest(unittest.TestCase):
# Test template style
code = "{{~comments.content}}"
sub_code = BlogPost.objects._sub_js_fields(code)
self.assertEquals("cmnts.body", sub_code)
self.assertEqual("cmnts.body", sub_code)
BlogPost.drop_collection()
@@ -1345,12 +1553,15 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(1, BlogPost.objects.count())
def test_reverse_delete_rule_cascade_self_referencing(self):
"""Ensure self-referencing CASCADE deletes do not result in infinite loop
"""Ensure self-referencing CASCADE deletes do not result in infinite
loop
"""
class Category(Document):
name = StringField()
parent = ReferenceField('self', reverse_delete_rule=CASCADE)
Category.drop_collection()
num_children = 3
base = Category(name='Root')
base.save()
@@ -1367,13 +1578,13 @@ class QuerySetTest(unittest.TestCase):
child_child.save()
tree_size = 1 + num_children + (num_children * num_children)
self.assertEquals(tree_size, Category.objects.count())
self.assertEquals(num_children, Category.objects(parent=base).count())
self.assertEqual(tree_size, Category.objects.count())
self.assertEqual(num_children, Category.objects(parent=base).count())
# The delete should effectively wipe out the Category collection
# without resulting in infinite parent-child cascade recursion
base.delete()
self.assertEquals(0, Category.objects.count())
self.assertEqual(0, Category.objects.count())
def test_reverse_delete_rule_nullify(self):
"""Ensure nullification of references to deleted documents.
@@ -1449,6 +1660,40 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(post.authors, [me])
self.assertEqual(another.authors, [])
def test_delete_with_limits(self):
class Log(Document):
pass
Log.drop_collection()
for i in xrange(10):
Log().save()
Log.objects()[3:5].delete()
self.assertEqual(8, Log.objects.count())
def test_delete_with_limit_handles_delete_rules(self):
"""Ensure cascading deletion of referring documents from the database.
"""
class BlogPost(Document):
content = StringField()
author = ReferenceField(self.Person, reverse_delete_rule=CASCADE)
BlogPost.drop_collection()
me = self.Person(name='Test User')
me.save()
someoneelse = self.Person(name='Some-one Else')
someoneelse.save()
BlogPost(content='Watching TV', author=me).save()
BlogPost(content='Chilling out', author=me).save()
BlogPost(content='Pro Testing', author=someoneelse).save()
self.assertEqual(3, BlogPost.objects.count())
self.Person.objects()[:1].delete()
self.assertEqual(1, BlogPost.objects.count())
def test_update(self):
"""Ensure that atomic updates work properly.
"""
@@ -1499,7 +1744,7 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
def test_update_push_and_pull(self):
def test_update_push_and_pull_add_to_set(self):
"""Ensure that the 'pull' update operation works correctly.
"""
class BlogPost(Document):
@@ -1532,6 +1777,23 @@ class QuerySetTest(unittest.TestCase):
post.reload()
self.assertEqual(post.tags, ["code", "mongodb"])
def test_add_to_set_each(self):
class Item(Document):
name = StringField(required=True)
description = StringField(max_length=50)
parents = ListField(ReferenceField('self'))
Item.drop_collection()
item = Item(name='test item').save()
parent_1 = Item(name='parent 1').save()
parent_2 = Item(name='parent 2').save()
item.update(add_to_set__parents=[parent_1, parent_2, parent_1])
item.reload()
self.assertEqual([parent_1, parent_2], item.parents)
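# A rough sketch of the modifier the add_to_set call above maps to (the
# $each form is inferred from the de-duplicated result):
#
#     {'$addToSet': {'parents': {'$each': [parent_1.pk, parent_2.pk, parent_1.pk]}}}
#
# $addToSet only stores values that are not already present, so parent_1
# ends up in the list once.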
def test_pull_nested(self):
class User(Document):
@@ -1617,7 +1879,7 @@ class QuerySetTest(unittest.TestCase):
BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python")
post.reload()
self.assertEquals(post.tags[0].name, 'python')
self.assertEqual(post.tags[0].name, 'python')
BlogPost.objects(slug="test-2").update_one(pop__tags=-1)
post.reload()
@@ -1644,7 +1906,7 @@ class QuerySetTest(unittest.TestCase):
set__authors__S=Author(name="Ross"))
message = message.reload()
self.assertEquals(message.authors[0].name, "Ross")
self.assertEqual(message.authors[0].name, "Ross")
Message.objects(authors__name="Ross").update_one(
set__authors=[Author(name="Harry"),
@@ -1652,9 +1914,9 @@ class QuerySetTest(unittest.TestCase):
Author(name="Adam")])
message = message.reload()
self.assertEquals(message.authors[0].name, "Harry")
self.assertEquals(message.authors[1].name, "Ross")
self.assertEquals(message.authors[2].name, "Adam")
self.assertEqual(message.authors[0].name, "Harry")
self.assertEqual(message.authors[1].name, "Ross")
self.assertEqual(message.authors[2].name, "Adam")
def test_order_by(self):
"""Ensure that QuerySets may be ordered.
@@ -1675,6 +1937,22 @@ class QuerySetTest(unittest.TestCase):
ages = [p.age for p in self.Person.objects.order_by('-name')]
self.assertEqual(ages, [30, 40, 20])
def test_order_by_chaining(self):
"""Ensure that an order_by query chains properly and allows .only()
"""
self.Person(name="User A", age=20).save()
self.Person(name="User B", age=40).save()
self.Person(name="User C", age=30).save()
only_age = self.Person.objects.order_by('-age').only('age')
names = [p.name for p in only_age]
ages = [p.age for p in only_age]
# The .only('age') clause should mean that all names are None
self.assertEqual(names, [None, None, None])
self.assertEqual(ages, [40, 30, 20])
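# Roughly the query the chained queryset above issues (an illustrative
# sketch; the exact projection and any _types filtering are assumptions):
#
#     db.person.find({}, {'age': 1}).sort([('age', -1)])
#
# Since only 'age' is projected, 'name' is never loaded and stays None on
# the returned documents.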
def test_confirm_order_by_reference_wont_work(self):
"""Ordering by reference is not possible. Use map / reduce.. or
denormalise"""
@@ -1734,10 +2012,10 @@ class QuerySetTest(unittest.TestCase):
results = list(results)
self.assertEqual(len(results), 4)
music = filter(lambda r: r.key == "music", results)[0]
music = list(filter(lambda r: r.key == "music", results))[0]
self.assertEqual(music.value, 2)
film = filter(lambda r: r.key == "film", results)[0]
film = list(filter(lambda r: r.key == "film", results))[0]
self.assertEqual(film.value, 3)
BlogPost.drop_collection()
@@ -1956,9 +2234,9 @@ class QuerySetTest(unittest.TestCase):
# Check item_frequencies works for non-list fields
def test_assertions(f):
self.assertEqual(set(['1', '2']), set(f.keys()))
self.assertEqual(f['1'], 1)
self.assertEqual(f['2'], 2)
self.assertEqual(set([1, 2]), set(f.keys()))
self.assertEqual(f[1], 1)
self.assertEqual(f[2], 2)
exec_js = BlogPost.objects.item_frequencies('hits')
map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True)
@@ -2037,15 +2315,15 @@ class QuerySetTest(unittest.TestCase):
Person(name="Wilson Jr").save()
freq = Person.objects.item_frequencies('city')
self.assertEquals(freq, {'CRB': 1.0, None: 1.0})
self.assertEqual(freq, {'CRB': 1.0, None: 1.0})
freq = Person.objects.item_frequencies('city', normalize=True)
self.assertEquals(freq, {'CRB': 0.5, None: 0.5})
self.assertEqual(freq, {'CRB': 0.5, None: 0.5})
freq = Person.objects.item_frequencies('city', map_reduce=True)
self.assertEquals(freq, {'CRB': 1.0, None: 1.0})
self.assertEqual(freq, {'CRB': 1.0, None: 1.0})
freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True)
self.assertEquals(freq, {'CRB': 0.5, None: 0.5})
self.assertEqual(freq, {'CRB': 0.5, None: 0.5})
def test_item_frequencies_with_null_embedded(self):
class Data(EmbeddedDocument):
@@ -2058,7 +2336,6 @@ class QuerySetTest(unittest.TestCase):
data = EmbeddedDocumentField(Data, required=True)
extra = EmbeddedDocumentField(Extra)
Person.drop_collection()
p = Person()
@@ -2071,10 +2348,56 @@ class QuerySetTest(unittest.TestCase):
p.save()
ot = Person.objects.item_frequencies('extra.tag', map_reduce=False)
self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
self.assertEqual(ot, {None: 1.0, u'friend': 1.0})
ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
self.assertEqual(ot, {None: 1.0, u'friend': 1.0})
def test_item_frequencies_with_0_values(self):
class Test(Document):
val = IntField()
Test.drop_collection()
t = Test()
t.val = 0
t.save()
ot = Test.objects.item_frequencies('val', map_reduce=True)
self.assertEqual(ot, {0: 1})
ot = Test.objects.item_frequencies('val', map_reduce=False)
self.assertEqual(ot, {0: 1})
def test_item_frequencies_with_False_values(self):
class Test(Document):
val = BooleanField()
Test.drop_collection()
t = Test()
t.val = False
t.save()
ot = Test.objects.item_frequencies('val', map_reduce=True)
self.assertEqual(ot, {False: 1})
ot = Test.objects.item_frequencies('val', map_reduce=False)
self.assertEqual(ot, {False: 1})
def test_item_frequencies_normalize(self):
class Test(Document):
val = IntField()
Test.drop_collection()
for i in xrange(50):
Test(val=1).save()
for i in xrange(20):
Test(val=2).save()
freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True)
self.assertEqual(freqs, {1: 50.0/70, 2: 20.0/70})
freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True)
self.assertEqual(freqs, {1: 50.0/70, 2: 20.0/70})
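# With normalize=True each count is divided by the total number of values,
# i.e. freq[val] = count[val] / float(total), so the 70 documents above give
# roughly {1: 0.714, 2: 0.286} instead of the raw {1: 50, 2: 20}.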
def test_average(self):
"""Ensure that field can be averaged correctly.
@@ -2134,7 +2457,29 @@ class QuerySetTest(unittest.TestCase):
foo = Foo(bar=bar)
foo.save()
self.assertEquals(Foo.objects.distinct("bar"), [bar])
self.assertEqual(Foo.objects.distinct("bar"), [bar])
def test_distinct_handles_references_to_alias(self):
register_connection('testdb', 'mongoenginetest2')
class Foo(Document):
bar = ReferenceField("Bar")
meta = {'db_alias': 'testdb'}
class Bar(Document):
text = StringField()
meta = {'db_alias': 'testdb'}
Bar.drop_collection()
Foo.drop_collection()
bar = Bar(text="hi")
bar.save()
foo = Foo(bar=bar)
foo.save()
self.assertEqual(Foo.objects.distinct("bar"), [bar])
def test_custom_manager(self):
"""Ensure that custom QuerySetManager instances work as expected.
@@ -2145,28 +2490,29 @@ class QuerySetTest(unittest.TestCase):
date = DateTimeField(default=datetime.now)
@queryset_manager
def objects(doc_cls, queryset):
return queryset(deleted=False)
def objects(cls, qryset):
opts = {"deleted": False}
return qryset(**opts)
@queryset_manager
def music_posts(doc_cls, queryset):
return queryset(tags='music', deleted=False).order_by('-date')
def music_posts(doc_cls, queryset, deleted=False):
return queryset(tags='music',
deleted=deleted).order_by('date')
BlogPost.drop_collection()
post1 = BlogPost(tags=['music', 'film'])
post1.save()
post2 = BlogPost(tags=['music'])
post2.save()
post3 = BlogPost(tags=['film', 'actors'])
post3.save()
post4 = BlogPost(tags=['film', 'actors'], deleted=True)
post4.save()
post1 = BlogPost(tags=['music', 'film']).save()
post2 = BlogPost(tags=['music']).save()
post3 = BlogPost(tags=['film', 'actors']).save()
post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save()
self.assertEqual([p.id for p in BlogPost.objects],
self.assertEqual([p.id for p in BlogPost.objects()],
[post1.id, post2.id, post3.id])
self.assertEqual([p.id for p in BlogPost.music_posts],
[post2.id, post1.id])
self.assertEqual([p.id for p in BlogPost.music_posts()],
[post1.id, post2.id])
self.assertEqual([p.id for p in BlogPost.music_posts(True)],
[post4.id])
BlogPost.drop_collection()
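# The pattern exercised above: a @queryset_manager function receives the
# document class and the base queryset, and extra arguments supplied at the
# call site (e.g. music_posts(True)) are forwarded to it -- here toggling
# the 'deleted' filter.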
@@ -2291,30 +2637,48 @@ class QuerySetTest(unittest.TestCase):
"""Ensure that index_types will, when disabled, prevent _types
being added to all indices.
"""
class BlogPost(Document):
class BloggPost(Document):
date = DateTimeField()
meta = {'index_types': False,
'indexes': ['-date']}
# Indexes are lazy so use list() to perform query
list(BlogPost.objects)
info = BlogPost.objects._collection.index_information()
list(BloggPost.objects)
info = BloggPost.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
self.assertTrue([('_types', 1)] not in info)
self.assertTrue([('date', -1)] in info)
BlogPost.drop_collection()
BloggPost.drop_collection()
class BlogPost(Document):
class BloggPost(Document):
title = StringField()
meta = {'allow_inheritance': False}
# _types is not used on objects where allow_inheritance is False
list(BlogPost.objects)
info = BlogPost.objects._collection.index_information()
list(BloggPost.objects)
info = BloggPost.objects._collection.index_information()
self.assertFalse([('_types', 1)] in info.values())
BlogPost.drop_collection()
BloggPost.drop_collection()
def test_types_index_with_pk(self):
class Comment(EmbeddedDocument):
comment_id = IntField(required=True)
try:
class BlogPost(Document):
comments = EmbeddedDocumentField(Comment)
meta = {'indexes': [{'fields': ['pk', 'comments.comment_id'],
'unique': True}]}
except UnboundLocalError:
self.fail('Unbound local error at types index + pk definition')
info = BlogPost.objects._collection.index_information()
info = [value['key'] for key, value in info.iteritems()]
index_item = [(u'_types', 1), (u'_id', 1), (u'comments.comment_id', 1)]
self.assertTrue(index_item in info)
def test_dict_with_custom_baseclass(self):
"""Ensure DictField working with custom base clases.
@@ -2593,8 +2957,8 @@ class QuerySetTest(unittest.TestCase):
Post().save()
Post(is_published=True).save()
self.assertEquals(Post.objects.count(), 2)
self.assertEquals(Post.published.count(), 1)
self.assertEqual(Post.objects.count(), 2)
self.assertEqual(Post.published.count(), 1)
Post.drop_collection()
@@ -2760,10 +3124,10 @@ class QuerySetTest(unittest.TestCase):
Number(n=3).save()
numbers = [n.n for n in Number.objects.order_by('-n')]
self.assertEquals([3, 2, 1], numbers)
self.assertEqual([3, 2, 1], numbers)
numbers = [n.n for n in Number.objects.order_by('+n')]
self.assertEquals([1, 2, 3], numbers)
self.assertEqual([1, 2, 3], numbers)
Number.drop_collection()
@@ -3006,6 +3370,19 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(plist[1], (20, False))
self.assertEqual(plist[2], (30, True))
def test_scalar_primary_key(self):
class SettingValue(Document):
key = StringField(primary_key=True)
value = StringField()
SettingValue.drop_collection()
s = SettingValue(key="test", value="test value")
s.save()
val = SettingValue.objects.scalar('key', 'value')
self.assertEqual(list(val), [('test', 'test value')])
def test_scalar_cursor_behaviour(self):
"""Ensure that a query returns a valid set of results.
"""
@@ -3072,15 +3449,22 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(len(self.Person.objects.scalar('name')), 55)
self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first())
self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0])
self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])
if PY3:
self.assertEqual("['A1', 'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])
else:
self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])
# with_id and in_bulk
person = self.Person.objects.order_by('name').first()
self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id))
pks = self.Person.objects.order_by('age').scalar('pk')[1:3]
self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
if PY3:
self.assertEqual("['A1', 'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
else:
self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
class QTest(unittest.TestCase):
@@ -3327,6 +3711,38 @@ class QueryFieldListTest(unittest.TestCase):
ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"}))
self.assertEqual([b1], ak)
def test_as_pymongo(self):
from decimal import Decimal
class User(Document):
id = ObjectIdField('_id')
name = StringField()
age = IntField()
price = DecimalField()
User.drop_collection()
User(name="Bob Dole", age=89, price=Decimal('1.11')).save()
User(name="Barack Obama", age=51, price=Decimal('2.22')).save()
users = User.objects.only('name', 'price').as_pymongo()
results = list(users)
self.assertTrue(isinstance(results[0], dict))
self.assertTrue(isinstance(results[1], dict))
self.assertEqual(results[0]['name'], 'Bob Dole')
self.assertEqual(results[0]['price'], '1.11')
self.assertEqual(results[1]['name'], 'Barack Obama')
self.assertEqual(results[1]['price'], '2.22')
# Test coerce_types
users = User.objects.only('name', 'price').as_pymongo(coerce_types=True)
results = list(users)
self.assertTrue(isinstance(results[0], dict))
self.assertTrue(isinstance(results[1], dict))
self.assertEqual(results[0]['name'], 'Bob Dole')
self.assertEqual(results[0]['price'], Decimal('1.11'))
self.assertEqual(results[1]['name'], 'Barack Obama')
self.assertEqual(results[1]['price'], Decimal('2.22'))
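# A short usage sketch of the behaviour asserted above (assuming the 0.7.x
# DecimalField storage format, which keeps decimals as strings in MongoDB):
#
#     raw = list(User.objects.only('price').as_pymongo())[0]
#     raw['price']        # u'1.11' -- the value exactly as stored
#     typed = list(User.objects.only('price').as_pymongo(coerce_types=True))[0]
#     typed['price']      # Decimal('1.11') -- converted back via the field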
if __name__ == '__main__':
unittest.main()


@@ -1,4 +1,5 @@
import unittest
import pymongo
from pymongo import ReadPreference, ReplicaSetConnection
@@ -26,7 +27,7 @@ class ConnectionTest(unittest.TestCase):
if not isinstance(conn, ReplicaSetConnection):
return
self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY)
self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
if __name__ == '__main__':
unittest.main()


@@ -212,9 +212,9 @@ class SignalTests(unittest.TestCase):
# The output of this signal is not entirely deterministic. The reloaded
# object will have an object ID. Hence, we only check part of the output
self.assertEquals(signal_output[3],
self.assertEqual(signal_output[3],
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
self.assertEquals(signal_output[-2:],
self.assertEqual(signal_output[-2:],
["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Is loaded",])