Compare commits


294 Commits
v0.6.6 ... 0.7

Author SHA1 Message Date
Ross Lawley
31ec7907b5 Fixing py3 compat 2012-10-01 20:01:43 +00:00
Ross Lawley
12f3f8c694 Added chaining regression test (MongoEngine/mongoengine#135) 2012-10-01 16:27:59 +00:00
Ross Lawley
79098e997e Updated test 2012-10-01 16:20:48 +00:00
Ross Lawley
dc1849bad5 Unicode fix for repr (MongoEngine/mongoengine#133) 2012-10-01 16:15:25 +00:00
Ross Lawley
e2d826c412 Allow updates with match operators (MongoEngine/mongoengine#144) 2012-10-01 15:26:34 +00:00
Ross Lawley
e6d796832e Unicode fix reverted but can have custom validator
MongoEngine/mongoengine#136
2012-10-01 14:48:53 +00:00
Ross Lawley
6f0a6df4f6 Fix loop 2012-10-01 14:23:05 +00:00
Ross Lawley
7a877a00d5 Updated URLField
handle unicode and custom validator (MongoEngine/mongoengine#136)
2012-10-01 13:59:15 +00:00
Ross Lawley
e8604d100e Added Garry Polley to contributors list
hmarr/mongoengine#573
2012-10-01 10:20:28 +00:00
Ross Lawley
1647441ce8 Merge branch 'master' of github.com:hmarr/mongoengine 2012-10-01 10:17:01 +00:00
Ross Lawley
9f8d6b3a00 Merge pull request #573 from garrypolley/master
Allow Django  AuthenticationBackends to work with Django user

Thanks @garrypolley
2012-10-01 00:24:52 -07:00
Garry Polley
4b2ad25405 can now use AuthenticationBackends with permissions. 2012-09-27 10:21:05 -05:00
Ross Lawley
3ce163b1a0 Updates to the readme 2012-09-27 10:29:24 +00:00
Ross Lawley
7c1ee28f13 Added CONTRIBUTING.rst 2012-09-27 10:26:22 +00:00
Ross Lawley
2645e43da1 Merge branch 'master' into 0.7 2012-09-27 08:30:36 +00:00
Ross Lawley
59bfe551a3 Updated docs thanks @mitar 2012-09-27 08:29:49 +00:00
Ross Lawley
e2c78047b1 Moved injection of Exceptions to top level 2012-09-26 10:43:14 +00:00
Ross Lawley
6a4351e44f Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138) 2012-09-24 18:49:29 +00:00
Ross Lawley
adb60ef1ac Improved import cache 2012-09-24 18:45:02 +00:00
Ross Lawley
3090adac04 Fixed objectId for DBRef 2012-09-24 11:37:54 +00:00
Ross Lawley
b9253d86cc Updated travis.yml 2012-09-19 18:53:31 +00:00
Ross Lawley
ab4d4e6230 Fix ReferenceField dbref = False 2012-09-18 21:37:45 +00:00
Ross Lawley
7cd38c56c6 Merge branch 'master' of github.com:MongoEngine/mongoengine into 0.7 2012-09-14 10:19:04 +00:00
Ross Lawley
864053615b Updated docs 2012-09-14 10:18:44 +00:00
Ross Lawley
db2366f112 Merge pull request #126 from mahmoudhossam/patch-1
Update docs/index.rst
2012-09-13 06:18:06 -07:00
Ross Lawley
4defc82192 Version Bump 2012-09-11 15:16:39 +00:00
Ross Lawley
5949970a95 Fixed index inheritance issues
firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125)
2012-09-11 15:14:37 +00:00
Mahmoud Hossam
0ea4abda81 Update docs/index.rst
Correct link for the source.
2012-09-11 12:38:40 +03:00
Ross Lawley
5c6035d636 Updated upgrade docs for BinaryFields 2012-09-11 08:56:32 +00:00
Ross Lawley
a2183e3dcc Reverted EmbeddedDocuments meta handling.
You now can turn off inheritance (MongoEngine/mongoengine#119)
2012-09-07 13:23:46 +01:00
Ross Lawley
99637151b5 Updated version 2012-09-05 10:24:25 +01:00
Ross Lawley
a8e787c120 Update index spec generation so its not destructive (MongoEngine/mongoengine#113) 2012-09-04 14:39:19 +01:00
Ross Lawley
53339c7c72 Version bump 2012-09-04 08:21:44 +01:00
Ross Lawley
3534bf7d70 Fixed index spec inheritance (MongoEngine/mongoengine#111) 2012-09-04 08:20:19 +01:00
Ross Lawley
1cf3989664 Updated setup.py 2012-09-04 08:10:56 +01:00
Ross Lawley
fd296918da Fixing readme for pypi 2012-09-03 15:51:36 +01:00
Ross Lawley
8ad1f03dc5 Updated travis 2012-09-03 15:23:30 +01:00
Ross Lawley
fe7e17dbd5 Updated 0.7 branch 2012-09-03 15:21:52 +01:00
Ross Lawley
d582394a42 Merge branch 'master' into 0.7 2012-09-03 15:20:17 +01:00
Ross Lawley
02ef0df019 Updated travis.yml 2012-09-03 13:29:29 +01:00
Ross Lawley
0dfd6aa518 Updated travis.yml - now testing multiple pymongos 2012-09-03 13:22:08 +01:00
Ross Lawley
0b23bc9cf2 python 2.6.4 and lower cannot handle unicode keys passed to __init__ (MongoEngine/mongoengine#101) 2012-09-03 13:10:06 +01:00
Ross Lawley
f108c4288e Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107) 2012-09-03 12:53:50 +01:00
Ross Lawley
9b9696aefd Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104) 2012-09-03 12:29:58 +01:00
Ross Lawley
576e198ece Fixed Q object merge edge case (MongoEngine/mongoengine#109) 2012-09-03 11:33:30 +01:00
Ross Lawley
52f85aab18 Merge branch 'master' of https://github.com/dimonb/mongoengine
Conflicts:
	mongoengine/base.py
2012-09-03 11:00:41 +01:00
Dmitry Balabanov
ab60fd0490 sharded collection document reload testcase 2012-08-30 14:37:11 +04:00
Dmitry Balabanov
d79ae30f31 fix object reload with shard_key in meta 2012-08-30 14:20:53 +04:00
Dmitry Balabanov
f27debe7f9 Respect sharding key when delete object from collection 2012-08-30 12:40:44 +04:00
Ross Lawley
735e043ff6 RC Version Bump 2012-08-24 16:36:40 +01:00
Ross Lawley
6e7f2b73cf Fix VERSION 2012-08-24 16:36:17 +01:00
Ross Lawley
d645ce9745 Updated version calculation 2012-08-24 15:08:37 +01:00
Ross Lawley
7c08c140da Updated upgrade documents 2012-08-24 14:18:45 +01:00
Ross Lawley
81d402dc17 Fixing tests 2012-08-24 13:58:38 +01:00
Ross Lawley
966fa12358 Updated docs for map_reduce MongoEngine/mongoengine#67 2012-08-24 13:47:00 +01:00
Ross Lawley
87792e1921 Test checking can save if not included
ref: MongoEngine/mongoengine#70
2012-08-24 11:45:04 +01:00
Ross Lawley
4c8296acc6 Added testcase for MongoEngine/mongoengine#77 2012-08-24 11:02:32 +01:00
Ross Lawley
9989da07ed Updated AUTHORS and changelog.rst
ref: MongoEngine/mongoengine#62
2012-08-24 10:40:45 +01:00
Ross Lawley
1c5e6a3425 NotUniqueError gracefully replacing ambiguous OperationError when appropriate 2012-08-24 10:38:10 +01:00
Ross Lawley
eedf908770 Update name ref: MongoEngine/mongoengine#96 2012-08-24 09:33:56 +01:00
Ross Lawley
5c9ef41403 Updated AUTHORS (MongoEngine/mongoengine#85) 2012-08-24 09:32:15 +01:00
Ross Lawley
0bf2ad5b67 Use pk in ReferenceField fixes MongoEngine/mongoengine#85 2012-08-24 09:30:08 +01:00
Ross Lawley
a0e3f382cd Added testcase ref MongoEngine/mongoengine#92 2012-08-24 09:20:14 +01:00
Ross Lawley
f09c39b5d7 Updated AUTHORS and changelog.rst
refs MongoEngine/mongoengine#92
2012-08-24 09:10:13 +01:00
Ross Lawley
89c67bf259 Merge branch 'master' of https://github.com/nikitinsm/mongoengine 2012-08-24 09:08:35 +01:00
Ross Lawley
ea666d4607 Updated AUTHORS and changelog.rst
Refs: MongoEngine/mongoengine#88
2012-08-24 09:00:39 +01:00
Ross Lawley
b8af154439 Merge pull request #88 from akolechkin/master
Fix for UnboundLocalError in composite index with pk field
2012-08-24 00:58:09 -07:00
Ross Lawley
f594ece32a Fixed MRO issue in base.py MongoEngine/mongoengine#95 2012-08-24 08:54:54 +01:00
Ross Lawley
03beb6852a Merge branch 'master' of https://github.com/elasticsales/mongoengine 2012-08-24 08:53:26 +01:00
Anthony Nemitz
ab9e9a3329 adding comment to the MRO test case 2012-08-23 18:09:51 -07:00
Anthony Nemitz
a4b09344af test case for multiple inheritance raising MRO exception 2012-08-23 18:08:12 -07:00
Ross Lawley
8cb8aa392c Updated tests 2012-08-23 17:08:23 +01:00
Ross Lawley
3255519792 Updating test 2012-08-23 16:31:22 +01:00
Sergey
7e64bb2503 Update mongoengine/fields.py
mistake
2012-08-23 14:32:27 +04:00
Sergey
86a78402c3 Update mongoengine/fields.py 2012-08-23 14:29:29 +04:00
Ross Lawley
ba276452fb Fix tests 2012-08-23 11:09:07 +01:00
Ross Lawley
4ffa8d0124 Updated ReferenceField's to optionally store ObjectId strings.
This will become the default in 0.8 (MongoEngine/mongoengine#89)
2012-08-23 11:02:38 +01:00
Anton Kolechkin
4bc5082681 fix for UnboundLocalError when use the composite index with primary key field 2012-08-23 11:58:04 +07:00
Anton Kolechkin
0e3c34e1da test for composite index with pk, i used EmbeddedDocument because this is the only issue when it's needed 2012-08-23 11:57:22 +07:00
Ross Lawley
658b3784ae Split out warning tests as they are order dependent 2012-08-22 09:44:32 +01:00
Ross Lawley
0526f577ff Embedded Documents still can inherit fields MongoEngine/mongoengine#84 2012-08-22 09:27:18 +01:00
Ross Lawley
bb1b9bc1d3 Fixing api docs 2012-08-21 17:49:12 +01:00
Ross Lawley
b1eeb77ddc Added FutureWarning - save will default to cascade=False in 0.8 2012-08-21 17:45:51 +01:00
Ross Lawley
999d4a7676 Fixed broken test 2012-08-21 17:29:38 +01:00
Ross Lawley
1b80193aac Added example of indexing embedded fields
MongoEngine/mongoengine#75
2012-08-20 17:50:18 +01:00
Ross Lawley
be8d39a48c Updated changelog / AUTHORS (MongoEngine/mongoengine#80) 2012-08-20 17:35:19 +01:00
Ross Lawley
a2f3d70f28 Merge pull request #80 from biszkoptwielki/master
Image resize fails when force flag is set. Fix & unittest
2012-08-20 09:34:05 -07:00
mikolaj
676a7bf712 Image resize fails when Force flag is set 2012-08-18 17:27:22 +01:00
Ross Lawley
e990a6c70c Improve unicode key handling for PY25 2012-08-17 16:13:45 +01:00
Ross Lawley
90fa0f6c4a Add flexibility for fields handling bad data (MongoEngine/mongoengine#78) 2012-08-17 16:02:33 +01:00
Ross Lawley
22010d7d95 Updated benchmark stats 2012-08-17 15:04:09 +01:00
Ross Lawley
66279bd90f Post refactor cleanups (ref: meta cleanups) 2012-08-17 11:58:57 +01:00
Ross Lawley
19da228855 Cleaned up the metaclasses for documents
Refactored and clarified intent and
tidied up
2012-08-17 11:53:46 +01:00
Ross Lawley
9e67941bad Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74) 2012-08-14 10:34:17 +01:00
Ross Lawley
0454fc74e9 Updated changelog 2012-08-14 10:32:26 +01:00
Ross Lawley
2f6b1c7611 Improved queryset filtering (hmarr/mongoengine#554) 2012-08-13 17:29:33 +01:00
Ross Lawley
f00bed6058 Updated test cases for dynamic docs 2012-08-13 16:53:36 +01:00
Ross Lawley
529c522594 Updated changelog - fixed dynamic document issue 2012-08-13 16:42:51 +01:00
Ross Lawley
2bb9493fcf Updated doc 2012-08-13 15:05:01 +01:00
Ross Lawley
839ed8a64a Added test for abstract shard key 2012-08-13 14:57:35 +01:00
Ross Lawley
017a31ffd0 Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-07 13:24:27 +01:00
Ross Lawley
83b961c84d Merge pull request #64 from alonho/abstract_shard_key
propagate the shard_key from abstract base classes' meta
2012-08-07 05:24:45 -07:00
Ross Lawley
fa07423ca5 Removing python 2.4 code 2012-08-07 13:10:56 +01:00
Ross Lawley
dd4af2df81 Python 2.5 and 3.1 support confirmed 2012-08-07 13:07:51 +01:00
Ross Lawley
44bd8cb85b Merge branch 'master' of https://github.com/LaineHerron/mongoengine into 0.7 2012-08-07 10:45:01 +01:00
Ross Lawley
52d80ac23c Merge branch 'master' into 0.7
Conflicts:
	mongoengine/base.py
2012-08-07 10:41:49 +01:00
Alon Horev
43a5d73e14 propagate the shard_key from abstract base classes' meta 2012-08-07 09:12:39 +00:00
Ross Lawley
abc764951d Merge branch 'master' into dev
Conflicts:
	mongoengine/queryset.py
2012-08-07 10:07:54 +01:00
Ross Lawley
9cc6164026 Version bump 2012-08-07 10:05:01 +01:00
Ross Lawley
475488b9f2 Added support for distinct and db_alias (MongoEngine/mongoengine#59) 2012-08-07 10:04:05 +01:00
Ross Lawley
95b1783834 Updated changelog 2012-08-07 09:31:51 +01:00
Anthony Nemitz
12c8b5c0b9 Make chained querysets work if constraining the same fields.
Refs hmarr/mongoengine#554
2012-08-07 08:59:56 +01:00
Ross Lawley
f99b7a811b Fixed error in Binary Field 2012-08-07 08:53:58 +01:00
Laine
0575abab23 mend 2012-08-03 15:09:50 -07:00
Laine
9eebcf7beb Merge branch 'master' of https://github.com/LaineHerron/mongoengine
Conflicts:
	tests/test_document.py
2012-08-03 15:08:21 -07:00
Laine
ed74477150 made compatible with python 2.5 2012-08-03 15:04:59 -07:00
Ross Lawley
2801b38c75 Version Bump 2012-08-03 14:36:31 +01:00
Ross Lawley
dc3fea875e Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-03 12:56:27 +01:00
Ross Lawley
aab8c2b687 Merge pull request #57 from filipd/patch-1
Added reference to the official repository in the README
2012-08-03 04:56:55 -07:00
Filip Dupanović
3577773af3 Added reference to the official repository 2012-08-03 14:55:18 +03:00
Laine
dd023edc0f made compatible with python 2.5 2012-08-02 15:30:21 -07:00
Ross Lawley
8ac9e6dc19 Updated the documents 2012-08-02 14:11:02 +01:00
Ross Lawley
f45d4d781d Updated test for py3.2 2012-08-02 09:26:33 +01:00
Ross Lawley
c95652d6a8 Updated AUTHORS 2012-08-02 09:23:15 +01:00
Ross Lawley
97b37f75d3 Travis - Not sure python 3.3 support is there yet 2012-08-02 09:22:12 +01:00
Ross Lawley
95dae48778 Fixing build 2012-08-02 09:20:41 +01:00
Ross Lawley
73635033bd Updated travis config 2012-08-02 09:10:42 +01:00
Ross Lawley
c1619d2a62 Fixed image_field attributes regression 2012-08-02 08:47:38 +01:00
Ross Lawley
b87ef982f6 Merge branch 'master' into dev 2012-08-02 08:33:17 +01:00
Laine
91aa90ad4a Added Python 3 support to MongoEngine 2012-08-01 17:21:48 -07:00
Ross Lawley
4b3cea9e78 Added Binary support to UUID (MongoEngine/mongoengine#47) 2012-08-01 16:03:33 +01:00
Ross Lawley
2420b5e937 Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) 2012-08-01 15:14:56 +01:00
Ross Lawley
f23a976bea Added Tommi Komulainen to the contributors list
refs MongoEngine/mongoengine#48
2012-08-01 15:01:21 +01:00
Ross Lawley
4226cd08f1 Updated Changelog 2012-08-01 15:00:14 +01:00
Ross Lawley
7a230f1693 Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-01 14:58:06 +01:00
Ross Lawley
a43d0d4612 Fixed BinaryField python value issue (MongoEngine/mongoengine#48) 2012-08-01 14:57:46 +01:00
Ross Lawley
78a40a0c70 Merge pull request #41 from wpjunior/patch-13
Small fix in SequenceField
2012-08-01 06:26:58 -07:00
Ross Lawley
2c69d8f0b0 Updated License 2012-08-01 13:54:24 +01:00
Ross Lawley
0018c38b83 Fixed queryset manager issue (MongoEngine/mongoengine#52) 2012-08-01 13:51:51 +01:00
Ross Lawley
8df81571fc Fixed FileField comparison
Refs hmarr/mongoengine#547
2012-08-01 13:28:28 +01:00
Ross Lawley
d1add62a06 More updates 2012-08-01 13:11:36 +01:00
Ross Lawley
c419f3379a Style changes 2012-07-30 15:43:53 +01:00
Ross Lawley
69d57209f7 Minor 2012-07-30 13:35:45 +01:00
Ross Lawley
7ca81d6fb8 Fixes 2012-07-30 13:00:42 +01:00
Ross Lawley
8a046bfa5d Merge branch 'master' into dev 2012-07-27 09:29:10 +01:00
Ross Lawley
3628a7653c Squashed commit of the following:
commit 48f988acd7
Merge: 6526923 1304f27
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Thu Jul 26 08:17:45 2012 -0700

    Merge pull request #44 from faulkner/fix-notes

    Proper syntax for RST notes (so they actually render).

commit 6526923345
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Thu Jul 26 16:00:32 2012 +0100

    Fixed recursion loading bug in _get_changed_fields

    fixes hmarr/mongoengine#548

commit 24fd1acce6
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Thu Jul 26 14:14:10 2012 +0100

    Version bump

commit cbb9235dc5
Merge: 6459d4c 19ec2c9
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Wed Jul 25 15:12:34 2012 +0100

    Merge branch 'master' of github.com:hmarr/mongoengine

commit 19ec2c9bc9
Merge: 3070e0b 601f0eb
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Wed Jul 25 07:12:07 2012 -0700

    Merge pull request #545 from maxcountryman/patch-1

    Correcting typo in DynamicField docstring

commit 6459d4c0b6
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Wed Jul 25 14:55:10 2012 +0100

    Fixed issue with custom queryset manager expecting explicit variable names

    If using / expecting kwargs you have to call the queryset manager
    explicitly.

commit 1304f2721f
Author: Chris Faulkner <thefaulkner@gmail.com>
Date:   Tue Jul 24 14:06:43 2012 -0700

    Proper syntax for RST notes (so they actually render).

commit 598ffd3e5c
Author: Ross Lawley <ross.lawley@gmail.com>
Date:   Mon Jul 23 15:32:02 2012 +0100

    Fixed documentation

commit 601f0eb168
Author: Max Countryman <maxc@me.com>
Date:   Fri Jul 20 19:12:43 2012 -0700

    Correcting typo in DynamicField docstring
2012-07-26 22:50:39 +01:00
Ross Lawley
48f988acd7 Merge pull request #44 from faulkner/fix-notes
Proper syntax for RST notes (so they actually render).
2012-07-26 08:17:45 -07:00
Ross Lawley
6526923345 Fixed recursion loading bug in _get_changed_fields
fixes hmarr/mongoengine#548
2012-07-26 16:00:32 +01:00
Ross Lawley
24fd1acce6 Version bump 2012-07-26 14:14:10 +01:00
Ross Lawley
cbb9235dc5 Merge branch 'master' of github.com:hmarr/mongoengine 2012-07-25 15:12:34 +01:00
Ross Lawley
19ec2c9bc9 Merge pull request #545 from maxcountryman/patch-1
Correcting typo in DynamicField docstring
2012-07-25 07:12:07 -07:00
Ross Lawley
6459d4c0b6 Fixed issue with custom queryset manager expecting explicit variable names
If using / expecting kwargs you have to call the queryset manager
explicitly.
2012-07-25 14:55:10 +01:00
Chris Faulkner
1304f2721f Proper syntax for RST notes (so they actually render). 2012-07-24 14:06:43 -07:00
Wilson Júnior
8bde0c0e53 Small fix in SequenceField 2012-07-23 12:31:47 -03:00
Ross Lawley
598ffd3e5c Fixed documentation 2012-07-23 15:32:02 +01:00
Ross Lawley
1a4533a9cf Minor perf update 2012-07-23 14:46:48 +01:00
Max Countryman
601f0eb168 Correcting typo in DynamicField docstring 2012-07-20 19:12:43 -07:00
Ross Lawley
3070e0bf5d Fix for inheritance bug and db_alias 2012-07-20 10:34:08 +01:00
Ross Lawley
83c11a9834 Version bump 2012-07-19 16:12:21 +01:00
Ross Lawley
5c912b930e Removed tests testing MongoDB not mongoengine 2012-07-19 16:03:29 +01:00
Ross Lawley
1b17fb0ae7 Updated validation error messages
refs hmarr/mongoengine#539
2012-07-19 15:04:12 +01:00
Ross Lawley
d83e67c121 Added support for null / zero / false values in item_frequencies
refs MongoEngine/mongoengine#40
2012-07-19 12:08:07 +01:00
Ross Lawley
ae39ed94c9 Fixed cascade save edge case
refs MongoEngine/mongoengine#40
2012-07-19 11:52:26 +01:00
Ross Lawley
1e51180d42 Fixed geo index creation bug
fixes MongoEngine/mongoengine#36
2012-07-19 11:39:52 +01:00
Ross Lawley
87ba69d02e Updated changelog 2012-07-19 10:35:37 +01:00
Ross Lawley
8879d5560b Added support for args / kwargs and queryset_manager
Closes MongoEngine/mongoengine#37
2012-07-19 10:32:33 +01:00
Ross Lawley
c1621ee39c Merge pull request #39 from wpjunior/tests2
More one test
2012-07-18 06:10:26 -07:00
Ross Lawley
b0aa98edb4 Deref list custom id fix 2012-07-18 14:09:24 +01:00
Wilson Júnior
a7a2fe0216 added more tests 2012-07-18 06:37:23 -03:00
Ross Lawley
8e50f5fa3c Version bump 2012-07-11 16:59:24 +01:00
Ross Lawley
31793520bf Updated changelog / AUTHORS
refs hmarr/mongoengine#529
2012-07-11 16:38:15 +01:00
Ross Lawley
0b6b0368c5 Merge branch 'master' of https://github.com/elasticsales/mongoengine 2012-07-11 16:36:35 +01:00
Ross Lawley
d1d30a9280 Added test and updated changelog
refs hmarr/mongoengine#527
2012-07-11 16:34:28 +01:00
Ross Lawley
420c6f2d1e Merge branch 'patch-10' of https://github.com/wpjunior/mongoengine 2012-07-11 16:33:16 +01:00
Ross Lawley
34f06c4971 Updated changelog / AUTHORS
refs hmarr/mongoengine#524
2012-07-11 16:27:43 +01:00
Ross Lawley
9cc4bbd49d Merge branch 'patch-1' of https://github.com/daevaorn/mongoengine 2012-07-11 16:26:50 +01:00
Ross Lawley
f66b312869 Updated api docs
fixes hmarr/mongoengine#526
2012-07-11 16:25:40 +01:00
Ross Lawley
2405ba8708 Updated Changelog / AUTHORS
refs hmarr/mongoengine#531
2012-07-11 16:11:13 +01:00
Ross Lawley
a91b6bff8b Merge branch 'master' of https://github.com/agonzalezro/mongoengine 2012-07-11 16:09:33 +01:00
Ross Lawley
450dc11a68 Unicode fixes
refs hmarr/mongoengine#533 MongoEngine/mongoengine#32
2012-07-11 16:01:24 +01:00
Ross Lawley
1ce2f84ce5 Updated docs regarding fields
refs hmarr/mongoengine#535
2012-07-11 15:56:34 +01:00
Ross Lawley
f55b241cfa Trying to bump travis 2012-07-11 15:45:31 +01:00
Ross Lawley
34d08ce8ef Only dereference fields that need it
Fixes MongoEngine/mongoengine#31
2012-07-11 15:23:27 +01:00
Ross Lawley
4f5aa8c43b Fixed config / added test 2012-07-11 14:29:35 +01:00
Ross Lawley
27b375060d Updated changelog / AUTHORS
refs MongoEngine/mongoengine#32
2012-07-11 14:25:38 +01:00
Ross Lawley
cbfdc401f7 Merge pull request #32 from jaimeirurzun/master
Fix _transform_update to accept unicode fields

Thanks jaimeirurzun
2012-07-11 06:24:29 -07:00
Ross Lawley
b58bf3e0ce Added support for addToSet and each
fixes MongoEngine/mongoengine#33
2012-07-11 14:22:50 +01:00
Jaime Irurzun
1fff7e9aca Fix _transform_update to accept unicode fields 2012-07-10 10:40:14 +01:00
Álex González
494b981b13 Default value for direction 2012-07-02 10:05:25 +02:00
Álex González
dd93995bd0 Forced cast to list 2012-07-02 10:01:22 +02:00
Thomas Steinacher
b3bb4add9c Fix error dict with nested validation. 2012-06-27 13:46:06 -07:00
Wilson Júnior
d305e71c27 Fixes for __ne operator in IntField and FloatField 2012-06-25 15:53:42 -03:00
Alexander Koshelev
0d92baa670 Exclude tests from installation 2012-06-24 03:08:49 +04:00
Ross Lawley
7a1b110f62 Added Tristan Escalada to authors
refs hmarr/mongoengine#520
2012-06-23 22:24:09 +01:00
Ross Lawley
db8df057ce Merge pull request #520 from tescalada/patch-1
documentation typo: inheritence
2012-06-23 14:23:07 -07:00
Ross Lawley
5d8ffded40 Fixed issue with embedded_docs and db_fields
Bumped version also
refs: hmarr/mongoengine#523
2012-06-23 22:19:02 +01:00
Ross Lawley
07f3e5356d Updated changelog / AUTHORS
refs: hmarr/mongoengine#522
2012-06-23 21:46:31 +01:00
Ross Lawley
1ece62f960 Merge branch 'unicode-fix' of https://github.com/aparajita/mongoengine 2012-06-23 21:43:09 +01:00
Ross Lawley
056c604dc3 Fixes __repr__ modifying the cursor
Fixes MongoEngine/mongoengine#30
2012-06-22 16:22:27 +01:00
Aparajita Fishman
2d08eec093 Fix conversion of StringField value to unicode, replace outdated (str, unicode) check with unicode 2012-06-21 18:57:14 -07:00
Tristan Escalada
614b590551 documentation typo: inheritence
inheritence corrected to inheritance
only in the documentation, not in the code
2012-06-19 17:08:28 -03:00
Ross Lawley
6d90ce250a Version bump 2012-06-19 17:01:28 +01:00
Ross Lawley
ea31846a19 Fixes scalar lookups for primary_key
fixes hmarr/mongoengine#519
2012-06-19 16:59:18 +01:00
Ross Lawley
e6317776c1 Fixes DBRef handling in _delta
refs: hmarr/mongoengine#518
2012-06-19 16:45:23 +01:00
Ross Lawley
efeaba39a4 Version bump 2012-06-19 14:34:16 +01:00
Ross Lawley
1a97dfd479 Better fix for .save() _delta issue with DbRefs
refs: hmarr/mongoengine#518
2012-06-19 14:05:53 +01:00
Ross Lawley
9fecf2b303 Fixed inconsistency handling None values field attrs
fixes hmarr/mongoengine#505
2012-06-19 11:22:12 +01:00
Ross Lawley
3d0d2f48ad Fixed map_field embedded db_field bug
fixes hmarr/mongoengine#512
2012-06-19 10:57:43 +01:00
Ross Lawley
581605e0e2 Added test case for _delta
refs: hmarr/mongoengine#518
2012-06-19 10:08:56 +01:00
Ross Lawley
45d3a7f6ff Updated Changelog 2012-06-19 09:49:55 +01:00
Ross Lawley
7ca2ea0766 Fixes .save _delta issue with DBRefs
Fixes hmarr/mongoengine#518
2012-06-19 09:49:22 +01:00
Ross Lawley
89220c142b Fixed django test class
refs hmarr/mongoengine#506
2012-06-18 21:18:40 +01:00
Ross Lawley
c73ce3d220 Updated changelog / AUTHORS
refs hmarr/mongoengine#511
2012-06-18 21:13:55 +01:00
Ross Lawley
b0f127af4e Merge branch 'master' of https://github.com/andreyfedoseev/mongoengine 2012-06-18 21:12:52 +01:00
Ross Lawley
766d54795f Merge branch 'master' of https://github.com/MeirKriheli/mongoengine
Conflicts:
	docs/changelog.rst
2012-06-18 21:10:14 +01:00
Ross Lawley
bd41c6eea4 Updated changelog & AUTHORS
refs hmarr/mongoengine#517
2012-06-18 21:04:41 +01:00
Ross Lawley
2435786713 Merge branch 'master' of https://github.com/shaunduncan/mongoengine 2012-06-18 20:55:32 +01:00
Ross Lawley
9e7ea64bd2 Fixed db_field load error
Fixes mongoengine/MongoEngine#45
2012-06-18 20:49:33 +01:00
Ross Lawley
89a6eee6af Fixes cascading saves with filefields
fixes #24 #25
2012-06-18 16:45:14 +01:00
Shaun Duncan
2ec1476e50 Adding test case for self-referencing documents with cascade deletes 2012-06-16 11:05:23 -04:00
Shaun Duncan
2d9b581f34 Adding check if cascade delete is self-referencing. If so, prevent
recursing if there are no objects to evaluate
2012-06-15 15:42:19 -04:00
Harry Marr
5bb63f645b Fix minor typo w/ FloatField 2012-06-08 19:24:10 +02:00
Meir Kriheli
a856c7cc37 Fix formatting of the docstring 2012-06-07 12:36:14 +03:00
Meir Kriheli
26db9d8a9d Documentation for PULL reverse_delete_rule 2012-06-07 12:32:02 +03:00
Meir Kriheli
8060179f6d Implement PULL reverse_delete_rule 2012-06-07 12:16:00 +03:00
Meir Kriheli
77ebd87fed Test PULL reverse_delete_rule 2012-06-07 12:02:19 +03:00
Valentin Gorbunov
e4bc92235d test_save_max_recursion_not_hit_with_file_field added 2012-06-06 15:48:16 +04:00
Ross Lawley
27a4d83ce8 Remove comment - it was wrong 2012-05-29 17:32:41 +01:00
Ross Lawley
ece9b902f8 Setup.py cleanups 2012-05-29 17:32:14 +01:00
Ross Lawley
65a2f8a68b Updated configs 2012-05-29 17:06:03 +01:00
Ross Lawley
9c212306b8 Updated setup / added datetime test 2012-05-29 16:24:25 +01:00
Ross Lawley
1fdc7ce6bb Releasing Version 0.6.10 2012-05-23 08:58:43 +01:00
Andrey Fedoseev
0b22c140c5 Add sensible __eq__ method to EmbeddedDocument 2012-05-22 22:31:59 +06:00
Ross Lawley
944aa45459 Updated changelog 2012-05-21 15:21:45 +01:00
Ross Lawley
c9842ba13a Fix base classes to return
fixes hmarr/mongoengine#507
2012-05-21 15:20:46 +01:00
Ross Lawley
8840680303 Promoted BaseDynamicField to DynamicField
closes mongoengine/mongoengine#22
2012-05-17 21:54:17 +01:00
Ross Lawley
376b9b1316 updated the readme 2012-05-17 21:14:25 +01:00
Ross Lawley
54bb1cb3d9 Updated travis settings and Readme 2012-05-17 16:59:50 +01:00
Ross Lawley
43468b474e Adding travis support 2012-05-17 16:49:13 +01:00
Ross Lawley
28a957c684 Version bump 2012-05-14 12:43:00 +01:00
Ross Lawley
ec5ddbf391 Fixed sparse indexes with inheritance
fixes hmarr/mongoengine#497
2012-05-14 12:06:25 +01:00
Ross Lawley
bab186e195 Reverted document.delete auto gridfs delete 2012-05-14 12:02:07 +01:00
Ross Lawley
bc7e874476 Version 0.6.8 2012-05-09 20:53:18 +01:00
Ross Lawley
97114b5948 Fix for FileField losing ref without default
fixes hmarr/mongoengine#458
2012-05-09 20:50:11 +01:00
Ross Lawley
45e015d71d Added test for keys with spaces 2012-05-09 20:49:34 +01:00
Ross Lawley
0ff6531953 Updated changelog 2012-05-09 15:39:34 +01:00
Ross Lawley
ba298c3cfc Save can be used in assignment 2012-05-09 15:37:07 +01:00
Ross Lawley
0479bea40b Cleaned up GridFS
refs hmarr/mongoengine#465
2012-05-09 15:35:28 +01:00
Ross Lawley
a536097804 Added support for pull operations on nested EmbeddedDocs
fixes mongoengine/mongoengine#16
2012-05-09 14:38:53 +01:00
Ross Lawley
bbefd0fdf9 Added example of bi directional delete rules + test
refs mongoengine/mongoengine#15
2012-05-09 13:54:33 +01:00
Ross Lawley
2aa8b04c21 Implemented Choices for GenericReferenceFields
Refs mongoengine/mongoengine#13
2012-05-09 13:21:53 +01:00
Ross Lawley
aeebdfec51 Implemented Choices for GenericEmbeddedDocuments
Refs mongoengine/mongoengine#13
2012-05-09 12:58:45 +01:00
Ross Lawley
debfcdf498 Updated docs re: required pymongo version
refs #472
2012-05-09 12:19:59 +01:00
Ross Lawley
5c4b33e8e6 Made note stronger re: race condition
Refs #478
2012-05-09 12:13:42 +01:00
Ross Lawley
eb54037b66 Added note that get_or_create contains a race condition
Refs #478
2012-05-09 12:08:00 +01:00
Ross Lawley
f48af8db3b Django 1.4 first session save lost data
fixes #477
2012-05-09 12:00:05 +01:00
Ross Lawley
97c5b957dd Updated docs regarding GridFS
refs #492
2012-05-09 11:39:30 +01:00
Ross Lawley
95e7397803 Merge branch 'master' of https://github.com/deignacio/mongoengine 2012-05-09 11:32:26 +01:00
Ross Lawley
43a989978a Merge branch 'patch-8' of https://github.com/wpjunior/mongoengine 2012-05-09 11:21:54 +01:00
Ross Lawley
27734a7c26 Updated Author / changelog 2012-05-09 11:04:05 +01:00
Anthony Nemitz
dd786d6fc4 fix for #494 2012-05-09 02:54:08 -07:00
Ross Lawley
be1c28fc45 Updated changelog 2012-05-08 18:26:04 +01:00
Ross Lawley
20e41b3523 Make the user model extendable 2012-05-08 18:23:51 +01:00
David Ignacio
e07ecc5cf8 Cleanup referenced GridFS files when a document is deleted
Note that drop_collection is not modified since there is no
guarantee that a GridFS collection holds files for only one
Document class.  Otherwise you could drop files for other fields
or documents accidentally.
2012-05-05 01:33:08 -04:00
Wilson Júnior
3360b72531 Small fixes for GenericReferenceField 2012-05-04 14:58:06 -03:00
Ross Lawley
233b13d670 Version bump 2012-05-01 12:03:33 +01:00
Ross Lawley
5bcbb4fdaa Properly fixed indexing on _id for covered indexes
refs  #4
2012-05-01 12:02:45 +01:00
Ross Lawley
dbe2f5f2b8 Updated setup.py 2012-05-01 11:48:57 +01:00
Ross Lawley
ca8b58d66d Fixed indexing on _id for covered indexes
fixes #4
2012-05-01 11:27:37 +01:00
Ross Lawley
f80f0b416f Updated changelog 2012-05-01 11:04:01 +01:00
Ross Lawley
d7765511ee Invalid DB Data now raises an InvalidDocumentError
fixes #2
2012-05-01 11:03:23 +01:00
Ross Lawley
0240a09056 Cleaned up ValidationError Refs #459 2012-05-01 10:14:16 +01:00
Ross Lawley
ab15c4eec9 Merge branch 'master' of https://github.com/adamreeve/mongoengine into 459 2012-05-01 09:42:25 +01:00
Ross Lawley
4ce1ba81a6 Merge branch 'dev-disable-indexing' of https://github.com/colinhowe/mongoengine
Conflicts:
	mongoengine/queryset.py
2012-05-01 09:37:01 +01:00
Ross Lawley
530440b333 Fixed replicaset_connection test 2012-05-01 09:14:38 +01:00
Ross Lawley
b80fda36af Ensure session save is safe 2012-04-27 14:57:07 +01:00
Ross Lawley
42d24263ef Updated changelog 2012-04-27 10:39:35 +01:00
Ross Lawley
1e2797e7ce Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-04-27 10:34:31 +01:00
Ross Lawley
f7075766fc Merge pull request #11 from gregbanks/insert_kwargs
add parameters to QuerySet.insert to allow control of lower level behavior
2012-04-27 02:34:03 -07:00
Ross Lawley
5647ca70bb Fix variable name bug 2012-04-27 09:52:57 +01:00
Ross Lawley
2b8aa6bafc Fixes read_preference
Fixes mongoengine/mongoengine#10
2012-04-27 09:15:05 +01:00
Greg Banks
410443471c TopLevelDocumentMetaClass sets _meta['index_opts'] not _meta['index_options'] 2012-04-26 14:03:30 -07:00
Greg Banks
0bb9781b91 add "safe" and "write_options" parameters to QuerySet.insert similar to Document.save 2012-04-26 13:56:52 -07:00
Ross Lawley
2769d6d7ca Updated Authors / Changelog 2012-04-25 12:43:17 +01:00
Ross Lawley
120b9433c2 Merge branch 'master' of https://github.com/swashbuckler/mongoengine 2012-04-25 12:40:53 +01:00
Ross Lawley
605092bd88 Updated change log / AUTHORS
Thanks Greg Banks
2012-04-25 12:36:37 +01:00
Ross Lawley
a4a8c94374 Merge branch 'master' of https://github.com/gregbanks/mongoengine 2012-04-25 12:31:11 +01:00
Ross Lawley
0e93f6c0db Updated changelog 2012-04-25 12:26:31 +01:00
Jona Andersen
c474ca0f13 Allow File-like objects to be stored.
No longer demands FileField be an actual instance of file, but instead
checks whether object has a 'read' attribute. Fixes read() on
GridFSProxy to return an empty string on read failure, or None if file
does not exist.
2012-04-22 13:49:18 +02:00
Greg Banks
49a66ba81a whoops, don't dereference all references as the first type encountered 2012-04-12 11:42:10 -07:00
Greg Banks
a1d43fecd9 fix for issue 473 2012-04-11 16:37:22 -07:00
Colin Howe
7e376b40bb Add new meta option to Document: allow_index_creation.
Defaults to True. If set to False then MongoEngine will not ensure indexes exist
2012-03-19 20:27:08 +00:00
Adam Reeve
540a0cc59c List all document errors when raising a ValidationError 2012-03-09 22:20:59 +13:00
Adam Reeve
83eb4f6b16 Document the ValidationError class 2012-03-09 22:20:54 +13:00
41 changed files with 4582 additions and 1473 deletions

.gitignore (3 changed lines)

@@ -13,4 +13,5 @@ env/
.settings
.project
.pydevproject
tests/bugfix.py
tests/test_bugfix.py
htmlcov/

.travis.yml (new file, 29 lines)

@@ -0,0 +1,29 @@
# http://travis-ci.org/#!/MongoEngine/mongoengine
language: python
services: mongodb
python:
- 2.5
- 2.6
- 2.7
- 3.1
- 3.2
env:
- PYMONGO=dev
- PYMONGO=2.3
- PYMONGO=2.2
install:
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
- if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
- python setup.py install
script:
- python setup.py test
notifications:
irc: "irc.freenode.org#mongoengine"
branches:
only:
- master
- 0.7

AUTHORS (26 changed lines)

@@ -8,6 +8,7 @@ Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com>
Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta
Laine Herron https://github.com/LaineHerron
CONTRIBUTORS
@@ -99,4 +100,27 @@ that much better:
* Robert Kajic
* Jacob Peddicord
* Nils Hasenbanck
* mostlystatic
* mostlystatic
* Greg Banks
* swashbuckler
* Adam Reeve
* Anthony Nemitz
* deignacio
* shaunduncan
* Meir Kriheli
* Andrey Fedoseev
* aparajita
* Tristan Escalada
* Alexander Koshelev
* Jaime Irurzun
* Alexandre González
* Thomas Steinacher
* Tommi Komulainen
* Peter Landry
* biszkoptwielki
* Anton Kolechkin
* Sergey Nikitin
* psychogenic
* Stefan Wójcik
* dimonb
* Garry Polley

CONTRIBUTING.rst (new file, 61 lines)

@@ -0,0 +1,61 @@
Contributing to MongoEngine
===========================
MongoEngine has a large `community
<https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and
contributions are always encouraged. Contributions can be as simple as
minor tweaks to the documentation. Please read these guidelines before
sending a pull request.
Bugfixes and New Features
-------------------------
Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
issue or feature request. That way you avoid working on something
that might not be of interest or that has already been addressed. If in doubt
post to the `user group <http://groups.google.com/group/mongoengine-users>`
Supported Interpreters
----------------------
PyMongo supports CPython 2.5 and newer. Language
features not supported by all interpreters can not be used.
Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
Style Guide
-----------
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents and 79 character line limits.
Testing
-------
All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
and any pull requests are automatically tested by Travis. Any pull requests
without tests will take longer to be integrated and might be refused.
General Guidelines
------------------
- Avoid backward breaking changes if at all possible.
- Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod
running on the default port, then execute ``python setup.py test``
from the cmd line to run the test suite).
- Add yourself to AUTHORS.rst :)
Documentation
-------------
To contribute to the `API documentation
<http://docs.mongoengine.org/en/latest/apireference.html>`_
just make your changes to the inline documentation of the appropriate
`source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file
<https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a
branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
button.


@@ -1,5 +1,5 @@
Copyright (c) 2009-2010 Harry Marr
Copyright (c) 2009-2012 See AUTHORS
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
@@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND


@@ -2,9 +2,13 @@
MongoEngine
===========
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Repository: https://github.com/MongoEngine/mongoengine
:Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza)
.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
:target: http://travis-ci.org/MongoEngine/mongoengine
About
=====
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
@@ -22,7 +26,7 @@ setup.py install``.
Dependencies
============
- pymongo 1.1+
- pymongo 2.1.1+
- sphinx (optional - for documentation generation)
Examples
@@ -59,11 +63,6 @@ Some simple examples of what MongoEngine code looks like::
... print 'Link:', post.url
... print
...
=== Using MongoEngine ===
See the tutorial
=== MongoEngine Docs ===
Link: hmarr.com/mongoengine
>>> len(BlogPost.objects)
2
@@ -81,7 +80,7 @@ Some simple examples of what MongoEngine code looks like::
Tests
=====
To run the test suite, ensure you are running a local instance of MongoDB on
the standard port, and run ``python setup.py test``.
the standard port, and run: ``python setup.py test``.
Community
=========
@@ -89,10 +88,8 @@ Community
<http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list
<http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_
- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_
Contributing
============
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
contribute to the project, fork it on GitHub and send a pull request, all
contributions and suggestions are welcome!
We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_


@@ -28,47 +28,64 @@ def main():
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.1141769886
3.86744189262
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
2.37724113464
6.23374891281
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
1.92479610443
5.33027005196
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
pass - No Cascade
0.5.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.10552310944
3.89597702026
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
16.5169169903
21.7735359669
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
14.9446101189
19.8670389652
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
14.912801981
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
14.9617750645
pass - No Cascade
Performance
0.6.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
1.10072994232
3.81559205055
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
5.27341103554
10.0446798801
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
4.49365401268
9.51354718208
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
4.43459296227
9.02567505836
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
4.40114378929
8.44933390617
0.7.X
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
3.78801012039
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
9.73050498962
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
8.33456707001
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
8.37778115273
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force=True
8.36906409264
"""
setup = """


@@ -31,6 +31,9 @@ Documents
.. autoclass:: mongoengine.document.MapReduceDocument
:members:
.. autoclass:: mongoengine.ValidationError
:members:
Querying
========
@@ -44,25 +47,28 @@ Querying
Fields
======
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.ComplexDateTimeField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.DynamicField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.ImageField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.MapField
.. autoclass:: mongoengine.ObjectIdField
.. autoclass:: mongoengine.ReferenceField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.SequenceField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.UUIDField


@@ -2,6 +2,173 @@
Changelog
=========
Changes in 0.7.X
================
- Unicode fix for repr (MongoEngine/mongoengine#133)
- Allow updates with match operators (MongoEngine/mongoengine#144)
- Updated URLField - the validation regex can now be overridden (MongoEngine/mongoengine#136)
- Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
- Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138)
Changes in 0.7.5
================
- ReferenceFields with dbref=False use ObjectId instead of strings (MongoEngine/mongoengine#134)
See ticket for upgrade notes (https://github.com/MongoEngine/mongoengine/issues/134)
Changes in 0.7.4
================
- Fixed index inheritance issues - firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125)
Changes in 0.7.3
================
- Reverted EmbeddedDocuments meta handling - now can turn off inheritance (MongoEngine/mongoengine#119)
Changes in 0.7.2
================
- Update index spec generation so its not destructive (MongoEngine/mongoengine#113)
Changes in 0.7.1
=================
- Fixed index spec inheritance (MongoEngine/mongoengine#111)
Changes in 0.7.0
=================
- Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107)
- Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104)
- Fixed Q object merge edge case (MongoEngine/mongoengine#109)
- Fixed reloading on sharded documents (hmarr/mongoengine#569)
- Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62)
- Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92)
- Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88)
- Updated ReferenceField's to optionally store ObjectId strings
this will become the default in 0.8 (MongoEngine/mongoengine#89)
- Added FutureWarning - save will default to `cascade=False` in 0.8
- Added example of indexing embedded document fields (MongoEngine/mongoengine#75)
- Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80)
- Add flexibility for fields handling bad data (MongoEngine/mongoengine#78)
- Embedded Documents no longer handle meta definitions
- Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74)
- Improved queryset filtering (hmarr/mongoengine#554)
- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
- Fixed abstract classes and shard keys (MongoEngine/mongoengine#64)
- Fixed Python 2.5 support
- Added Python 3 support (thanks to Laine Heron)
Changes in 0.6.20
=================
- Added support for distinct and db_alias (MongoEngine/mongoengine#59)
- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
- Fixed BinaryField lookup re (MongoEngine/mongoengine#48)
Changes in 0.6.19
=================
- Added Binary support to UUID (MongoEngine/mongoengine#47)
- Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46)
- Fixed BinaryField python value issue (MongoEngine/mongoengine#48)
- Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41)
- Fixed queryset manager issue (MongoEngine/mongoengine#52)
- Fixed FileField comparison (hmarr/mongoengine#547)
Changes in 0.6.18
=================
- Fixed recursion loading bug in _get_changed_fields
Changes in 0.6.17
=================
- Fixed issue with custom queryset manager expecting explicit variable names
Changes in 0.6.16
=================
- Fixed issue where db_alias wasn't inherited
Changes in 0.6.15
=================
- Updated validation error messages
- Added support for null / zero / false values in item_frequencies
- Fixed cascade save edge case
- Fixed geo index creation through reference fields
- Added support for args / kwargs when using @queryset_manager
- Deref list custom id fix
Changes in 0.6.14
=================
- Fixed error dict with nested validation
- Fixed Int/Float fields and not equals None
- Exclude tests from installation
- Allow tuples for index meta
- Fixed use of str in instance checks
- Fixed unicode support in transform update
- Added support for add_to_set and each
Changes in 0.6.13
=================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixes __repr__ modifying the cursor
Changes in 0.6.12
=================
- Fixes scalar lookups for primary_key
- Fixes error with _delta handling DBRefs
Changes in 0.6.11
==================
- Fixed inconsistency handling None values field attrs
- Fixed map_field embedded db_field issue
- Fixed .save() _delta issue with DbRefs
- Fixed Django TestCase
- Added cmp to Embedded Document
- Added PULL reverse_delete_rule
- Fixed CASCADE delete bug
- Fixed db_field data load error
- Fixed recursive save with FileField
Changes in 0.6.10
=================
- Fixed basedict / baselist to return super(..)
- Promoted BaseDynamicField to DynamicField
Changes in 0.6.9
================
- Fixed sparse indexes on inherited docs
- Removed FileField auto deletion, needs more work maybe 0.7
Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields
- Fixed Django 1.4 sessions first save data loss
- FileField now automatically delete files on .delete()
- Fix for GenericReference to_mongo method
- Fixed connection regression
- Updated Django User document, now allows inheritance
Changes in 0.6.7
================
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises an InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta `auto_create_index` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix Read preference now passed correctly
- Added support for File like objects for GridFS
- Fix for #473 - Dereferencing abstracts
Changes in 0.6.6
================
- Django 1.4 fixed (finally)
- Added tests for Django
Changes in 0.6.5
================
- More Django updates
Changes in 0.6.4
================


@@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.ComplexDateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.DynamicField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.ImageField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.MapField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.SequenceField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.UUIDField`
Field arguments
---------------
@@ -98,7 +101,7 @@ arguments can be set on all fields:
:attr:`required` (Default: False)
If set to True and the field is not set on the document instance, a
:class:`~mongoengine.base.ValidationError` will be raised when the document is
:class:`~mongoengine.ValidationError` will be raised when the document is
validated.
:attr:`default` (Default: None)
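As a hedged illustration of the `required` and `default` arguments described above (the `Article` class and its fields are invented for this sketch, not part of this changeset)::

    from datetime import datetime

    from mongoengine import DateTimeField, Document, StringField, ValidationError


    class Article(Document):
        title = StringField(required=True)                 # must be set for validation to pass
        created = DateTimeField(default=datetime.utcnow)   # callable default, evaluated per document


    article = Article()          # created falls back to its default; title is missing
    try:
        article.validate()       # raises ValidationError because title is required
    except ValidationError:
        pass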
@@ -256,6 +259,35 @@ as the constructor's argument::
content = StringField()
.. _one-to-many-with-listfields:
One to Many with ListFields
'''''''''''''''''''''''''''
If you are implementing a one to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::
class User(Document):
name = StringField()
class Page(Document):
content = StringField()
authors = ListField(ReferenceField(User))
bob = User(name="Bob Jones").save()
john = User(name="John Smith").save()
Page(content="Test Page", authors=[bob, john]).save()
Page(content="Another Page", authors=[john]).save()
# Find all pages Bob authored
Page.objects(authors__in=[bob])
# Find all pages that both Bob and John have authored
Page.objects(authors__all=[bob, john])
Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
@@ -289,6 +321,10 @@ Its value can take any of the following constants:
:const:`mongoengine.CASCADE`
Any object containing fields that are referring to the object being deleted
are deleted first.
:const:`mongoengine.PULL`
Removes the reference to the object (using MongoDB's "pull" operation)
from any object's fields of
:class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).
.. warning::
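To illustrate the delete rules above, a minimal sketch; the `User`/`Page` models are invented for this example and assume the `CASCADE`/`PULL` constants exported by `mongoengine`::

    from mongoengine import (CASCADE, PULL, Document, ListField,
                             ReferenceField, StringField)


    class User(Document):
        name = StringField()


    class Page(Document):
        content = StringField()
        # CASCADE: deleting a User also deletes any Page whose owner references it
        owner = ReferenceField(User, reverse_delete_rule=CASCADE)
        # PULL: deleting a User removes it from the authors list via a $pull update
        authors = ListField(ReferenceField(User, reverse_delete_rule=PULL))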
@@ -308,6 +344,10 @@ Its value can take any of the following constants:
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
.. warning::
Signals are not triggered when doing cascading updates / deletes - if this
is required you must manually handle the update / delete.
Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
@@ -429,13 +469,18 @@ If a dictionary is passed then the following options are available:
Whether the index should be sparse.
:attr:`unique` (Default: False)
Whether the index should be sparse.
Whether the index should be unique.
.. note ::
To index embedded files / dictionary fields use 'dot' notation eg:
`rank.title`
.. warning::
Inheritance adds extra indices.
If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.
Inheritance adds extra indices.
If you don't need inheritance for a document turn inheritance off -
see :ref:`document-inheritance`.
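A hedged sketch combining the index options above with dot notation for an embedded field; the `Person`/`Rank` documents are invented for illustration::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField)


    class Rank(EmbeddedDocument):
        title = StringField()


    class Person(Document):
        name = StringField()
        rank = EmbeddedDocumentField(Rank)

        meta = {
            'indexes': [
                'name',                        # simple ascending index
                {'fields': ['rank.title'],     # dot notation into the embedded document
                 'unique': True,
                 'sparse': True},
            ],
        }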
Geospatial indexes


@@ -91,5 +91,5 @@ is an alias to :attr:`id`::
.. note::
If you define your own primary key field, the field implicitly becomes
required, so a :class:`ValidationError` will be thrown if you don't provide
it.
required, so a :class:`~mongoengine.ValidationError` will be thrown if
you don't provide it.
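A small sketch of the behaviour noted above, using an invented `User` document with a custom primary key::

    from mongoengine import Document, StringField, ValidationError


    class User(Document):
        email = StringField(primary_key=True)   # stored as _id; implicitly required
        name = StringField()


    user = User(name="No Email")
    try:
        user.validate()    # raises ValidationError because the primary key was not provided
    except ValidationError:
        pass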


@@ -65,7 +65,7 @@ Deleting stored files is achieved with the :func:`delete` method::
marmot.photo.delete()
.. note::
.. warning::
The FileField in a Document actually only stores the ID of a file in a
separate GridFS collection. This means that deleting a document


@@ -232,7 +232,7 @@ custom manager methods as you like::
BlogPost(title='test1', published=False).save()
BlogPost(title='test2', published=True).save()
assert len(BlogPost.objects) == 2
assert len(BlogPost.live_posts) == 1
assert len(BlogPost.live_posts()) == 1
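For context on the `live_posts` change above (custom manager methods defined with `queryset_manager` are now called as methods), a hedged sketch with an illustrative model::

    from mongoengine import BooleanField, Document, StringField
    from mongoengine.queryset import queryset_manager


    class BlogPost(Document):
        title = StringField()
        published = BooleanField(default=False)

        @queryset_manager
        def live_posts(doc_cls, queryset):
            # receives the document class and the base queryset
            return queryset.filter(published=True)


    # Called as a method, matching the updated assertion above:
    # assert len(BlogPost.live_posts()) == 1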
Custom QuerySets
================
@@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`\ s ``meta`` dictionary::
class AwesomerQuerySet(QuerySet):
pass
def get_awesome(self):
return self.filter(awesome=True)
class Page(Document):
meta = {'queryset_class': AwesomerQuerySet}
# To call:
Page.objects.get_awesome()
.. versionadded:: 0.4
Aggregation


@@ -50,4 +50,11 @@ Example usage::
signals.post_save.connect(Author.post_save, sender=Author)
ReferenceFields and signals
---------------------------
Currently `reverse_delete_rules` do not trigger signals on the other part of
the relationship. If this is required you must manually handle the
reverse deletion.
.. _blinker: http://pypi.python.org/pypi/blinker
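One way to handle the reverse side manually, as suggested above, is a `pre_delete` handler that pulls the reference out; a hedged sketch with invented models::

    from mongoengine import Document, ListField, ReferenceField, StringField, signals


    class Author(Document):
        name = StringField()


    class Post(Document):
        title = StringField()
        authors = ListField(ReferenceField(Author))


    def pull_author_references(sender, document, **kwargs):
        # Remove the Author being deleted from every Post.authors list
        Post.objects(authors=document).update(pull__authors=document)


    signals.pre_delete.connect(pull_author_references, sender=Author)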


@@ -34,10 +34,10 @@ To get help with using MongoEngine, use the `MongoEngine Users mailing list
Contributing
------------
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
contributions are always encouraged. Contributions can be as simple as
minor tweaks to this documentation. To contribute, fork the project on
`GitHub <http://github.com/hmarr/mongoengine>`_ and send a
`GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a
pull request.
Also, you can join the developers' `mailing list

View File

@@ -2,18 +2,86 @@
Upgrading
=========
0.6 to 0.7
==========
Cascade saves
-------------
Saves will raise a `FutureWarning` if they cascade and `cascade` hasn't been
set to True. This is because in 0.8 it will default to False. If you require
cascading saves then either set it in the `meta` or pass it
via `save`, eg ::
# At the class level:
class Person(Document):
meta = {'cascade': True}
# Or in code:
my_document.save(cascade=True)
.. note ::
Remember: cascading saves **do not** cascade through lists.
ReferenceFields
---------------
ReferenceFields can now store references as ObjectId strings instead of DBRefs.
This will become the default in 0.8, and if `dbref` is not set a `FutureWarning`
will be raised.
To explicitly continue using DBRefs, set the `dbref` flag
to True ::
class Person(Document):
groups = ListField(ReferenceField(Group, dbref=True))
To migrate to using strings instead of DBRefs you will have to manually
migrate ::
# Step 1 - Migrate the model definition
class Group(Document):
author = ReferenceField(User, dbref=False)
members = ListField(ReferenceField(User, dbref=False))
# Step 2 - Migrate the data
for g in Group.objects():
g.author = g.author
g.members = g.members
g.save()
item_frequencies
----------------
In the 0.6 series we added support for null / zero / false values in
item_frequencies. A side effect was to return keys in the type they are
stored as rather than as string representations. Your code may need to be
updated to handle native types rather than string keys for the results of
item frequency queries.
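A sketch of the difference, using a hypothetical `Poll` document with integer
votes (an open connection is assumed)::

    from mongoengine import Document, ListField, IntField

    class Poll(Document):
        votes = ListField(IntField())

    frequencies = Poll.objects.item_frequencies('votes')

    # 0.6 style result (string keys):  {'1': 10, '2': 3}
    # 0.7 style result (native keys):  {1: 10, 2: 3}
    count = frequencies.get(1, 0)   # look up by the stored type, not by str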
BinaryFields
------------
Binary fields have been updated so that they are native binary types. If you
were previously doing `str` comparisons with binary field values you will have
to update your code and wrap the value in a `str`.
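For example, with a hypothetical `Attachment` document (an open connection is
assumed)::

    from mongoengine import Document, BinaryField

    class Attachment(Document):
        blob = BinaryField()

    Attachment(blob='\x00binary\xff').save()
    doc = Attachment.objects.first()

    # 0.6: doc.blob compared equal to a plain str
    # 0.7: doc.blob is a native binary type, so wrap it for str comparisons
    assert str(doc.blob) == '\x00binary\xff'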
0.5 to 0.6
==========
Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
to `pk` as pk is no longer a property of Embedded Documents.
Embedded Documents - if you had a `pk` field you will have to rename it from
`_id` to `pk` as pk is no longer a property of Embedded Documents.
Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.
Document._get_subclasses - Is no longer used and the class method has been removed.
Document._get_subclasses - Is no longer used and the class method has been
removed.
Document.objects.with_id - now raises an InvalidQueryError if used with a filter.
Document.objects.with_id - now raises an InvalidQueryError if used with a
filter.
FutureWarning - A future warning has been added to all inherited classes that
don't define `allow_inheritance` in their meta.
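To silence the warning, declare it explicitly, for example::

    from mongoengine import Document, StringField

    class Animal(Document):
        name = StringField()
        meta = {'allow_inheritance': True}   # stated explicitly, no FutureWarning

    class Dog(Animal):
        pass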
@@ -37,11 +105,11 @@ human-readable name for the option.
PyMongo / MongoDB
-----------------
map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output
parameters, have been depreciated.
Map reduce now requires pymongo 1.11+. The pymongo `merge_output` and
`reduce_output` parameters have been deprecated.
More methods now use map_reduce as db.eval is not supported for sharding as such
the following have been changed:
More methods now use map_reduce as db.eval is not supported for sharding; as
such, the following have been changed:
* :meth:`~mongoengine.queryset.QuerySet.sum`
* :meth:`~mongoengine.queryset.QuerySet.average`
@@ -51,8 +119,8 @@ the following have been changed:
Default collection naming
-------------------------
Previously it was just lowercase, its now much more pythonic and readable as its
lowercase and underscores, previously ::
Previously it was just lowercase; it's now much more pythonic and readable as
it's lowercase with underscores. Previously ::
class MyAceDocument(Document):
pass
@@ -88,7 +156,8 @@ Alternatively, you can rename your collections eg ::
failure = False
collection_names = [d._get_collection_name() for d in _document_registry.values()]
collection_names = [d._get_collection_name()
for d in _document_registry.values()]
for new_style_name in collection_names:
if not new_style_name: # embedded documents don't have collections
@@ -106,7 +175,8 @@ Alternatively, you can rename your collections eg ::
old_style_name, new_style_name)
else:
db[old_style_name].rename(new_style_name)
print "Renamed: %s to %s" % (old_style_name, new_style_name)
print "Renamed: %s to %s" % (old_style_name,
new_style_name)
if failure:
print "Upgrading collection names failed"

View File

@@ -12,13 +12,12 @@ from signals import *
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
queryset.__all__ + signals.__all__)
VERSION = (0, 6, 6)
VERSION = (0, 7, 5)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
if isinstance(VERSION[-1], basestring):
return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
return '.'.join(map(str, VERSION))
__version__ = get_version()

File diff suppressed because it is too large

View File

@@ -63,7 +63,10 @@ def register_connection(alias, name, host='localhost', port=27017,
'password': uri_dict.get('password'),
'read_preference': read_preference,
})
if "replicaSet" in host:
conn_settings['replicaSet'] = True
conn_settings.update(kwargs)
_connection_settings[alias] = conn_settings
@@ -112,7 +115,11 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# Discard port since it can't be used on ReplicaSetConnection
conn_settings.pop('port', None)
# Discard replicaSet if it is not a basestring
if not isinstance(conn_settings['replicaSet'], basestring):
conn_settings.pop('replicaSet', None)
connection_class = ReplicaSetConnection
try:
_connections[alias] = connection_class(**conn_settings)
except Exception, e:
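Given this handling, a hedged sketch of registering a replica-set connection
(the host names, database and set name below are hypothetical)::

    from mongoengine import connect

    # A URI containing "replicaSet" causes the settings above to flag the
    # connection so that get_connection() uses ReplicaSetConnection.
    connect('mydb',
            host='mongodb://node1:27017,node2:27017/mydb?replicaSet=rs0')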

View File

@@ -31,15 +31,34 @@ class DeReference(object):
items = [i for i in items]
self.max_depth = max_depth
doc_type = None
if instance and instance._fields:
doc_type = instance._fields[name].field
doc_type = instance._fields.get(name)
if hasattr(doc_type, 'field'):
doc_type = doc_type.field
if isinstance(doc_type, ReferenceField):
field = doc_type
doc_type = doc_type.document_type
if all([i.__class__ == doc_type for i in items]):
is_list = not hasattr(items, 'items')
if is_list and all([i.__class__ == doc_type for i in items]):
return items
elif not is_list and all([i.__class__ == doc_type
for i in items.values()]):
return items
elif not field.dbref:
if not hasattr(items, 'items'):
items = [field.to_python(v)
if not isinstance(v, (DBRef, Document)) else v
for v in items]
else:
items = dict([
(k, field.to_python(v))
if not isinstance(v, (DBRef, Document)) else (k, v)
for k, v in items.iteritems()]
)
self.reference_map = self._find_references(items)
self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -112,6 +131,10 @@ class DeReference(object):
for ref in references:
if '_cls' in ref:
doc = get_document(ref["_cls"])._from_son(ref)
elif doc_type is None:
doc = get_document(
''.join(x.capitalize()
for x in col.split('_')))._from_son(ref)
else:
doc = doc_type._from_son(ref)
object_map[doc.id] = doc
@@ -143,7 +166,7 @@ class DeReference(object):
return self.object_map.get(items['_ref'].id, items)
elif '_types' in items and '_cls' in items:
doc = get_document(items['_cls'])._from_son(items)
doc._data = self._attach_objects(doc._data, depth, doc, name)
doc._data = self._attach_objects(doc._data, depth, doc, None)
return doc
if not hasattr(items, 'items'):
@@ -162,7 +185,7 @@ class DeReference(object):
else:
data[k] = v
if k in self.object_map:
if k in self.object_map and not is_list:
data[k] = self.object_map[k]
elif hasattr(v, '_fields'):
for field_name, field in v._fields.iteritems():

View File

@@ -3,6 +3,8 @@ import datetime
from mongoengine import *
from django.utils.encoding import smart_str
from django.contrib.auth.models import _user_get_all_permissions
from django.contrib.auth.models import _user_has_perm
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _
@@ -66,6 +68,7 @@ class User(Document):
verbose_name=_('date joined'))
meta = {
'allow_inheritance': True,
'indexes': [
{'fields': ['username'], 'unique': True}
]
@@ -103,6 +106,25 @@ class User(Document):
"""
return check_password(raw_password, self.password)
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj)
def has_perm(self, perm, obj=None):
"""
Returns True if the user has the specified permission. This method
queries all available auth backends, but returns immediately if any
backend returns True. Thus, a user who has permission from a single
auth backend is assumed to have permission in general. If an object is
provided, permissions for this specific object are checked.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
# Otherwise we need to check the backends.
return _user_has_perm(self, perm, obj)
@classmethod
def create_user(cls, username, password, email=None):
"""Create (and save) a new user with the given username, password and

View File

@@ -55,7 +55,7 @@ class SessionStore(SessionBase):
def save(self, must_create=False):
if self.session_key is None:
self.create()
self._session_key = self._get_new_session_key()
s = MongoSession(session_key=self.session_key)
s.session_data = self.encode(self._get_session(no_load=must_create))
s.expire_date = self.get_expiry_date()

View File

@@ -1,4 +1,3 @@
from django.http import Http404
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
from mongoengine.base import ValidationError
@@ -27,6 +26,7 @@ def get_document_or_404(cls, *args, **kwargs):
try:
return queryset.get(*args, **kwargs)
except (queryset._document.DoesNotExist, ValidationError):
from django.http import Http404
raise Http404('No %s matches the given query.' % queryset._document._class_name)
def get_list_or_404(cls, *args, **kwargs):
@@ -42,5 +42,6 @@ def get_list_or_404(cls, *args, **kwargs):
queryset = _get_queryset(cls)
obj_list = list(queryset.filter(*args, **kwargs))
if not obj_list:
from django.http import Http404
raise Http404('No %s matches the given query.' % queryset._document._class_name)
return obj_list

View File

@@ -1,16 +1,34 @@
#coding: utf-8
from django.test import TestCase
from django.conf import settings
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from mongoengine import connect
try:
from django.test import TestCase
from django.conf import settings
except Exception as err:
if PY3:
from unittest import TestCase
# Dummy value so no error
class settings:
MONGO_DATABASE_NAME = 'dummy'
else:
raise err
class MongoTestCase(TestCase):
def setUp(self):
if PY3:
raise SkipTest('django does not have Python 3 support')
"""
TestCase class that clears the collection between the tests
"""
db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
def __init__(self, methodName='runtest'):
self.db = connect(self.db_name)
self.db = connect(self.db_name).get_db()
super(MongoTestCase, self).__init__(methodName)
def _post_teardown(self):

View File

@@ -1,14 +1,19 @@
import pymongo
from bson.dbref import DBRef
import warnings
import pymongo
import re
from bson.dbref import DBRef
from mongoengine import signals, queryset
from mongoengine import signals
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
BaseDict, BaseList)
from queryset import OperationError
from queryset import OperationError, NotUniqueError
from connection import get_db, DEFAULT_CONNECTION_NAME
__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']
'DynamicEmbeddedDocument', 'OperationError',
'InvalidCollectionError', 'NotUniqueError']
class InvalidCollectionError(Exception):
@@ -20,8 +25,19 @@ class EmbeddedDocument(BaseDocument):
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
fields on :class:`~mongoengine.Document`\ s through the
:class:`~mongoengine.EmbeddedDocumentField` field type.
A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed,
to create a specialised version of the embedded document that will be
stored in the same collection. To facilitate this behaviour, `_cls` and
`_types` fields are added to documents (hidden through the MongoEngine
interface). To disable this behaviour and remove the dependence on
the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
``False`` in the :attr:`meta` dictionary.
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass
__metaclass__ = DocumentMetaclass
def __init__(self, *args, **kwargs):
@@ -39,6 +55,11 @@ class EmbeddedDocument(BaseDocument):
else:
super(EmbeddedDocument, self).__delattr__(*args, **kwargs)
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._data == other._data
return False
class Document(BaseDocument):
"""The base class used for defining the structure and properties of
@@ -74,14 +95,23 @@ class Document(BaseDocument):
names. Index direction may be specified by prefixing the field names with
a **+** or **-** sign.
Automatic index creation can be disabled by specifying
:attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
False then indexes will not be created by MongoEngine. This is useful in
production systems where index creation is performed as part of a deployment
system.
By default, _types will be added to the start of every index (that
doesn't contain a list) if allow_inheritence is True. This can be
doesn't contain a list) if allow_inheritance is True. This can be
disabled by either setting types to False on the specific index or
by setting index_types to False on the meta dictionary for the document.
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
@apply
def pk():
"""Primary key alias
"""
@@ -90,6 +120,7 @@ class Document(BaseDocument):
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)
pk = pk()
@classmethod
def _get_db(cls):
@@ -115,8 +146,9 @@ class Document(BaseDocument):
options = cls._collection.options()
if options.get('max') != max_documents or \
options.get('size') != max_size:
msg = ('Cannot create collection "%s" as a capped '
'collection as it already exists') % cls._collection
msg = (('Cannot create collection "%s" as a capped '
'collection as it already exists')
% cls._collection)
raise InvalidCollectionError(msg)
else:
# Create the collection as a capped collection
@@ -130,8 +162,9 @@ class Document(BaseDocument):
cls._collection = db[collection_name]
return cls._collection
def save(self, safe=True, force_insert=False, validate=True, write_options=None,
cascade=None, cascade_kwargs=None, _refs=None):
def save(self, safe=True, force_insert=False, validate=True,
write_options=None, cascade=None, cascade_kwargs=None,
_refs=None):
"""Save the :class:`~mongoengine.Document` to the database. If the
document already exists, it will be updated, otherwise it will be
created.
@@ -144,26 +177,30 @@ class Document(BaseDocument):
updates of existing documents
:param validate: validates the document; set to ``False`` to skip.
:param write_options: Extra keyword arguments are passed down to
:meth:`~pymongo.collection.Collection.save` OR
:meth:`~pymongo.collection.Collection.insert`
which will be used as options for the resultant ``getLastError`` command.
For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
have recorded the write and will force an fsync on each server being written to.
:param cascade: Sets the flag for cascading saves. You can set a default by setting
"cascade" in the document __meta__
:param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
:meth:`~pymongo.collection.Collection.save` OR
:meth:`~pymongo.collection.Collection.insert`
which will be used as options for the resultant
``getLastError`` command. For example,
``save(..., write_options={'w': 2, 'fsync': True}, ...)`` will
wait until at least two servers have recorded the write and
will force an fsync on the primary server.
:param cascade: Sets the flag for cascading saves. You can set a
default by setting "cascade" in the document __meta__
:param cascade_kwargs: optional kwargs dictionary to be passed through
to cascading saves
:param _refs: A list of processed references used in cascading saves
.. versionchanged:: 0.5
In existing documents it only saves changed fields using set / unset
Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
that have changes are saved as well.
In existing documents it only saves changed fields using
set / unset. Saves are cascaded and any
:class:`~bson.dbref.DBRef` objects that have changes are
saved as well.
.. versionchanged:: 0.6
Cascade saves are optional = defaults to True, if you want fine grain
control then you can turn off using document meta['cascade'] = False
Also you can pass different kwargs to the cascade save using cascade_kwargs
which overwrites the existing kwargs with custom values
Cascade saves are optional and default to True. If you want
fine grain control then you can turn them off using document
meta['cascade'] = False. You can also pass different kwargs to
the cascade save using cascade_kwargs, which overwrites the
existing kwargs with custom values.
"""
signals.pre_save.send(self.__class__, document=self)
@@ -181,13 +218,14 @@ class Document(BaseDocument):
collection = self.__class__.objects._collection
if created:
if force_insert:
object_id = collection.insert(doc, safe=safe, **write_options)
object_id = collection.insert(doc, safe=safe,
**write_options)
else:
object_id = collection.save(doc, safe=safe, **write_options)
object_id = collection.save(doc, safe=safe,
**write_options)
else:
object_id = doc['_id']
updates, removals = self._delta()
# Need to add shard key to query, or you get an error
select_dict = {'_id': object_id}
shard_key = self.__class__._meta.get('shard_key', tuple())
@@ -197,11 +235,15 @@ class Document(BaseDocument):
upsert = self._created
if updates:
collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
collection.update(select_dict, {"$set": updates},
upsert=upsert, safe=safe, **write_options)
if removals:
collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)
collection.update(select_dict, {"$unset": removals},
upsert=upsert, safe=safe, **write_options)
cascade = self._meta.get('cascade', True) if cascade is None else cascade
warn_cascade = not cascade and 'cascade' not in self._meta
cascade = (self._meta.get('cascade', True)
if cascade is None else cascade)
if cascade:
kwargs = {
"safe": safe,
@@ -213,37 +255,64 @@ class Document(BaseDocument):
if cascade_kwargs: # Allow granular control over cascades
kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs
self.cascade_save(**kwargs)
self.cascade_save(warn_cascade=warn_cascade, **kwargs)
except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)'
if u'duplicate key' in unicode(err):
if re.match('^E1100[01] duplicate key', unicode(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
id_field = self._meta['id_field']
self[id_field] = self._fields[id_field].to_python(object_id)
if id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id)
self._changed_fields = []
self._created = False
signals.post_save.send(self.__class__, document=self, created=created)
return self
def cascade_save(self, *args, **kwargs):
"""Recursively saves any references / generic references on an object"""
from fields import ReferenceField, GenericReferenceField
def cascade_save(self, warn_cascade=None, *args, **kwargs):
"""Recursively saves any references /
generic references on an objects"""
import fields
_refs = kwargs.get('_refs', []) or []
for name, cls in self._fields.items():
if not isinstance(cls, (ReferenceField, GenericReferenceField)):
if not isinstance(cls, (fields.ReferenceField,
fields.GenericReferenceField)):
continue
ref = getattr(self, name)
if not ref:
if not ref or isinstance(ref, DBRef):
continue
if not getattr(ref, '_changed_fields', True):
continue
ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
if ref and ref_id not in _refs:
if warn_cascade:
msg = ("Cascading saves will default to off in 0.8, "
"please explicitly set `.save(cascade=True)`")
warnings.warn(msg, FutureWarning)
_refs.append(ref_id)
kwargs["_refs"] = _refs
ref.save(**kwargs)
ref._changed_fields = []
@property
def _object_key(self):
"""Dict to identify object in collection
"""
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
select_dict[k] = getattr(self, k)
return select_dict
def update(self, **kwargs):
"""Performs an update on the :class:`~mongoengine.Document`
A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
@@ -255,11 +324,7 @@ class Document(BaseDocument):
raise OperationError('attempt to update a document not yet saved')
# Need to add shard key to query, or you get an error
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
select_dict[k] = getattr(self, k)
return self.__class__.objects(**select_dict).update_one(**kwargs)
return self.__class__.objects(**self._object_key).update_one(**kwargs)
def delete(self, safe=False):
"""Delete the :class:`~mongoengine.Document` from the database. This
@@ -270,7 +335,7 @@ class Document(BaseDocument):
signals.pre_delete.send(self.__class__, document=self)
try:
self.__class__.objects(pk=self.pk).delete(safe=safe)
self.__class__.objects(**self._object_key).delete(safe=safe)
except pymongo.errors.OperationFailure, err:
message = u'Could not delete document (%s)' % err.message
raise OperationError(message)
@@ -283,8 +348,8 @@ class Document(BaseDocument):
.. versionadded:: 0.5
"""
from dereference import DeReference
self._data = DeReference()(self._data, max_depth)
import dereference
self._data = dereference.DeReference()(self._data, max_depth)
return self
def reload(self, max_depth=1):
@@ -296,7 +361,12 @@ class Document(BaseDocument):
id_field = self._meta['id_field']
obj = self.__class__.objects(
**{id_field: self[id_field]}
).first().select_related(max_depth=max_depth)
).limit(1).select_related(max_depth=max_depth)
if obj:
obj = obj[0]
else:
msg = "Reloaded document has been deleted"
raise OperationError(msg)
for field in self._fields:
setattr(self, field, self._reload(field, obj[field]))
if self._dynamic:
@@ -332,17 +402,18 @@ class Document(BaseDocument):
"""This method registers the delete rules to apply when removing this
object.
"""
cls._meta['delete_rules'][(document_cls, field_name)] = rule
delete_rules = cls._meta.get('delete_rules') or {}
delete_rules[(document_cls, field_name)] = rule
cls._meta['delete_rules'] = delete_rules
@classmethod
def drop_collection(cls):
"""Drops the entire collection associated with this
:class:`~mongoengine.Document` type from the database.
"""
from mongoengine.queryset import QuerySet
db = cls._get_db()
db.drop_collection(cls._get_collection_name())
QuerySet._reset_already_indexed(cls)
queryset.QuerySet._reset_already_indexed(cls)
class DynamicDocument(Document):
@@ -351,14 +422,19 @@ class DynamicDocument(Document):
way as an ordinary document but has expando style properties. Any data
passed or set against the :class:`~mongoengine.DynamicDocument` that is
not a field is automatically converted into a
:class:`~mongoengine.BaseDynamicField` and data can be attributed to that
:class:`~mongoengine.DynamicField` and data can be attributed to that
field.
..note::
.. note::
There is one caveat on Dynamic Documents: fields cannot start with `_`
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = TopLevelDocumentMetaclass
__metaclass__ = TopLevelDocumentMetaclass
_dynamic = True
def __delattr__(self, *args, **kwargs):
@@ -377,7 +453,11 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
information about dynamic documents.
"""
# The __metaclass__ attribute is removed by 2to3 when running with Python3
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
my_metaclass = DocumentMetaclass
__metaclass__ = DocumentMetaclass
_dynamic = True
def __delattr__(self, *args, **kwargs):

View File

@@ -1,18 +1,24 @@
import datetime
import time
import decimal
import gridfs
import itertools
import re
import time
import urllib2
import urlparse
import uuid
import warnings
from operator import itemgetter
import gridfs
from bson import Binary, DBRef, SON, ObjectId
from mongoengine.python_support import (PY3, bin_type, txt_type,
str_types, StringIO)
from base import (BaseField, ComplexBaseField, ObjectIdField,
ValidationError, get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
from connection import get_db, DEFAULT_CONNECTION_NAME
from operator import itemgetter
try:
@@ -21,16 +27,10 @@ except ImportError:
Image = None
ImageOps = None
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
'DecimalField', 'ComplexDateTimeField', 'URLField',
'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField',
'GenericReferenceField', 'FileField', 'BinaryField',
'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']
@@ -49,10 +49,16 @@ class StringField(BaseField):
super(StringField, self).__init__(**kwargs)
def to_python(self, value):
return unicode(value)
if isinstance(value, unicode):
return value
try:
value = value.decode('utf-8')
except:
pass
return value
def validate(self, value):
if not isinstance(value, (str, unicode)):
if not isinstance(value, basestring):
self.error('StringField only accepts string values')
if self.max_length is not None and len(value) > self.max_length:
@@ -97,25 +103,30 @@ class URLField(StringField):
.. versionadded:: 0.3
"""
URL_REGEX = re.compile(
r'^https?://'
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'
r'localhost|'
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
r'(?::\d+)?'
r'(?:/?|[/?]\S+)$', re.IGNORECASE
)
_URL_REGEX = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def __init__(self, verify_exists=False, **kwargs):
def __init__(self, verify_exists=False, url_regex=None, **kwargs):
self.verify_exists = verify_exists
self.url_regex = url_regex or self._URL_REGEX
super(URLField, self).__init__(**kwargs)
def validate(self, value):
if not URLField.URL_REGEX.match(value):
if not self.url_regex.match(value):
self.error('Invalid URL: %s' % value)
return
if self.verify_exists:
import urllib2
warnings.warn(
"The URLField verify_exists argument has intractable security "
"and performance issues. Accordingly, it has been deprecated.",
DeprecationWarning
)
try:
request = urllib2.Request(value)
urllib2.urlopen(request)
@@ -149,7 +160,11 @@ class IntField(BaseField):
super(IntField, self).__init__(**kwargs)
def to_python(self, value):
return int(value)
try:
value = int(value)
except ValueError:
pass
return value
def validate(self, value):
try:
@@ -164,6 +179,9 @@ class IntField(BaseField):
self.error('Integer value is too large')
def prepare_query_value(self, op, value):
if value is None:
return value
return int(value)
@@ -176,13 +194,17 @@ class FloatField(BaseField):
super(FloatField, self).__init__(**kwargs)
def to_python(self, value):
return float(value)
try:
value = float(value)
except ValueError:
pass
return value
def validate(self, value):
if isinstance(value, int):
value = float(value)
if not isinstance(value, float):
self.error('FoatField only accepts float values')
self.error('FloatField only accepts float values')
if self.min_value is not None and value < self.min_value:
self.error('Float value is too small')
@@ -191,6 +213,9 @@ class FloatField(BaseField):
self.error('Float value is too large')
def prepare_query_value(self, op, value):
if value is None:
return value
return float(value)
@@ -205,9 +230,14 @@ class DecimalField(BaseField):
super(DecimalField, self).__init__(**kwargs)
def to_python(self, value):
original_value = value
if not isinstance(value, basestring):
value = unicode(value)
return decimal.Decimal(value)
try:
value = decimal.Decimal(value)
except ValueError:
return original_value
return value
def to_mongo(self, value):
return unicode(value)
@@ -235,7 +265,11 @@ class BooleanField(BaseField):
"""
def to_python(self, value):
return bool(value)
try:
value = bool(value)
except ValueError:
pass
return value
def validate(self, value):
if not isinstance(value, bool):
@@ -366,10 +400,12 @@ class ComplexDateTimeField(StringField):
data = super(ComplexDateTimeField, self).__get__(instance, owner)
if data == None:
return datetime.datetime.now()
if isinstance(data, datetime.datetime):
return data
return self._convert_from_string(data)
def __set__(self, instance, value):
value = self._convert_from_datetime(value)
value = self._convert_from_datetime(value) if value else value
return super(ComplexDateTimeField, self).__set__(instance, value)
def validate(self, value):
@@ -378,7 +414,11 @@ class ComplexDateTimeField(StringField):
'ComplexDateTimeField')
def to_python(self, value):
return self._convert_from_string(value)
original_value = value
try:
return self._convert_from_string(value)
except:
return original_value
def to_mongo(self, value):
return self._convert_from_datetime(value)
@@ -441,6 +481,10 @@ class GenericEmbeddedDocumentField(BaseField):
:class:`~mongoengine.EmbeddedDocument` to be stored.
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
.. note ::
You can use the choices param to limit the acceptable
EmbeddedDocument types
"""
def prepare_query_value(self, op, value):
@@ -470,10 +514,56 @@ class GenericEmbeddedDocumentField(BaseField):
return data
class DynamicField(BaseField):
"""A truly dynamic field type capable of handling different and varying
types of data.
Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def to_mongo(self, value):
"""Convert a Python type to a MongoDBcompatible type.
"""
if isinstance(value, basestring):
return value
if hasattr(value, 'to_mongo'):
return value.to_mongo()
if not isinstance(value, (dict, list, tuple)):
return value
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
data = {}
for k, v in value.items():
data[k] = self.to_mongo(v)
if is_list: # Convert back to a list
value = [v for k, v in sorted(data.items(), key=itemgetter(0))]
else:
value = data
return value
def lookup_member(self, member_name):
return member_name
def prepare_query_value(self, op, value):
if isinstance(value, basestring):
from mongoengine.fields import StringField
return StringField().prepare_query_value(op, value)
return self.to_mongo(value)
class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database.
If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
.. note::
Required means it cannot be empty - as the default for ListFields is []
"""
@@ -612,13 +702,33 @@ class ReferenceField(BaseField):
* NULLIFY - Updates the reference to null.
* CASCADE - Deletes the documents associated with the reference.
* DENY - Prevent the deletion of the reference object.
* PULL - Pull the reference from a :class:`~mongoengine.ListField`
of references
Alternative syntax for registering delete rules (useful when implementing
bi-directional delete rules)
.. code-block:: python
class Bar(Document):
content = StringField()
foo = ReferenceField('Foo')
Bar.register_delete_rule(Foo, 'bar', NULLIFY)
.. note ::
`reverse_delete_rules` do not cause pre / post delete signals to be
triggered.
.. versionchanged:: 0.5 added `reverse_delete_rule`
"""
def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs):
def __init__(self, document_type, dbref=None,
reverse_delete_rule=DO_NOTHING, **kwargs):
"""Initialises the Reference Field.
:param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
or as the :class:`~pymongo.objectid.ObjectId`.id.
:param reverse_delete_rule: Determines what to do when the referring
object is deleted
"""
@@ -626,6 +736,13 @@ class ReferenceField(BaseField):
if not issubclass(document_type, (Document, basestring)):
self.error('Argument to ReferenceField constructor must be a '
'document class or a string')
if dbref is None:
msg = ("ReferenceFields will default to using ObjectId "
" strings in 0.8, set DBRef=True if this isn't desired")
warnings.warn(msg, FutureWarning)
self.dbref = dbref if dbref is not None else True # To change in 0.8
self.document_type_obj = document_type
self.reverse_delete_rule = reverse_delete_rule
super(ReferenceField, self).__init__(**kwargs)
@@ -648,8 +765,9 @@ class ReferenceField(BaseField):
# Get value from document instance if available
value = instance._data.get(self.name)
# Dereference DBRefs
if isinstance(value, (DBRef)):
if isinstance(value, DBRef):
value = self.document_type._get_db().dereference(value)
if value is not None:
instance._data[self.name] = self.document_type._from_son(value)
@@ -658,14 +776,18 @@ class ReferenceField(BaseField):
def to_mongo(self, document):
if isinstance(document, DBRef):
if not self.dbref:
return document.id
return document
elif not self.dbref and isinstance(document, basestring):
return document
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
if isinstance(document, Document):
# We need the id from the saved object to create the DBRef
id_ = document.id
id_ = document.pk
if id_ is None:
self.error('You can only reference documents once they have'
' been saved to the database')
@@ -673,18 +795,30 @@ class ReferenceField(BaseField):
id_ = document
id_ = id_field.to_mongo(id_)
collection = self.document_type._get_collection_name()
return DBRef(collection, id_)
if self.dbref:
collection = self.document_type._get_collection_name()
return DBRef(collection, id_)
return id_
def to_python(self, value):
"""Convert a MongoDB-compatible type to a Python type.
"""
if (not self.dbref and
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
collection = self.document_type._get_collection_name()
value = DBRef(collection, self.document_type.id.to_python(value))
return value
def prepare_query_value(self, op, value):
if value is None:
return None
return self.to_mongo(value)
def validate(self, value):
if not isinstance(value, (self.document_type, DBRef)):
self.error('A ReferenceField only accepts DBRef')
self.error("A ReferenceField only accepts DBRef or documents")
if isinstance(value, Document) and value.id is None:
self.error('You can only reference documents once they have been '
@@ -698,8 +832,12 @@ class GenericReferenceField(BaseField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
that will be automatically dereferenced on access (lazily).
..note :: Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register it.
.. note ::
* Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register
it.
* You can use the choices param to limit the acceptable Document types
.. versionadded:: 0.3
"""
@@ -735,6 +873,9 @@ class GenericReferenceField(BaseField):
if document is None:
return None
if isinstance(document, (dict, SON)):
return document
id_field_name = document.__class__._meta['id_field']
id_field = document.__class__._fields[id_field_name]
@@ -767,16 +908,20 @@ class BinaryField(BaseField):
self.max_bytes = max_bytes
super(BinaryField, self).__init__(**kwargs)
def __set__(self, instance, value):
"""Handle bytearrays in python 3.1"""
if PY3 and isinstance(value, bytearray):
value = bin_type(value)
return super(BinaryField, self).__set__(instance, value)
def to_mongo(self, value):
return Binary(value)
def to_python(self, value):
# Returns str not unicode as this is binary data
return str(value)
def validate(self, value):
if not isinstance(value, str):
self.error('BinaryField only accepts string values')
if not isinstance(value, (bin_type, txt_type, Binary)):
self.error("BinaryField only accepts instances of "
"(%s, %s, Binary)" % (
bin_type.__name__, txt_type.__name__))
if self.max_bytes is not None and len(value) > self.max_bytes:
self.error('Binary value is too long')
@@ -829,6 +974,17 @@ class GridFSProxy(object):
self_dict['_fs'] = None
return self_dict
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
def __eq__(self, other):
if isinstance(other, GridFSProxy):
return ((self.grid_id == other.grid_id) and
(self.collection_name == other.collection_name) and
(self.db_alias == other.db_alias))
else:
return False
@property
def fs(self):
if not self._fs:
@@ -875,10 +1031,14 @@ class GridFSProxy(object):
self.newfile.writelines(lines)
def read(self, size=-1):
try:
return self.get().read(size)
except:
gridout = self.get()
if gridout is None:
return None
else:
try:
return gridout.read(size)
except:
return ""
def delete(self):
# Delete file from GridFS, FileField still remains
@@ -923,19 +1083,21 @@ class FileField(BaseField):
# Check if a file already exists for this model
grid_file = instance._data.get(self.name)
self.grid_file = grid_file
if isinstance(self.grid_file, self.proxy_class):
if not self.grid_file.key:
self.grid_file.key = self.name
self.grid_file.instance = instance
return self.grid_file
return self.proxy_class(key=self.name, instance=instance,
db_alias=self.db_alias,
collection_name=self.collection_name)
if not isinstance(grid_file, self.proxy_class):
grid_file = self.proxy_class(key=self.name, instance=instance,
db_alias=self.db_alias,
collection_name=self.collection_name)
instance._data[self.name] = grid_file
if not grid_file.key:
grid_file.key = self.name
grid_file.instance = instance
return grid_file
def __set__(self, instance, value):
key = self.name
if isinstance(value, file) or isinstance(value, str):
if ((hasattr(value, 'read') and not
isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
# using "FileField() = file/string" notation
grid_file = instance._data.get(self.name)
# If a file already exists, delete it
@@ -991,6 +1153,7 @@ class ImageGridFsProxy(GridFSProxy):
try:
img = Image.open(file_obj)
img_format = img.format
except:
raise ValidationError('Invalid image')
@@ -1025,20 +1188,20 @@ class ImageGridFsProxy(GridFSProxy):
if thumbnail:
thumb_id = self._put_thumbnail(thumbnail,
img.format)
img_format)
else:
thumb_id = None
w, h = img.size
io = StringIO()
img.save(io, img.format)
img.save(io, img_format)
io.seek(0)
return super(ImageGridFsProxy, self).put(io,
width=w,
height=h,
format=img.format,
format=img_format,
thumbnail_id=thumb_id,
**kwargs)
@@ -1124,11 +1287,15 @@ class ImageField(FileField):
params_size = ('width', 'height', 'force')
extra_args = dict(size=size, thumbnail_size=thumbnail_size)
for att_name, att in extra_args.items():
if att and (isinstance(att, tuple) or isinstance(att, list)):
setattr(self, att_name, dict(
map(None, params_size, att)))
else:
setattr(self, att_name, None)
value = None
if isinstance(att, (tuple, list)):
if PY3:
value = dict(itertools.zip_longest(params_size, att,
fillvalue=None))
else:
value = dict(map(None, params_size, att))
setattr(self, att_name, value)
super(ImageField, self).__init__(
collection_name=collection_name,
@@ -1170,18 +1337,19 @@ class SequenceField(IntField):
.. versionadded:: 0.5
"""
def __init__(self, collection_name=None, db_alias = None, *args, **kwargs):
def __init__(self, collection_name=None, db_alias = None, sequence_name = None, *args, **kwargs):
self.collection_name = collection_name or 'mongoengine.counters'
self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
self.sequence_name = sequence_name
return super(SequenceField, self).__init__(*args, **kwargs)
def generate_new_value(self):
"""
Generate and Increment the counter
"""
sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
self.name)
collection = get_db(alias = self.db_alias )[self.collection_name]
sequence_name = self.sequence_name or self.owner_document._get_collection_name()
sequence_id = "%s.%s" % (sequence_name, self.name)
collection = get_db(alias=self.db_alias)[self.collection_name]
counter = collection.find_and_modify(query={"_id": sequence_id},
update={"$inc": {"next": 1}},
new=True,
@@ -1203,7 +1371,7 @@ class SequenceField(IntField):
instance._data[self.name] = value
instance._mark_as_changed(self.name)
return value
return int(value) if value else None
def __set__(self, instance, value):
@@ -1223,17 +1391,44 @@ class UUIDField(BaseField):
.. versionadded:: 0.6
"""
_binary = None
def __init__(self, **kwargs):
def __init__(self, binary=None, **kwargs):
"""
Store UUID data in the database
:param binary: (optional) boolean store as binary.
.. versionchanged:: 0.6.19
"""
if binary is None:
binary = False
msg = ("UUIDFields will soon default to store as binary, please "
"configure binary=False if you wish to store as a string")
warnings.warn(msg, FutureWarning)
self._binary = binary
super(UUIDField, self).__init__(**kwargs)
def to_python(self, value):
if not isinstance(value, basestring):
value = unicode(value)
return uuid.UUID(value)
if not self._binary:
original_value = value
try:
if not isinstance(value, basestring):
value = unicode(value)
return uuid.UUID(value)
except:
return original_value
return value
def to_mongo(self, value):
return unicode(value)
if not self._binary:
return unicode(value)
return value
def prepare_query_value(self, op, value):
if value is None:
return None
return self.to_mongo(value)
def validate(self, value):
if not isinstance(value, uuid.UUID):

View File

@@ -0,0 +1,61 @@
"""Helper functions and types to aid with Python 2.5 - 3 support."""
import sys
PY3 = sys.version_info[0] == 3
PY25 = sys.version_info[:2] == (2, 5)
UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264
if PY3:
import codecs
from io import BytesIO as StringIO
# return s converted to binary. b('test') should be equivalent to b'test'
def b(s):
return codecs.latin_1_encode(s)[0]
bin_type = bytes
txt_type = str
else:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Conversion to binary only necessary in Python 3
def b(s):
return s
bin_type = str
txt_type = unicode
str_types = (bin_type, txt_type)
if PY25:
def product(*args, **kwds):
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x + [y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
reduce = reduce
else:
from itertools import product
from functools import reduce
# For use with Python 2.5
# converts all keys from unicode to str for d and all nested dictionaries
def to_str_keys_recursive(d):
if isinstance(d, list):
for val in d:
if isinstance(val, (dict, list)):
to_str_keys_recursive(val)
elif isinstance(d, dict):
for key, val in d.items():
if isinstance(val, (dict, list)):
to_str_keys_recursive(val)
if isinstance(key, unicode):
d[str(key)] = d.pop(key)
else:
raise ValueError("non list/dict parameter not allowed")

View File

@@ -4,13 +4,18 @@ import copy
import itertools
import operator
from collections import defaultdict
from functools import partial
from mongoengine.python_support import product, reduce
import pymongo
from bson.code import Code
from mongoengine import signals
__all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']
'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL']
# The maximum number of items to display in a QuerySet.__repr__
@@ -21,6 +26,7 @@ DO_NOTHING = 0
NULLIFY = 1
CASCADE = 2
DENY = 3
PULL = 4
class DoesNotExist(Exception):
@@ -39,6 +45,10 @@ class OperationError(Exception):
pass
class NotUniqueError(OperationError):
pass
RE_TYPE = type(re.compile(''))
@@ -117,7 +127,7 @@ class QueryTreeTransformerVisitor(QNodeVisitor):
# the necessary parts. Then for each $or part, create a new query
# that ANDs the necessary part with the $or part.
clauses = []
for or_group in itertools.product(*or_groups):
for or_group in product(*or_groups):
q_object = reduce(lambda a, b: a & b, and_parts, Q())
q_object = reduce(lambda a, b: a & b, or_group, q_object)
clauses.append(q_object)
@@ -208,7 +218,7 @@ class QNode(object):
def _combine(self, other, operation):
"""Combine this node with another node into a QCombination object.
"""
if other.empty:
if getattr(other, 'empty', True):
return self
if self.empty:
@@ -326,6 +336,7 @@ class QuerySet(object):
"""
__already_indexed = set()
__dereference = False
def __init__(self, document, collection):
self._document = document
@@ -340,11 +351,12 @@ class QuerySet(object):
self._timeout = True
self._class_check = True
self._slave_okay = False
self._iter = False
self._scalar = []
# If inheritance is allowed, only return instances and instances of
# subclasses of the class being used
if document._meta.get('allow_inheritance'):
if document._meta.get('allow_inheritance') != False:
self._initial_query = {'_types': self._document._class_name}
self._loaded_fields = QueryFieldList(always_include=['_cls'])
self._cursor_obj = None
@@ -386,69 +398,15 @@ class QuerySet(object):
or a **-** to determine the index ordering
"""
index_spec = QuerySet._build_index_spec(self._document, key_or_list)
self._collection.ensure_index(
index_spec['fields'],
drop_dups=drop_dups,
background=background,
sparse=index_spec.get('sparse', False),
unique=index_spec.get('unique', False))
index_spec = index_spec.copy()
fields = index_spec.pop('fields')
index_spec['drop_dups'] = drop_dups
index_spec['background'] = background
index_spec.update(kwargs)
self._collection.ensure_index(fields, **index_spec)
return self
@classmethod
def _build_index_spec(cls, doc_cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec.
"""
if isinstance(spec, basestring):
spec = {'fields': [spec]}
if isinstance(spec, (list, tuple)):
spec = {'fields': spec}
index_list = []
use_types = doc_cls._meta.get('allow_inheritance', True)
for key in spec['fields']:
# Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
direction = pymongo.ASCENDING
if key.startswith("-"):
direction = pymongo.DESCENDING
elif key.startswith("*"):
direction = pymongo.GEO2D
if key.startswith(("+", "-", "*")):
key = key[1:]
# Use real field name, do it manually because we need field
# objects for the next part (list field checking)
parts = key.split('.')
fields = QuerySet._lookup_field(doc_cls, parts)
parts = [field.db_field for field in fields]
key = '.'.join(parts)
index_list.append((key, direction))
# Check if a list field is being used, don't use _types if it is
if use_types and not all(f._index_with_types for f in fields):
use_types = False
# If _types is being used, prepend it to every specified index
index_types = doc_cls._meta.get('index_types', True)
allow_inheritance = doc_cls._meta.get('allow_inheritance')
if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
index_list.insert(0, ('_types', 1))
spec['fields'] = index_list
if spec.get('sparse', False) and len(spec['fields']) > 1:
raise ValueError(
'Sparse indexes can only have one field in them. '
'See https://jira.mongodb.org/browse/SERVER-2193')
return spec
@classmethod
def _reset_already_indexed(cls, document=None):
"""Helper to reset already indexed, can be useful for testing purposes"""
if document:
cls.__already_indexed.discard(document)
cls.__already_indexed.clear()
def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
"""Filter the selected documents by calling the
:class:`~mongoengine.queryset.QuerySet` with a query.
@@ -481,13 +439,141 @@ class QuerySet(object):
"""Returns all documents."""
return self.__call__()
def _ensure_indexes(self):
"""Checks the document meta data and ensures all the indexes exist.
.. note:: You can disable automatic index creation by setting
`auto_create_index` to False in the documents meta data
"""
background = self._document._meta.get('index_background', False)
drop_dups = self._document._meta.get('index_drop_dups', False)
index_opts = self._document._meta.get('index_opts') or {}
index_types = self._document._meta.get('index_types', True)
# determine if an index which we are creating includes
# _type as its first field; if so, we can avoid creating
# an extra index on _type, as mongodb will use the existing
# index to service queries against _type
types_indexed = False
def includes_types(fields):
first_field = None
if len(fields):
if isinstance(fields[0], basestring):
first_field = fields[0]
elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
first_field = fields[0][0]
return first_field == '_types'
# Ensure indexes created by uniqueness constraints
for index in self._document._meta['unique_indexes']:
types_indexed = types_indexed or includes_types(index)
self._collection.ensure_index(index, unique=True,
background=background, drop_dups=drop_dups, **index_opts)
# Ensure document-defined indexes are created
if self._document._meta['index_specs']:
index_spec = self._document._meta['index_specs']
for spec in index_spec:
spec = spec.copy()
fields = spec.pop('fields')
types_indexed = types_indexed or includes_types(fields)
opts = index_opts.copy()
opts.update(spec)
self._collection.ensure_index(fields,
background=background, **opts)
# If _types is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _types
if index_types and '_types' in self._query and not types_indexed:
self._collection.ensure_index('_types',
background=background, **index_opts)
# Add geo indices
for field in self._document._geo_indices():
index_spec = [(field.db_field, pymongo.GEO2D)]
self._collection.ensure_index(index_spec,
background=background, **index_opts)
@classmethod
def _build_index_spec(cls, doc_cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec.
"""
if isinstance(spec, basestring):
spec = {'fields': [spec]}
elif isinstance(spec, (list, tuple)):
spec = {'fields': list(spec)}
elif isinstance(spec, dict):
spec = dict(spec)
index_list = []
direction = None
allow_inheritance = doc_cls._meta.get('allow_inheritance') != False
# If sparse - don't include types
use_types = allow_inheritance and not spec.get('sparse', False)
for key in spec['fields']:
# If inherited spec continue
if isinstance(key, (list, tuple)):
continue
# Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
direction = pymongo.ASCENDING
if key.startswith("-"):
direction = pymongo.DESCENDING
elif key.startswith("*"):
direction = pymongo.GEO2D
if key.startswith(("+", "-", "*")):
key = key[1:]
# Use real field name, do it manually because we need field
# objects for the next part (list field checking)
parts = key.split('.')
if parts in (['pk'], ['id'], ['_id']):
key = '_id'
fields = []
else:
fields = QuerySet._lookup_field(doc_cls, parts)
parts = [field if field == '_id' else field.db_field
for field in fields]
key = '.'.join(parts)
index_list.append((key, direction))
# Check if a list field is being used, don't use _types if it is
if use_types and not all(f._index_with_types for f in fields):
use_types = False
# If _types is being used, prepend it to every specified index
index_types = doc_cls._meta.get('index_types', True)
if (spec.get('types', index_types) and use_types
and direction is not pymongo.GEO2D):
index_list.insert(0, ('_types', 1))
spec['fields'] = index_list
if spec.get('sparse', False) and len(spec['fields']) > 1:
raise ValueError(
'Sparse indexes can only have one field in them. '
'See https://jira.mongodb.org/browse/SERVER-2193')
return spec
@classmethod
def _reset_already_indexed(cls, document=None):
"""Helper to reset already indexed, can be useful for testing purposes"""
if document:
cls.__already_indexed.discard(document)
cls.__already_indexed.clear()
@property
def _collection(self):
"""Property that returns the collection object. This allows us to
perform operations only if the collection is accessed.
"""
if self._document not in QuerySet.__already_indexed:
# Ensure collection exists
db = self._document._get_db()
if self._collection_obj.name not in db.collection_names():
@@ -496,52 +582,8 @@ class QuerySet(object):
QuerySet.__already_indexed.add(self._document)
background = self._document._meta.get('index_background', False)
drop_dups = self._document._meta.get('index_drop_dups', False)
index_opts = self._document._meta.get('index_options', {})
index_types = self._document._meta.get('index_types', True)
# determine if an index which we are creating includes
# _type as its first field; if so, we can avoid creating
# an extra index on _type, as mongodb will use the existing
# index to service queries against _type
types_indexed = False
def includes_types(fields):
first_field = None
if len(fields):
if isinstance(fields[0], basestring):
first_field = fields[0]
elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
first_field = fields[0][0]
return first_field == '_types'
# Ensure indexes created by uniqueness constraints
for index in self._document._meta['unique_indexes']:
types_indexed = types_indexed or includes_types(index)
self._collection.ensure_index(index, unique=True,
background=background, drop_dups=drop_dups, **index_opts)
# Ensure document-defined indexes are created
if self._document._meta['indexes']:
for spec in self._document._meta['indexes']:
types_indexed = types_indexed or includes_types(spec['fields'])
opts = index_opts.copy()
opts['unique'] = spec.get('unique', False)
opts['sparse'] = spec.get('sparse', False)
self._collection.ensure_index(spec['fields'],
background=background, **opts)
# If _types is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _types
if index_types and '_types' in self._query and not types_indexed:
self._collection.ensure_index('_types',
background=background, **index_opts)
# Add geo indicies
for field in self._document._geo_indices():
index_spec = [(field.db_field, pymongo.GEO2D)]
self._collection.ensure_index(index_spec,
background=background, **index_opts)
if self._document._meta.get('auto_create_index', True):
self._ensure_indexes()
return self._collection_obj
@@ -580,7 +622,6 @@ class QuerySet(object):
if self._hint != -1:
self._cursor_obj.hint(self._hint)
return self._cursor_obj
@classmethod
@@ -603,6 +644,7 @@ class QuerySet(object):
"Can't use index on unsubscriptable field (%s)" % err)
fields.append(field_name)
continue
if field is None:
# Look up first field from the document
if field_name == 'pk':
@@ -611,8 +653,8 @@ class QuerySet(object):
if field_name in document._fields:
field = document._fields[field_name]
elif document._dynamic:
from base import BaseDynamicField
field = BaseDynamicField(db_field=field_name)
from fields import DynamicField
field = DynamicField(db_field=field_name)
else:
raise InvalidQueryError('Cannot resolve field "%s"'
% field_name)
@@ -620,8 +662,11 @@ class QuerySet(object):
from mongoengine.fields import ReferenceField, GenericReferenceField
if isinstance(field, (ReferenceField, GenericReferenceField)):
raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
# Look up subfield on the previous field
new_field = field.lookup_member(field_name)
if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name)
else:
# Look up subfield on the previous field
new_field = field.lookup_member(field_name)
from base import ComplexBaseField
if not new_field and isinstance(field, ComplexBaseField):
fields.append(field_name)
@@ -654,6 +699,7 @@ class QuerySet(object):
custom_operators = ['match']
mongo_query = {}
merge_query = defaultdict(list)
for key, value in query.items():
if key == "__raw__":
mongo_query.update(value)
@@ -680,7 +726,7 @@ class QuerySet(object):
cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, str):
if isinstance(field, basestring):
parts.append(field)
append_field = False
else:
@@ -741,8 +787,23 @@ class QuerySet(object):
key = '.'.join(parts)
if op is None or key not in mongo_query:
mongo_query[key] = value
elif key in mongo_query and isinstance(mongo_query[key], dict):
mongo_query[key].update(value)
elif key in mongo_query:
if key in mongo_query and isinstance(mongo_query[key], dict):
mongo_query[key].update(value)
else:
# Store for manually merging later
merge_query[key].append(value)
# The queryset has been filtered in such a way that we must manually merge
for k, v in merge_query.items():
merge_query[k].append(mongo_query[k])
del mongo_query[k]
if isinstance(v, list):
value = [{k:val} for val in v]
if '$and' in mongo_query.keys():
mongo_query['$and'].append(value)
else:
mongo_query['$and'] = value
return mongo_query
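A rough illustration of the manual $and merge above, assuming a hypothetical Person document with a name StringField (this is the behaviour exercised by the chaining regression test for MongoEngine/mongoengine#135):

from mongoengine import Document, StringField

class Person(Document):
    name = StringField()

# Two filters on the same key no longer overwrite each other; the second
# value is collected in merge_query and emitted as an explicit $and.
qs = Person.objects(name__startswith='Guido').filter(name__endswith='Rossum')
# The resulting raw query is roughly:
# {'$and': [{'name': /^Guido/}, {'name': /Rossum$/}]}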
@@ -781,15 +842,19 @@ class QuerySet(object):
dictionary of default values for the new document may be provided as a
keyword argument called :attr:`defaults`.
.. note:: This requires two separate operations and therefore a
race condition exists. Because there are no transactions in MongoDB,
other approaches should be investigated to ensure you don't
accidentally duplicate data when using this method.
:param write_options: optional extra keyword arguments used if we
have to create a new document.
Passes any write_options onto :meth:`~mongoengine.Document.save`
.. versionadded:: 0.3
:param auto_save: whether the object should be saved automatically if not found.
.. versionadded:: 0.6
.. versionchanged:: 0.6 - added `auto_save`
.. versionadded:: 0.3
"""
defaults = query.get('defaults', {})
if 'defaults' in query:
@@ -824,11 +889,21 @@ class QuerySet(object):
result = None
return result
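A usage sketch of the get_or_create behaviour documented above, assuming a hypothetical Person document; the defaults dict is only applied when a new document has to be created:

from mongoengine import Document, StringField, IntField

class Person(Document):
    name = StringField()
    age = IntField()

# Returns (document, created); created is True when no match existed and a
# new document was saved (auto_save defaults to True).
person, created = Person.objects.get_or_create(
    name='Ross', defaults={'age': 30})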
def insert(self, doc_or_docs, load_bulk=True):
def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None):
"""bulk insert documents
If ``safe=True`` and the operation is unsuccessful, an
:class:`~mongoengine.OperationError` will be raised.
:param doc_or_docs: a document or list of documents to be inserted
:param load_bulk (optional): If True, returns the list of document instances
:param safe: check if the operation succeeded before returning
:param write_options: Extra keyword arguments are passed down to
:meth:`~pymongo.collection.Collection.insert`
which will be used as options for the resultant ``getLastError`` command.
For example, ``insert(..., write_options={'w': 2, 'fsync': True})`` will wait until at least two
servers have recorded the write and will force an fsync on each server being
written to.
By default returns document instances, set ``load_bulk`` to False to
return just ``ObjectIds``
@@ -837,6 +912,10 @@ class QuerySet(object):
"""
from document import Document
if not write_options:
write_options = {}
write_options.update({'safe': safe})
docs = doc_or_docs
return_one = False
if isinstance(docs, Document) or issubclass(docs.__class__, Document):
@@ -854,7 +933,16 @@ class QuerySet(object):
raw.append(doc.to_mongo())
signals.pre_bulk_insert.send(self._document, documents=docs)
ids = self._collection.insert(raw)
try:
ids = self._collection.insert(raw, **write_options)
except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)'
if re.match('^E1100[01] duplicate key', unicode(err)):
# E11000 - duplicate key error index
# E11001 - duplicate key on update
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % unicode(err))
raise OperationError(message % unicode(err))
if not load_bulk:
signals.post_bulk_insert.send(
@@ -907,6 +995,7 @@ class QuerySet(object):
def next(self):
"""Wrap the result in a :class:`~mongoengine.Document` object.
"""
self._iter = True
try:
if self._limit == 0:
raise StopIteration
@@ -923,6 +1012,7 @@ class QuerySet(object):
.. versionadded:: 0.3
"""
self._iter = False
self._cursor.rewind()
def count(self):
@@ -951,6 +1041,8 @@ class QuerySet(object):
:class:`~bson.code.Code` or string
:param output: output collection name; if set to 'inline' will try to
use :meth:`~pymongo.collection.Collection.inline_map_reduce`.
This can also be a dictionary containing output options,
see: http://docs.mongodb.org/manual/reference/commands/#mapReduce
:param finalize_f: finalize function, an optional function that
performs any post-reduction processing.
:param scope: values to insert into map/reduce global scope. Optional.
@@ -1102,9 +1194,10 @@ class QuerySet(object):
.. versionadded:: 0.4
.. versionchanged:: 0.5 - Fixed handling references
.. versionchanged:: 0.6 - Improved db_field reference handling
"""
from dereference import DeReference
return DeReference()(self._cursor.distinct(field), 1)
return self._dereference(self._cursor.distinct(field), 1,
name=field, instance=self._document)
def only(self, *fields):
"""Load only a subset of this document's fields. ::
@@ -1259,9 +1352,16 @@ class QuerySet(object):
"""
doc = self._document
# Handle deletes where skips or limits have been applied
if self._skip or self._limit:
for doc in self:
doc.delete()
return
delete_rules = doc._meta.get('delete_rules') or {}
# Check for DENY rules before actually deleting/nullifying any other
# references
for rule_entry in doc._meta['delete_rules']:
for rule_entry in delete_rules:
document_cls, field_name = rule_entry
rule = doc._meta['delete_rules'][rule_entry]
if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0:
@@ -1269,15 +1369,23 @@ class QuerySet(object):
(document_cls.__name__, field_name)
raise OperationError(msg)
for rule_entry in doc._meta['delete_rules']:
for rule_entry in delete_rules:
document_cls, field_name = rule_entry
rule = doc._meta['delete_rules'][rule_entry]
if rule == CASCADE:
document_cls.objects(**{field_name + '__in': self}).delete(safe=safe)
ref_q = document_cls.objects(**{field_name + '__in': self})
ref_q_count = ref_q.count()
if (doc != document_cls and ref_q_count > 0
or (doc == document_cls and ref_q_count > 0)):
ref_q.delete(safe=safe)
elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update(
safe_update=safe,
**{'unset__%s' % field_name: 1})
elif rule == PULL:
document_cls.objects(**{field_name + '__in': self}).update(
safe_update=safe,
**{'pull_all__%s' % field_name: self})
self._collection.remove(self._query, safe=safe)
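The delete rules consulted above are normally declared on ReferenceFields; a minimal sketch, assuming hypothetical User and BlogPost documents and that the rule constants are importable from mongoengine as in the documentation:

from mongoengine import (Document, StringField, ReferenceField, ListField,
                         CASCADE, PULL)

class User(Document):
    name = StringField()

class BlogPost(Document):
    author = ReferenceField(User, reverse_delete_rule=CASCADE)
    reviewers = ListField(ReferenceField(User, reverse_delete_rule=PULL))

# Deleting users cascades to their posts and pulls them from reviewer lists;
# DENY would instead raise OperationError while references still exist.
User.objects(name='Ross').delete()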
@@ -1287,6 +1395,8 @@ class QuerySet(object):
"""
operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all',
'pull', 'pull_all', 'add_to_set']
match_operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not']
mongo_update = {}
for key, value in update.items():
@@ -1310,6 +1420,10 @@ class QuerySet(object):
elif op == 'add_to_set':
op = op.replace('_to_set', 'ToSet')
match = None
if parts[-1] in match_operators:
match = parts.pop()
if _doc_cls:
# Switch field names to proper names [set in Field(name='foo')]
fields = QuerySet._lookup_field(_doc_cls, parts)
@@ -1318,7 +1432,7 @@ class QuerySet(object):
cleaned_fields = []
for field in fields:
append_field = True
if isinstance(field, str):
if isinstance(field, basestring):
# Convert the S operator to $
if field == 'S':
field = '$'
@@ -1332,20 +1446,42 @@ class QuerySet(object):
# Convert value to proper value
field = cleaned_fields[-1]
if op in (None, 'set', 'push', 'pull', 'addToSet'):
if op in (None, 'set', 'push', 'pull'):
if field.required or value is not None:
value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'):
value = [field.prepare_query_value(op, v) for v in value]
elif op == 'addToSet':
if isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif field.required or value is not None:
value = field.prepare_query_value(op, value)
if match:
match = '$' + match
value = {match: value}
key = '.'.join(parts)
if not op:
raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value")
raise InvalidQueryError("Updates must supply an operation "
"eg: set__FIELD=value")
if op:
if 'pull' in op and '.' in key:
# Dot operators don't work on pull operations;
# they use nested dict syntax instead
if op == 'pullAll':
raise InvalidQueryError("pullAll operations only support "
"a single field depth")
parts.reverse()
for key in parts:
value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {"$each": value}}
else:
value = {key: value}
key = '$' + op
key = '$' + op
if key not in mongo_update:
mongo_update[key] = value
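A sketch of the new match-operator support in updates (MongoEngine/mongoengine#144), assuming a hypothetical BlogPost document with a votes ListField of ints:

from mongoengine import Document, ListField, IntField

class BlogPost(Document):
    votes = ListField(IntField())

# A trailing match operator is popped off the key and wrapped around the
# value, so this builds {'$pull': {'votes': {'$gt': 3}}} and removes every
# list element greater than 3.
BlogPost.objects.update(pull__votes__gt=3)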
@@ -1435,8 +1571,6 @@ class QuerySet(object):
def lookup(obj, name):
chunks = name.split('__')
for chunk in chunks:
if hasattr(obj, '_db_field_map'):
chunk = obj._db_field_map.get(chunk, chunk)
obj = getattr(obj, chunk)
return obj
@@ -1648,10 +1782,11 @@ class QuerySet(object):
def _item_frequencies_map_reduce(self, field, normalize=False):
map_func = """
function() {
path = '{{~%(field)s}}'.split('.');
field = this;
var path = '{{~%(field)s}}'.split('.');
var field = this;
for (p in path) {
if (field)
if (typeof field != 'undefined')
field = field[path[p]];
else
break;
@@ -1660,7 +1795,7 @@ class QuerySet(object):
field.forEach(function(item) {
emit(item, 1);
});
} else if (field) {
} else if (typeof field != 'undefined') {
emit(field, 1);
} else {
emit(null, 1);
@@ -1684,12 +1819,12 @@ class QuerySet(object):
if isinstance(key, float):
if int(key) == key:
key = int(key)
key = str(key)
frequencies[key] = f.value
frequencies[key] = int(f.value)
if normalize:
count = sum(frequencies.values())
frequencies = dict([(k, v / count) for k, v in frequencies.items()])
frequencies = dict([(k, float(v) / count)
for k, v in frequencies.items()])
return frequencies
@@ -1697,31 +1832,28 @@ class QuerySet(object):
"""Uses exec_js to execute"""
freq_func = """
function(path) {
path = path.split('.');
var path = path.split('.');
if (options.normalize) {
var total = 0.0;
db[collection].find(query).forEach(function(doc) {
field = doc;
for (p in path) {
if (field)
field = field[path[p]];
else
break;
}
if (field && field.constructor == Array) {
total += field.length;
} else {
total++;
}
});
}
var total = 0.0;
db[collection].find(query).forEach(function(doc) {
var field = doc;
for (p in path) {
if (field)
field = field[path[p]];
else
break;
}
if (field && field.constructor == Array) {
total += field.length;
} else {
total++;
}
});
var frequencies = {};
var types = {};
var inc = 1.0;
if (options.normalize) {
inc /= total;
}
db[collection].find(query).forEach(function(doc) {
field = doc;
for (p in path) {
@@ -1736,34 +1868,48 @@ class QuerySet(object):
});
} else {
var item = field;
types[item] = item;
frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
}
});
return frequencies;
return [total, frequencies, types];
}
"""
data = self.exec_js(freq_func, field, normalize=normalize)
if 'undefined' in data:
data[None] = data['undefined']
del(data['undefined'])
return data
total, data, types = self.exec_js(freq_func, field)
values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
if normalize:
values = dict([(k, float(v) / total) for k, v in values.items()])
frequencies = {}
for k, v in values.iteritems():
if isinstance(k, float):
if int(k) == k:
k = int(k)
frequencies[k] = v
return frequencies
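Both the map/reduce and exec_js code paths above back the public item_frequencies method; a usage sketch, assuming a hypothetical BlogPost document with a tags ListField:

from mongoengine import Document, StringField, ListField

class BlogPost(Document):
    tags = ListField(StringField())

# Counts per distinct tag across the queryset, e.g. {'mongodb': 12, 'python': 7}
freqs = BlogPost.objects.item_frequencies('tags')
# With normalize=True the counts are returned as fractions of the total
freqs = BlogPost.objects.item_frequencies('tags', normalize=True)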
def __repr__(self):
limit = REPR_OUTPUT_SIZE + 1
start = (0 if self._skip is None else self._skip)
if self._limit is None:
stop = start + limit
if self._limit is not None:
if self._limit - start > limit:
stop = start + limit
else:
stop = self._limit
try:
data = list(self[start:stop])
except pymongo.errors.InvalidOperation:
return ".. queryset mid-iteration .."
"""Provides the string representation of the QuerySet
.. versionchanged:: 0.6.13 Now doesn't modify the cursor
"""
if self._iter:
return '.. queryset mid-iteration ..'
data = []
for i in xrange(REPR_OUTPUT_SIZE + 1):
try:
data.append(self.next())
except StopIteration:
break
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
self.rewind()
return repr(data)
def select_related(self, max_depth=1):
@@ -1772,13 +1918,30 @@ class QuerySet(object):
.. versionadded:: 0.5
"""
from dereference import DeReference
# Make select related work the same for querysets
max_depth += 1
return DeReference()(self, max_depth=max_depth)
return self._dereference(self, max_depth=max_depth)
@property
def _dereference(self):
if not self.__dereference:
from dereference import DeReference
self.__dereference = DeReference() # Cached
return self.__dereference
class QuerySetManager(object):
"""
The default QuerySet Manager.
Custom QuerySet managers can extend this class so users can
add extra queryset functionality. Any custom manager method must accept a
:class:`~mongoengine.Document` class as its first argument, and a
:class:`~mongoengine.queryset.QuerySet` as its second argument.
The method should return a :class:`~mongoengine.queryset.QuerySet`,
probably the same one that was passed in, but modified in some way.
"""
get_queryset = None
@@ -1796,13 +1959,16 @@ class QuerySetManager(object):
return self
# owner is the document that contains the QuerySetManager
queryset_class = owner._meta['queryset_class'] or QuerySet
queryset_class = owner._meta.get('queryset_class') or QuerySet
queryset = queryset_class(owner, owner._get_collection())
if self.get_queryset:
if self.get_queryset.func_code.co_argcount == 1:
arg_count = self.get_queryset.func_code.co_argcount
if arg_count == 1:
queryset = self.get_queryset(queryset)
else:
elif arg_count == 2:
queryset = self.get_queryset(owner, queryset)
else:
queryset = partial(self.get_queryset, owner, queryset)
return queryset
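The manager contract described in the QuerySetManager docstring is usually satisfied via the queryset_manager decorator; a minimal sketch, assuming a hypothetical BlogPost document and that queryset_manager is importable from mongoengine as in the user guide:

from datetime import datetime
from mongoengine import Document, DateTimeField, queryset_manager

class BlogPost(Document):
    published = DateTimeField()

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # receives the Document class and a QuerySet, returns a QuerySet
        return queryset.filter(published__lte=datetime.now())

# Accessed like the default `objects` manager
recent = BlogPost.live_posts.order_by('-published')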


@@ -5,7 +5,7 @@
%define srcname mongoengine
Name: python-%{srcname}
Version: 0.6.6
Version: 0.7.5
Release: 1%{?dist}
Summary: A Python Document-Object Mapper for working with MongoDB
@@ -51,24 +51,4 @@ rm -rf $RPM_BUILD_ROOT
# %{python_sitearch}/*
%changelog
* Wed Apr 24 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.6 released
* Wed Apr 18 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.5 released
* Wed Apr 18 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.4 released
* Wed Mar 24 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.3 released
* Wed Mar 22 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.2 released
* Wed Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5
- 0.6.1 released
* Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6
- 0.6 released
* Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1
- Update to latest dev version
- Add PIL dependency for ImageField
* Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1
- Update version
* Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1
- Initial version
* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html

setup.cfg Normal file

@@ -0,0 +1,11 @@
[nosetests]
verbosity = 3
detailed-errors = 1
#with-coverage = 1
#cover-erase = 1
#cover-html = 1
#cover-html-dir = ../htmlcov
#cover-package = mongoengine
py3where = build
where = tests
#tests = test_bugfix.py


@@ -1,27 +1,35 @@
from setuptools import setup, find_packages
import os
import sys
from setuptools import setup, find_packages
DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"
# Hack to silence atexit traceback in newer python versions
try:
import multiprocessing
except ImportError:
pass
DESCRIPTION = """MongoEngine is a Python Object-Document
Mapper for working with MongoDB."""
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
if not isinstance(version_tuple[-1], int):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
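# For illustration (assumed inputs): get_version((0, 7, 5)) -> '0.7.5', while a
# non-integer final element is appended verbatim, e.g.
# get_version((0, 7, 'RC1')) -> '0.7RC1'.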
# Dirty hack to get version number from mongoengine/__init__.py - we can't
# import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
print VERSION
print(VERSION)
CLASSIFIERS = [
'Development Status :: 4 - Beta',
@@ -29,18 +37,38 @@ CLASSIFIERS = [
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: Implementation :: CPython",
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
extra_opts = {}
if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker']
extra_opts['packages'] = find_packages(exclude=('tests',))
if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'].append("tests")
extra_opts['package_data'] = {"tests": ["mongoengine.png"]}
else:
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
extra_opts['packages'] = find_packages(exclude=('tests',))
setup(name='mongoengine',
version=VERSION,
packages=find_packages(),
author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com',
maintainer="Ross Lawley",
maintainer_email="ross.lawley@{nospam}gmail.com",
url='http://mongoengine.org/',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
license='MIT',
include_package_data=True,
description=DESCRIPTION,
@@ -48,6 +76,6 @@ setup(name='mongoengine',
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo'],
test_suite='tests',
tests_require=['blinker', 'django>=1.3', 'PIL']
test_suite='nose.collector',
**extra_opts
)


@@ -0,0 +1,96 @@
import unittest
import warnings
from mongoengine import *
from mongoengine.tests import query_counter
class TestWarnings(unittest.TestCase):
def setUp(self):
conn = connect(db='mongoenginetest')
self.warning_list = []
self.showwarning_default = warnings.showwarning
warnings.showwarning = self.append_to_warning_list
def append_to_warning_list(self, message, category, *args):
self.warning_list.append({"message": message,
"category": category})
def tearDown(self):
# restore default handling of warnings
warnings.showwarning = self.showwarning_default
def test_allow_inheritance_future_warning(self):
"""Add FutureWarning for future allow_inhertiance default change.
"""
class SimpleBase(Document):
a = IntField()
class InheritedClass(SimpleBase):
b = IntField()
InheritedClass()
self.assertEqual(len(self.warning_list), 1)
warning = self.warning_list[0]
self.assertEqual(FutureWarning, warning["category"])
self.assertTrue("InheritedClass" in str(warning["message"]))
def test_dbref_reference_field_future_warning(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self')
Person.drop_collection()
p1 = Person()
p1.parent = None
p1.save()
p2 = Person(name="Wilson Jr")
p2.parent = p1
p2.save(cascade=False)
self.assertEqual(len(self.warning_list), 1)
warning = self.warning_list[0]
self.assertEqual(FutureWarning, warning["category"])
self.assertTrue("ReferenceFields will default to using ObjectId"
in str(warning["message"]))
def test_document_save_cascade_future_warning(self):
class Person(Document):
name = StringField()
parent = ReferenceField('self')
Person.drop_collection()
p1 = Person(name="Wilson Snr")
p1.parent = None
p1.save()
p2 = Person(name="Wilson Jr")
p2.parent = p1
p2.parent.name = "Poppa Wilson"
p2.save()
self.assertEqual(len(self.warning_list), 1)
warning = self.warning_list[0]
self.assertEqual(FutureWarning, warning["category"])
self.assertTrue("Cascading saves will default to off in 0.8"
in str(warning["message"]))
def test_document_collection_syntax_warning(self):
class NonAbstractBase(Document):
pass
class InheritedDocumentFailTest(NonAbstractBase):
meta = {'collection': 'fail'}
warning = self.warning_list[0]
self.assertEqual(SyntaxWarning, warning["category"])
self.assertEqual('non_abstract_base',
InheritedDocumentFailTest._get_collection_name())


@@ -1,8 +1,11 @@
import unittest
import datetime
import pymongo
import unittest
import mongoengine.connection
from bson.tz_util import utc
from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError
@@ -65,6 +68,31 @@ class ConnectionTest(unittest.TestCase):
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest2')
def test_connection_kwargs(self):
"""Ensure that connection kwargs get passed to pymongo.
"""
connect('mongoenginetest', alias='t1', tz_aware=True)
conn = get_connection('t1')
self.assertTrue(conn.tz_aware)
connect('mongoenginetest2', alias='t2')
conn = get_connection('t2')
self.assertFalse(conn.tz_aware)
def test_datetime(self):
connect('mongoenginetest', tz_aware=True)
d = datetime.datetime(2010, 5, 5, tzinfo=utc)
class DateDoc(Document):
the_date = DateTimeField(required=True)
DateDoc.drop_collection()
DateDoc(the_date=d).save()
date_doc = DateDoc.objects.first()
self.assertEqual(d, date_doc.the_date)
if __name__ == '__main__':
unittest.main()


@@ -1,5 +1,8 @@
from __future__ import with_statement
import unittest
from bson import DBRef, ObjectId
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.tests import query_counter
@@ -63,6 +66,131 @@ class FieldTest(unittest.TestCase):
User.drop_collection()
Group.drop_collection()
def test_list_item_dereference_dref_false(self):
"""Ensure that DBRef items in ListFields are dereferenced.
"""
class User(Document):
name = StringField()
class Group(Document):
members = ListField(ReferenceField(User, dbref=False))
User.drop_collection()
Group.drop_collection()
for i in xrange(1, 51):
user = User(name='user %s' % i)
user.save()
group = Group(members=User.objects)
group.save()
group.reload() # Confirm reload works
with query_counter() as q:
self.assertEqual(q, 0)
group_obj = Group.objects.first()
self.assertEqual(q, 1)
[m for m in group_obj.members]
self.assertEqual(q, 2)
# Document select_related
with query_counter() as q:
self.assertEqual(q, 0)
group_obj = Group.objects.first().select_related()
self.assertEqual(q, 2)
[m for m in group_obj.members]
self.assertEqual(q, 2)
# Queryset select_related
with query_counter() as q:
self.assertEqual(q, 0)
group_objs = Group.objects.select_related()
self.assertEqual(q, 2)
for group_obj in group_objs:
[m for m in group_obj.members]
self.assertEqual(q, 2)
User.drop_collection()
Group.drop_collection()
def test_handle_old_style_references(self):
"""Ensure that DBRef items in ListFields are dereferenced.
"""
class User(Document):
name = StringField()
class Group(Document):
members = ListField(ReferenceField(User, dbref=True))
User.drop_collection()
Group.drop_collection()
for i in xrange(1, 26):
user = User(name='user %s' % i)
user.save()
group = Group(members=User.objects)
group.save()
group = Group._get_collection().find_one()
# Update the model to change the reference
class Group(Document):
members = ListField(ReferenceField(User, dbref=False))
group = Group.objects.first()
group.members.append(User(name="String!").save())
group.save()
group = Group.objects.first()
self.assertEqual(group.members[0].name, 'user 1')
self.assertEqual(group.members[-1].name, 'String!')
def test_migrate_references(self):
"""Example of migrating ReferenceField storage
"""
# Create some sample data
class User(Document):
name = StringField()
class Group(Document):
author = ReferenceField(User, dbref=True)
members = ListField(ReferenceField(User, dbref=True))
User.drop_collection()
Group.drop_collection()
user = User(name="Ross").save()
group = Group(author=user, members=[user]).save()
raw_data = Group._get_collection().find_one()
self.assertTrue(isinstance(raw_data['author'], DBRef))
self.assertTrue(isinstance(raw_data['members'][0], DBRef))
# Migrate the model definition
class Group(Document):
author = ReferenceField(User, dbref=False)
members = ListField(ReferenceField(User, dbref=False))
# Migrate the data
for g in Group.objects():
g.author = g.author
g.members = g.members
g.save()
group = Group.objects.first()
self.assertEqual(group.author, user)
self.assertEqual(group.members, [user])
raw_data = Group._get_collection().find_one()
self.assertTrue(isinstance(raw_data['author'], ObjectId))
self.assertTrue(isinstance(raw_data['members'][0], ObjectId))
def test_recursive_reference(self):
"""Ensure that ReferenceFields can reference their own documents.
"""
@@ -109,10 +237,10 @@ class FieldTest(unittest.TestCase):
peter = Employee.objects.with_id(peter.id).select_related()
self.assertEqual(q, 2)
self.assertEquals(peter.boss, bill)
self.assertEqual(peter.boss, bill)
self.assertEqual(q, 2)
self.assertEquals(peter.friends, friends)
self.assertEqual(peter.friends, friends)
self.assertEqual(q, 2)
# Queryset select_related
@@ -123,10 +251,10 @@ class FieldTest(unittest.TestCase):
self.assertEqual(q, 2)
for employee in employees:
self.assertEquals(employee.boss, bill)
self.assertEqual(employee.boss, bill)
self.assertEqual(q, 2)
self.assertEquals(employee.friends, friends)
self.assertEqual(employee.friends, friends)
self.assertEqual(q, 2)
def test_circular_reference(self):
@@ -160,7 +288,7 @@ class FieldTest(unittest.TestCase):
daughter.relations.append(self_rel)
daughter.save()
self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
def test_circular_reference_on_self(self):
"""Ensure you can handle circular references
@@ -186,7 +314,7 @@ class FieldTest(unittest.TestCase):
daughter.relations.append(daughter)
daughter.save()
self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
def test_circular_tree_reference(self):
"""Ensure you can handle circular references with more than one level
@@ -228,7 +356,7 @@ class FieldTest(unittest.TestCase):
anna.other.name = "Anna's friends"
anna.save()
self.assertEquals(
self.assertEqual(
"[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]",
"%s" % Person.objects()
)
@@ -781,8 +909,8 @@ class FieldTest(unittest.TestCase):
root.save()
root = root.reload()
self.assertEquals(root.children, [company])
self.assertEquals(company.parents, [root])
self.assertEqual(root.children, [company])
self.assertEqual(company.parents, [root])
def test_dict_in_dbref_instance(self):
@@ -808,5 +936,58 @@ class FieldTest(unittest.TestCase):
room_101.save()
room = Room.objects.first().select_related()
self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
self.assertEquals(room.staffs_with_position[1]['staff'], bob)
self.assertEqual(room.staffs_with_position[0]['staff'], sarah)
self.assertEqual(room.staffs_with_position[1]['staff'], bob)
def test_document_reload_no_inheritance(self):
class Foo(Document):
meta = {'allow_inheritance': False}
bar = ReferenceField('Bar')
baz = ReferenceField('Baz')
class Bar(Document):
meta = {'allow_inheritance': False}
msg = StringField(required=True, default='Blammo!')
class Baz(Document):
meta = {'allow_inheritance': False}
msg = StringField(required=True, default='Kaboom!')
Foo.drop_collection()
Bar.drop_collection()
Baz.drop_collection()
bar = Bar()
bar.save()
baz = Baz()
baz.save()
foo = Foo()
foo.bar = bar
foo.baz = baz
foo.save()
foo.reload()
self.assertEqual(type(foo.bar), Bar)
self.assertEqual(type(foo.baz), Baz)
def test_list_lookup_not_checked_in_map(self):
"""Ensure we dereference list data correctly
"""
class Comment(Document):
id = IntField(primary_key=True)
text = StringField()
class Message(Document):
id = IntField(primary_key=True)
comments = ListField(ReferenceField(Comment))
Comment.drop_collection()
Message.drop_collection()
c1 = Comment(id=0, text='zero').save()
c2 = Comment(id=1, text='one').save()
Message(id=1, comments=[c1, c2]).save()
msg = Message.objects.get(id=1)
self.assertEqual(0, msg.comments[0].id)
self.assertEqual(1, msg.comments[1].id)


@@ -1,24 +1,34 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import unittest
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator
try:
from mongoengine.django.shortcuts import get_document_or_404
settings.configure()
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
settings.configure()
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
except Exception, err:
if PY3:
SessionTestsMixin = type # dummy value so no error
SessionStore = None # dummy value so no error
else:
raise err
class QuerySetTest(unittest.TestCase):
def setUp(self):
if PY3:
raise SkipTest('django does not have Python 3 support')
connect(db='mongoenginetest')
class Person(Document):
@@ -99,7 +109,14 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
backend = SessionStore
def setUp(self):
if PY3:
raise SkipTest('django does not have Python 3 support')
connect(db='mongoenginetest')
MongoSession.drop_collection()
super(MongoDBSessionTest, self).setUp()
def test_first_save(self):
session = SessionStore()
session['test'] = True
session.save()
self.assertTrue('test' in session)

File diff suppressed because it is too large


@@ -25,14 +25,14 @@ class DynamicDocTest(unittest.TestCase):
p.name = "James"
p.age = 34
self.assertEquals(p.to_mongo(),
self.assertEqual(p.to_mongo(),
{"_types": ["Person"], "_cls": "Person",
"name": "James", "age": 34}
)
p.save()
self.assertEquals(self.Person.objects.first().age, 34)
self.assertEqual(self.Person.objects.first().age, 34)
# Confirm no changes to self.Person
self.assertFalse(hasattr(self.Person, 'age'))
@@ -40,11 +40,11 @@ class DynamicDocTest(unittest.TestCase):
def test_dynamic_document_delta(self):
"""Ensures simple dynamic documents can delta correctly"""
p = self.Person(name="James", age=34)
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))
self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))
p.doc = 123
del(p.doc)
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))
self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))
def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
@@ -58,7 +58,7 @@ class DynamicDocTest(unittest.TestCase):
p.save()
p = self.Person.objects.get()
self.assertEquals(p.misc, {'hello': 'world'})
self.assertEqual(p.misc, {'hello': 'world'})
def test_delete_dynamic_field(self):
"""Test deleting a dynamic field works"""
@@ -73,10 +73,10 @@ class DynamicDocTest(unittest.TestCase):
p.save()
p = self.Person.objects.get()
self.assertEquals(p.misc, {'hello': 'world'})
self.assertEqual(p.misc, {'hello': 'world'})
collection = self.db[self.Person._get_collection_name()]
obj = collection.find_one()
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])
del(p.misc)
p.save()
@@ -85,7 +85,7 @@ class DynamicDocTest(unittest.TestCase):
self.assertFalse(hasattr(p, 'misc'))
obj = collection.find_one()
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])
def test_dynamic_document_queries(self):
"""Ensure we can query dynamic fields"""
@@ -94,10 +94,10 @@ class DynamicDocTest(unittest.TestCase):
p.age = 22
p.save()
self.assertEquals(1, self.Person.objects(age=22).count())
self.assertEqual(1, self.Person.objects(age=22).count())
p = self.Person.objects(age=22)
p = p.get()
self.assertEquals(22, p.age)
self.assertEqual(22, p.age)
def test_complex_dynamic_document_queries(self):
class Person(DynamicDocument):
@@ -117,8 +117,8 @@ class DynamicDocTest(unittest.TestCase):
p2.age = 10
p2.save()
self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
self.assertEquals(Person.objects(age__gte=10).count(), 1)
self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
self.assertEqual(Person.objects(age__gte=10).count(), 1)
def test_complex_data_lookups(self):
"""Ensure you can query dynamic document dynamic fields"""
@@ -126,7 +126,7 @@ class DynamicDocTest(unittest.TestCase):
p.misc = {'hello': 'world'}
p.save()
self.assertEquals(1, self.Person.objects(misc__hello='world').count())
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
def test_inheritance(self):
"""Ensure that dynamic document plays nice with inheritance"""
@@ -146,8 +146,8 @@ class DynamicDocTest(unittest.TestCase):
joe_bloggs.age = 20
joe_bloggs.save()
self.assertEquals(1, self.Person.objects(age=20).count())
self.assertEquals(1, Employee.objects(age=20).count())
self.assertEqual(1, self.Person.objects(age=20).count())
self.assertEqual(1, Employee.objects(age=20).count())
joe_bloggs = self.Person.objects.first()
self.assertTrue(isinstance(joe_bloggs, Employee))
@@ -170,7 +170,7 @@ class DynamicDocTest(unittest.TestCase):
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
"embedded_field": {
"_types": ['Embedded'], "_cls": "Embedded",
"string_field": "hello",
@@ -182,11 +182,11 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc.embedded_field.__class__, Embedded)
self.assertEquals(doc.embedded_field.string_field, "hello")
self.assertEquals(doc.embedded_field.int_field, 1)
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])
def test_complex_embedded_documents(self):
"""Test complex dynamic embedded documents setups"""
@@ -213,7 +213,7 @@ class DynamicDocTest(unittest.TestCase):
embedded_1.list_field = ['1', 2, embedded_2]
doc.embedded_field = embedded_1
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
"embedded_field": {
"_types": ['Embedded'], "_cls": "Embedded",
"string_field": "hello",
@@ -230,20 +230,20 @@ class DynamicDocTest(unittest.TestCase):
})
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc.embedded_field.__class__, Embedded)
self.assertEquals(doc.embedded_field.string_field, "hello")
self.assertEquals(doc.embedded_field.int_field, 1)
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(doc.embedded_field.list_field[0], '1')
self.assertEquals(doc.embedded_field.list_field[1], 2)
self.assertEqual(doc.embedded_field.__class__, Embedded)
self.assertEqual(doc.embedded_field.string_field, "hello")
self.assertEqual(doc.embedded_field.int_field, 1)
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.list_field[1], 2)
embedded_field = doc.embedded_field.list_field[2]
self.assertEquals(embedded_field.__class__, Embedded)
self.assertEquals(embedded_field.string_field, "hello")
self.assertEquals(embedded_field.int_field, 1)
self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])
self.assertEqual(embedded_field.__class__, Embedded)
self.assertEqual(embedded_field.string_field, "hello")
self.assertEqual(embedded_field.int_field, 1)
self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}])
def test_delta_for_dynamic_documents(self):
p = self.Person()
@@ -252,18 +252,18 @@ class DynamicDocTest(unittest.TestCase):
p.save()
p.age = 24
self.assertEquals(p.age, 24)
self.assertEquals(p._get_changed_fields(), ['age'])
self.assertEquals(p._delta(), ({'age': 24}, {}))
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ['age'])
self.assertEqual(p._delta(), ({'age': 24}, {}))
p = self.Person.objects(age=22).get()
p.age = 24
self.assertEquals(p.age, 24)
self.assertEquals(p._get_changed_fields(), ['age'])
self.assertEquals(p._delta(), ({'age': 24}, {}))
self.assertEqual(p.age, 24)
self.assertEqual(p._get_changed_fields(), ['age'])
self.assertEqual(p._delta(), ({'age': 24}, {}))
p.save()
self.assertEquals(1, self.Person.objects(age=24).count())
self.assertEqual(1, self.Person.objects(age=24).count())
def test_delta(self):
@@ -275,40 +275,40 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc._get_changed_fields(), [])
self.assertEquals(doc._delta(), ({}, {}))
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
doc.string_field = 'hello'
self.assertEquals(doc._get_changed_fields(), ['string_field'])
self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))
self.assertEqual(doc._get_changed_fields(), ['string_field'])
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
doc._changed_fields = []
doc.int_field = 1
self.assertEquals(doc._get_changed_fields(), ['int_field'])
self.assertEquals(doc._delta(), ({'int_field': 1}, {}))
self.assertEqual(doc._get_changed_fields(), ['int_field'])
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
doc._changed_fields = []
dict_value = {'hello': 'world', 'ping': 'pong'}
doc.dict_field = dict_value
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
doc._changed_fields = []
list_value = ['1', 2, {'hello': 'world'}]
doc.list_field = list_value
self.assertEquals(doc._get_changed_fields(), ['list_field'])
self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
# Test unsetting
doc._changed_fields = []
doc.dict_field = {}
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
doc._changed_fields = []
doc.list_field = []
self.assertEquals(doc._get_changed_fields(), ['list_field'])
self.assertEquals(doc._delta(), ({}, {'list_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['list_field'])
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
def test_delta_recursive(self):
"""Testing deltaing works with dynamic documents"""
@@ -323,8 +323,8 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc = Doc.objects.first()
self.assertEquals(doc._get_changed_fields(), [])
self.assertEquals(doc._delta(), ({}, {}))
self.assertEqual(doc._get_changed_fields(), [])
self.assertEqual(doc._delta(), ({}, {}))
embedded_1 = Embedded()
embedded_1.string_field = 'hello'
@@ -333,7 +333,7 @@ class DynamicDocTest(unittest.TestCase):
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field = embedded_1
self.assertEquals(doc._get_changed_fields(), ['embedded_field'])
self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
embedded_delta = {
'string_field': 'hello',
@@ -341,28 +341,28 @@ class DynamicDocTest(unittest.TestCase):
'dict_field': {'hello': 'world'},
'list_field': ['1', 2, {'hello': 'world'}]
}
self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
embedded_delta.update({
'_types': ['Embedded'],
'_cls': 'Embedded',
})
self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))
self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {}))
doc.save()
doc.reload()
doc.embedded_field.dict_field = {}
self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field'])
self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
doc.save()
doc.reload()
doc.embedded_field.list_field = []
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
doc.save()
doc.reload()
@@ -373,8 +373,8 @@ class DynamicDocTest(unittest.TestCase):
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
doc.embedded_field.list_field = ['1', 2, embedded_2]
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEquals(doc.embedded_field._delta(), ({
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'list_field': ['1', 2, {
'_cls': 'Embedded',
'_types': ['Embedded'],
@@ -385,7 +385,7 @@ class DynamicDocTest(unittest.TestCase):
}]
}, {}))
self.assertEquals(doc._delta(), ({
self.assertEqual(doc._delta(), ({
'embedded_field.list_field': ['1', 2, {
'_cls': 'Embedded',
'_types': ['Embedded'],
@@ -398,25 +398,25 @@ class DynamicDocTest(unittest.TestCase):
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
self.assertEquals(doc.embedded_field.list_field[0], '1')
self.assertEquals(doc.embedded_field.list_field[1], 2)
self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, [])
self.assertEqual(doc.embedded_field.list_field[0], '1')
self.assertEqual(doc.embedded_field.list_field[1], 2)
for k in doc.embedded_field.list_field[2]._fields:
self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])
self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k])
doc.embedded_field.list_field[2].string_field = 'world'
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')
self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world')
# Test multiple assignments
doc.embedded_field.list_field[2].string_field = 'hello world'
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEquals(doc.embedded_field._delta(), ({
self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
self.assertEqual(doc.embedded_field._delta(), ({
'list_field': ['1', 2, {
'_types': ['Embedded'],
'_cls': 'Embedded',
@@ -424,7 +424,7 @@ class DynamicDocTest(unittest.TestCase):
'int_field': 1,
'list_field': ['1', 2, {'hello': 'world'}],
'dict_field': {'hello': 'world'}}]}, {}))
self.assertEquals(doc._delta(), ({
self.assertEqual(doc._delta(), ({
'embedded_field.list_field': ['1', 2, {
'_types': ['Embedded'],
'_cls': 'Embedded',
@@ -435,32 +435,32 @@ class DynamicDocTest(unittest.TestCase):
]}, {}))
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')
self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world')
# Test list native methods
doc.embedded_field.list_field[2].list_field.pop(0)
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
doc.save()
doc.reload()
doc.embedded_field.list_field[2].list_field.append(1)
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])
self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])
doc.embedded_field.list_field[2].list_field.sort()
doc.embedded_field.list_field[2].list_field.sort(key=str)  # use str as a key to allow comparing otherwise uncomparable types
doc.save()
doc.reload()
self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])
self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
doc.save()
doc.reload()
del(doc.embedded_field.list_field[2].list_field)
self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))
doc.save()
doc.reload()
@@ -470,8 +470,8 @@ class DynamicDocTest(unittest.TestCase):
doc.reload()
doc.dict_field['embedded'].string_field = 'Hello World'
self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))
self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))
def test_indexes(self):
"""Ensure that indexes are used when meta[indexes] is specified.
@@ -500,3 +500,34 @@ class DynamicDocTest(unittest.TestCase):
self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
in info)
self.assertTrue([('_types', 1), ('date', -1)] in info)
def test_dynamic_and_embedded(self):
"""Ensure embedded documents play nicely"""
class Address(EmbeddedDocument):
city = StringField()
class Person(DynamicDocument):
name = StringField()
meta = {'allow_inheritance': True}
Person.drop_collection()
Person(name="Ross", address=Address(city="London")).save()
person = Person.objects.first()
person.address.city = "Lundenne"
person.save()
self.assertEqual(Person.objects.first().address.city, "Lundenne")
person = Person.objects.first()
person.address = Address(city="Londinium")
person.save()
self.assertEqual(Person.objects.first().address.city, "Londinium")
person = Person.objects.first()
person.age = 35
person.save()
self.assertEqual(Person.objects.first().age, 35)

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,33 @@
import unittest
import pymongo
from pymongo import ReadPreference, ReplicaSetConnection
import mongoengine
from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError
class ConnectionTest(unittest.TestCase):
def tearDown(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
mongoengine.connection._dbs = {}
def test_replicaset_uri_passes_read_preference(self):
"""Requires a replica set called "rs" on port 27017
"""
try:
conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
except ConnectionError, e:
return
if not isinstance(conn, ReplicaSetConnection):
return
self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
if __name__ == '__main__':
unittest.main()


@@ -212,9 +212,9 @@ class SignalTests(unittest.TestCase):
# The output of this signal is not entirely deterministic. The reloaded
# object will have an object ID. Hence, we only check part of the output
self.assertEquals(signal_output[3],
self.assertEqual(signal_output[3],
"pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
self.assertEquals(signal_output[-2:],
self.assertEqual(signal_output[-2:],
["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
"Is loaded",])