Compare commits

162 Commits
| SHA1 |
|---|
| 2801b38c75 |
| dc3fea875e |
| aab8c2b687 |
| 3577773af3 |
| 8ac9e6dc19 |
| 4b3cea9e78 |
| 2420b5e937 |
| f23a976bea |
| 4226cd08f1 |
| 7a230f1693 |
| a43d0d4612 |
| 78a40a0c70 |
| 2c69d8f0b0 |
| 0018c38b83 |
| 8df81571fc |
| 48f988acd7 |
| 6526923345 |
| 24fd1acce6 |
| cbb9235dc5 |
| 19ec2c9bc9 |
| 6459d4c0b6 |
| 1304f2721f |
| 8bde0c0e53 |
| 598ffd3e5c |
| 601f0eb168 |
| 3070e0bf5d |
| 83c11a9834 |
| 5c912b930e |
| 1b17fb0ae7 |
| d83e67c121 |
| ae39ed94c9 |
| 1e51180d42 |
| 87ba69d02e |
| 8879d5560b |
| c1621ee39c |
| b0aa98edb4 |
| a7a2fe0216 |
| 8e50f5fa3c |
| 31793520bf |
| 0b6b0368c5 |
| d1d30a9280 |
| 420c6f2d1e |
| 34f06c4971 |
| 9cc4bbd49d |
| f66b312869 |
| 2405ba8708 |
| a91b6bff8b |
| 450dc11a68 |
| 1ce2f84ce5 |
| f55b241cfa |
| 34d08ce8ef |
| 4f5aa8c43b |
| 27b375060d |
| cbfdc401f7 |
| b58bf3e0ce |
| 1fff7e9aca |
| 494b981b13 |
| dd93995bd0 |
| b3bb4add9c |
| d305e71c27 |
| 0d92baa670 |
| 7a1b110f62 |
| db8df057ce |
| 5d8ffded40 |
| 07f3e5356d |
| 1ece62f960 |
| 056c604dc3 |
| 2d08eec093 |
| 614b590551 |
| 6d90ce250a |
| ea31846a19 |
| e6317776c1 |
| efeaba39a4 |
| 1a97dfd479 |
| 9fecf2b303 |
| 3d0d2f48ad |
| 581605e0e2 |
| 45d3a7f6ff |
| 7ca2ea0766 |
| 89220c142b |
| c73ce3d220 |
| b0f127af4e |
| 766d54795f |
| bd41c6eea4 |
| 2435786713 |
| 9e7ea64bd2 |
| 89a6eee6af |
| 2ec1476e50 |
| 2d9b581f34 |
| 5bb63f645b |
| a856c7cc37 |
| 26db9d8a9d |
| 8060179f6d |
| 77ebd87fed |
| e4bc92235d |
| 27a4d83ce8 |
| ece9b902f8 |
| 65a2f8a68b |
| 9c212306b8 |
| 1fdc7ce6bb |
| 0b22c140c5 |
| 944aa45459 |
| c9842ba13a |
| 8840680303 |
| 376b9b1316 |
| 54bb1cb3d9 |
| 43468b474e |
| 28a957c684 |
| ec5ddbf391 |
| bab186e195 |
| bc7e874476 |
| 97114b5948 |
| 45e015d71d |
| 0ff6531953 |
| ba298c3cfc |
| 0479bea40b |
| a536097804 |
| bbefd0fdf9 |
| 2aa8b04c21 |
| aeebdfec51 |
| debfcdf498 |
| 5c4b33e8e6 |
| eb54037b66 |
| f48af8db3b |
| 97c5b957dd |
| 95e7397803 |
| 43a989978a |
| 27734a7c26 |
| dd786d6fc4 |
| be1c28fc45 |
| 20e41b3523 |
| e07ecc5cf8 |
| 3360b72531 |
| 233b13d670 |
| 5bcbb4fdaa |
| dbe2f5f2b8 |
| ca8b58d66d |
| f80f0b416f |
| d7765511ee |
| 0240a09056 |
| ab15c4eec9 |
| 4ce1ba81a6 |
| 530440b333 |
| b80fda36af |
| 42d24263ef |
| 1e2797e7ce |
| f7075766fc |
| 5647ca70bb |
| 2b8aa6bafc |
| 410443471c |
| 0bb9781b91 |
| 2769d6d7ca |
| 120b9433c2 |
| 605092bd88 |
| a4a8c94374 |
| 0e93f6c0db |
| c474ca0f13 |
| 49a66ba81a |
| a1d43fecd9 |
| 7e376b40bb |
| 540a0cc59c |
| 83eb4f6b16 |

.gitignore (3 changes, vendored)
							| @@ -13,4 +13,5 @@ env/ | ||||
| .settings | ||||
| .project | ||||
| .pydevproject | ||||
| tests/bugfix.py | ||||
| tests/test_bugfix.py | ||||
| htmlcov/ | ||||

.travis.yml (12 additions, new file)
							| @@ -0,0 +1,12 @@ | ||||
| # http://travis-ci.org/#!/MongoEngine/mongoengine | ||||
| language: python | ||||
| python: | ||||
|     - 2.6 | ||||
|     - 2.7 | ||||
| install: | ||||
|     - sudo apt-get install zlib1g zlib1g-dev | ||||
|     - sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/ | ||||
|     - pip install PIL --use-mirrors ; true | ||||
|     - python setup.py install | ||||
| script: | ||||
|     - python setup.py test | ||||

AUTHORS (15 additions)
							| @@ -100,3 +100,18 @@ that much better: | ||||
|  * Jacob Peddicord | ||||
|  * Nils Hasenbanck | ||||
|  * mostlystatic | ||||
|  * Greg Banks | ||||
|  * swashbuckler | ||||
|  * Adam Reeve | ||||
|  * Anthony Nemitz | ||||
|  * deignacio | ||||
|  * shaunduncan | ||||
|  * Meir Kriheli | ||||
|  * Andrey Fedoseev | ||||
|  * aparajita | ||||
|  * Tristan Escalada | ||||
|  * Alexander Koshelev | ||||
|  * Jaime Irurzun | ||||
|  * Alexandre González | ||||
|  * Thomas Steinacher | ||||
|  * Tommi Komulainen | ||||

LICENSE (2 changes)
							| @@ -1,4 +1,4 @@ | ||||
| Copyright (c) 2009-2010 Harry Marr | ||||
| Copyright (c) 2009-2012 See AUTHORS | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person | ||||
| obtaining a copy of this software and associated documentation | ||||
|   | ||||
| @@ -2,9 +2,13 @@ | ||||
| MongoEngine | ||||
| =========== | ||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||
| :Repository: https://github.com/MongoEngine/mongoengine | ||||
| :Author: Harry Marr (http://github.com/hmarr) | ||||
| :Maintainer: Ross Lawley (http://github.com/rozza) | ||||
|  | ||||
| .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master | ||||
|   :target: http://travis-ci.org/MongoEngine/mongoengine | ||||
|  | ||||
| About | ||||
| ===== | ||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||
| @@ -22,7 +26,7 @@ setup.py install``. | ||||
|  | ||||
| Dependencies | ||||
| ============ | ||||
| - pymongo 1.1+ | ||||
| - pymongo 2.1.1+ | ||||
| - sphinx (optional - for documentation generation) | ||||
|  | ||||
| Examples | ||||
| @@ -96,3 +100,4 @@ Contributing | ||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to | ||||
| contribute to the project, fork it on GitHub and send a pull request, all | ||||
| contributions and suggestions are welcome! | ||||
|  | ||||
|   | ||||
| @@ -31,6 +31,9 @@ Documents | ||||
| .. autoclass:: mongoengine.document.MapReduceDocument | ||||
|   :members: | ||||
|  | ||||
| .. autoclass:: mongoengine.ValidationError | ||||
|   :members: | ||||
|  | ||||
| Querying | ||||
| ======== | ||||
|  | ||||
| @@ -44,25 +47,28 @@ Querying | ||||
| Fields | ||||
| ====== | ||||
|  | ||||
| .. autoclass:: mongoengine.StringField | ||||
| .. autoclass:: mongoengine.URLField | ||||
| .. autoclass:: mongoengine.EmailField | ||||
| .. autoclass:: mongoengine.IntField | ||||
| .. autoclass:: mongoengine.FloatField | ||||
| .. autoclass:: mongoengine.DecimalField | ||||
| .. autoclass:: mongoengine.DateTimeField | ||||
| .. autoclass:: mongoengine.BinaryField | ||||
| .. autoclass:: mongoengine.BooleanField | ||||
| .. autoclass:: mongoengine.ComplexDateTimeField | ||||
| .. autoclass:: mongoengine.ListField | ||||
| .. autoclass:: mongoengine.SortedListField | ||||
| .. autoclass:: mongoengine.DateTimeField | ||||
| .. autoclass:: mongoengine.DecimalField | ||||
| .. autoclass:: mongoengine.DictField | ||||
| .. autoclass:: mongoengine.DynamicField | ||||
| .. autoclass:: mongoengine.EmailField | ||||
| .. autoclass:: mongoengine.EmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.FileField | ||||
| .. autoclass:: mongoengine.FloatField | ||||
| .. autoclass:: mongoengine.GenericEmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.GenericReferenceField | ||||
| .. autoclass:: mongoengine.GeoPointField | ||||
| .. autoclass:: mongoengine.ImageField | ||||
| .. autoclass:: mongoengine.IntField | ||||
| .. autoclass:: mongoengine.ListField | ||||
| .. autoclass:: mongoengine.MapField | ||||
| .. autoclass:: mongoengine.ObjectIdField | ||||
| .. autoclass:: mongoengine.ReferenceField | ||||
| .. autoclass:: mongoengine.GenericReferenceField | ||||
| .. autoclass:: mongoengine.EmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.GenericEmbeddedDocumentField | ||||
| .. autoclass:: mongoengine.BooleanField | ||||
| .. autoclass:: mongoengine.FileField | ||||
| .. autoclass:: mongoengine.BinaryField | ||||
| .. autoclass:: mongoengine.GeoPointField | ||||
| .. autoclass:: mongoengine.SequenceField | ||||
| .. autoclass:: mongoengine.SortedListField | ||||
| .. autoclass:: mongoengine.StringField | ||||
| .. autoclass:: mongoengine.URLField | ||||
| .. autoclass:: mongoengine.UUIDField | ||||
|   | ||||
| @@ -2,6 +2,115 @@ | ||||
| Changelog | ||||
| ========= | ||||
|  | ||||
| Changes in 0.6.19 | ||||
| ================= | ||||
|  | ||||
| - Added Binary support to UUID (MongoEngine/mongoengine#47) | ||||
| - Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) | ||||
| - Fixed BinaryField python value issue (MongoEngine/mongoengine#48) | ||||
| - Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41) | ||||
| - Fixed queryset manager issue (MongoEngine/mongoengine#52) | ||||
| - Fixed FileField comparision (hmarr/mongoengine#547) | ||||
|  | ||||
| Changes in 0.6.18 | ||||
| ================= | ||||
| - Fixed recursion loading bug in _get_changed_fields | ||||
|  | ||||
| Changes in 0.6.17 | ||||
| ================= | ||||
| - Fixed issue with custom queryset manager expecting explict variable names | ||||
|  | ||||
| Changes in 0.6.16 | ||||
| ================= | ||||
| - Fixed issue where db_alias wasn't inherited | ||||
|  | ||||
| Changes in 0.6.15 | ||||
| ================= | ||||
| - Updated validation error messages | ||||
| - Added support for null / zero / false values in item_frequencies | ||||
| - Fixed cascade save edge case | ||||
| - Fixed geo index creation through reference fields | ||||
| - Added support for args / kwargs when using @queryset_manager | ||||
| - Deref list custom id fix | ||||
|  | ||||
| Changes in 0.6.14 | ||||
| ================= | ||||
| - Fixed error dict with nested validation | ||||
| - Fixed Int/Float fields and not equals None | ||||
| - Exclude tests from installation | ||||
| - Allow tuples for index meta | ||||
| - Fixed use of str in instance checks | ||||
| - Fixed unicode support in transform update | ||||
| - Added support for add_to_set and each | ||||
|  | ||||
| Changes in 0.6.13 | ||||
| ================= | ||||
| - Fixed EmbeddedDocument db_field validation issue | ||||
| - Fixed StringField unicode issue | ||||
| - Fixes __repr__ modifying the cursor | ||||
|  | ||||
| Changes in 0.6.12 | ||||
| ================= | ||||
| - Fixes scalar lookups for primary_key | ||||
| - Fixes error with _delta handling DBRefs | ||||
|  | ||||
| Changes in 0.6.11 | ||||
| ================== | ||||
| - Fixed inconsistency handling None values field attrs | ||||
| - Fixed map_field embedded db_field issue | ||||
| - Fixed .save() _delta issue with DbRefs | ||||
| - Fixed Django TestCase | ||||
| - Added cmp to Embedded Document | ||||
| - Added PULL reverse_delete_rule | ||||
| - Fixed CASCADE delete bug | ||||
| - Fixed db_field data load error | ||||
| - Fixed recursive save with FileField | ||||
|  | ||||
| Changes in 0.6.10 | ||||
| ================= | ||||
| - Fixed basedict / baselist to return super(..) | ||||
| - Promoted BaseDynamicField to DynamicField | ||||
|  | ||||
| Changes in 0.6.9 | ||||
| ================ | ||||
| - Fixed sparse indexes on inherited docs | ||||
| - Removed FileField auto deletion, needs more work maybe 0.7 | ||||
|  | ||||
| Changes in 0.6.8 | ||||
| ================ | ||||
| - Fixed FileField losing reference when no default set | ||||
| - Removed possible race condition from FileField (grid_file) | ||||
| - Added assignment to save, can now do: b = MyDoc(**kwargs).save() | ||||
| - Added support for pull operations on nested EmbeddedDocuments | ||||
| - Added support for choices with GenericReferenceFields | ||||
| - Added support for choices with GenericEmbeddedDocumentFields | ||||
| - Fixed Django 1.4 sessions first save data loss | ||||
| - FileField now automatically delete files on .delete() | ||||
| - Fix for GenericReference to_mongo method | ||||
| - Fixed connection regression | ||||
| - Updated Django User document, now allows inheritance | ||||
|  | ||||
| Changes in 0.6.7 | ||||
| ================ | ||||
| - Fixed indexing on '_id' or 'pk' or 'id' | ||||
| - Invalid data from the DB now raises a InvalidDocumentError | ||||
| - Cleaned up the Validation Error - docs and code | ||||
| - Added meta `auto_create_index` so you can disable index creation | ||||
| - Added write concern options to inserts | ||||
| - Fixed typo in meta for index options | ||||
| - Bug fix Read preference now passed correctly | ||||
| - Added support for File like objects for GridFS | ||||
| - Fix for #473 - Dereferencing abstracts | ||||
|  | ||||
| Changes in 0.6.6 | ||||
| ================ | ||||
| - Django 1.4 fixed (finally) | ||||
| - Added tests for Django | ||||
|  | ||||
| Changes in 0.6.5 | ||||
| ================ | ||||
| - More Django updates | ||||
|  | ||||
| Changes in 0.6.4 | ||||
| ================ | ||||
|  | ||||
|   | ||||
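The 0.6.8 changelog entry above notes that `.save()` now returns the document, so construction and persistence can be chained. A minimal sketch of that pattern; the class, field, and database names are hypothetical:

```python
from mongoengine import Document, StringField, connect

connect('example_db')  # assumes a local mongod; database name is hypothetical

class MyDoc(Document):
    name = StringField()

# Per the 0.6.8 changelog, save() returns the saved document,
# so it can be chained onto the constructor.
b = MyDoc(name='example').save()
assert b.id is not None  # the document was persisted and assigned an id
```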
| @@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called | ||||
| to retrieve the value (such as in the above example). The field types available | ||||
| are as follows: | ||||
|  | ||||
| * :class:`~mongoengine.StringField` | ||||
| * :class:`~mongoengine.URLField` | ||||
| * :class:`~mongoengine.EmailField` | ||||
| * :class:`~mongoengine.IntField` | ||||
| * :class:`~mongoengine.FloatField` | ||||
| * :class:`~mongoengine.DecimalField` | ||||
| * :class:`~mongoengine.DateTimeField` | ||||
| * :class:`~mongoengine.BinaryField` | ||||
| * :class:`~mongoengine.BooleanField` | ||||
| * :class:`~mongoengine.ComplexDateTimeField` | ||||
| * :class:`~mongoengine.ListField` | ||||
| * :class:`~mongoengine.SortedListField` | ||||
| * :class:`~mongoengine.DateTimeField` | ||||
| * :class:`~mongoengine.DecimalField` | ||||
| * :class:`~mongoengine.DictField` | ||||
| * :class:`~mongoengine.DynamicField` | ||||
| * :class:`~mongoengine.EmailField` | ||||
| * :class:`~mongoengine.EmbeddedDocumentField` | ||||
| * :class:`~mongoengine.FileField` | ||||
| * :class:`~mongoengine.FloatField` | ||||
| * :class:`~mongoengine.GenericEmbeddedDocumentField` | ||||
| * :class:`~mongoengine.GenericReferenceField` | ||||
| * :class:`~mongoengine.GeoPointField` | ||||
| * :class:`~mongoengine.ImageField` | ||||
| * :class:`~mongoengine.IntField` | ||||
| * :class:`~mongoengine.ListField` | ||||
| * :class:`~mongoengine.MapField` | ||||
| * :class:`~mongoengine.ObjectIdField` | ||||
| * :class:`~mongoengine.ReferenceField` | ||||
| * :class:`~mongoengine.GenericReferenceField` | ||||
| * :class:`~mongoengine.EmbeddedDocumentField` | ||||
| * :class:`~mongoengine.GenericEmbeddedDocumentField` | ||||
| * :class:`~mongoengine.BooleanField` | ||||
| * :class:`~mongoengine.FileField` | ||||
| * :class:`~mongoengine.BinaryField` | ||||
| * :class:`~mongoengine.GeoPointField` | ||||
| * :class:`~mongoengine.SequenceField` | ||||
| * :class:`~mongoengine.SortedListField` | ||||
| * :class:`~mongoengine.StringField` | ||||
| * :class:`~mongoengine.URLField` | ||||
| * :class:`~mongoengine.UUIDField` | ||||
|  | ||||
| Field arguments | ||||
| --------------- | ||||
| @@ -98,7 +101,7 @@ arguments can be set on all fields: | ||||
|  | ||||
| :attr:`required` (Default: False) | ||||
|     If set to True and the field is not set on the document instance, a | ||||
|     :class:`~mongoengine.base.ValidationError` will be raised when the document is | ||||
|     :class:`~mongoengine.ValidationError` will be raised when the document is | ||||
|     validated. | ||||
|  | ||||
| :attr:`default` (Default: None) | ||||
| @@ -256,6 +259,35 @@ as the constructor's argument:: | ||||
|         content = StringField() | ||||
|  | ||||
|  | ||||
| .. _one-to-many-with-listfields: | ||||
|  | ||||
| One to Many with ListFields | ||||
| ''''''''''''''''''''''''''' | ||||
|  | ||||
| If you are implementing a one to many relationship via a list of references, | ||||
| then the references are stored as DBRefs and to query you need to pass an | ||||
| instance of the object to the query:: | ||||
|  | ||||
|     class User(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|     class Page(Document): | ||||
|         content = StringField() | ||||
|         authors = ListField(ReferenceField(User)) | ||||
|  | ||||
|     bob = User(name="Bob Jones").save() | ||||
|     john = User(name="John Smith").save() | ||||
|  | ||||
|     Page(content="Test Page", authors=[bob, john]).save() | ||||
|     Page(content="Another Page", authors=[john]).save() | ||||
|  | ||||
|     # Find all pages Bob authored | ||||
|     Page.objects(authors__in=[bob]) | ||||
|  | ||||
|     # Find all pages that both Bob and John have authored | ||||
|     Page.objects(authors__all=[bob, john]) | ||||
|  | ||||
|  | ||||
| Dealing with deletion of referred documents | ||||
| ''''''''''''''''''''''''''''''''''''''''''' | ||||
| By default, MongoDB doesn't check the integrity of your data, so deleting | ||||
| @@ -289,6 +321,10 @@ Its value can take any of the following constants: | ||||
| :const:`mongoengine.CASCADE` | ||||
|   Any object containing fields that are refererring to the object being deleted | ||||
|   are deleted first. | ||||
| :const:`mongoengine.PULL` | ||||
|   Removes the reference to the object (using MongoDB's "pull" operation) | ||||
|   from any object's fields of | ||||
|   :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`). | ||||
|  | ||||
|  | ||||
| .. warning:: | ||||
|   | ||||
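A minimal sketch of the new `PULL` rule documented above, reusing the `User`/`Page` shape from the earlier ListField example; it assumes a live connection and shows the reference being pulled from the list when the referenced document is deleted:

```python
from mongoengine import (Document, StringField, ListField,
                         ReferenceField, PULL, connect)

connect('example_db')  # assumes a local mongod

class User(Document):
    name = StringField()

class Page(Document):
    content = StringField()
    # PULL removes a deleted User from this list via MongoDB's $pull
    authors = ListField(ReferenceField(User, reverse_delete_rule=PULL))

bob = User(name='Bob').save()
page = Page(content='Test Page', authors=[bob]).save()

bob.delete()
page.reload()
assert page.authors == []  # the dangling reference was pulled, not left behind
```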
| @@ -91,5 +91,5 @@ is an alias to :attr:`id`:: | ||||
| .. note:: | ||||
|  | ||||
|    If you define your own primary key field, the field implicitly becomes | ||||
|    required, so a :class:`ValidationError` will be thrown if you don't provide | ||||
|    it. | ||||
|    required, so a :class:`~mongoengine.ValidationError` will be thrown if | ||||
|    you don't provide it. | ||||
|   | ||||
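To illustrate the note above, a sketch of a custom primary key; `Country` and `iso_code` are hypothetical names, and a local connection is assumed:

```python
from mongoengine import Document, StringField, ValidationError, connect

connect('example_db')  # assumes a local mongod

class Country(Document):
    iso_code = StringField(primary_key=True)

c = Country(iso_code='GB').save()
assert c.pk == 'GB'  # pk is an alias for the custom id

try:
    Country().save()  # no primary key supplied
except ValidationError:
    pass  # the implicit 'required' kicks in, as the note describes
```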
| @@ -65,7 +65,7 @@ Deleting stored files is achieved with the :func:`delete` method:: | ||||
|  | ||||
|     marmot.photo.delete() | ||||
|  | ||||
| .. note:: | ||||
| .. warning:: | ||||
|  | ||||
|     The FileField in a Document actually only stores the ID of a file in a | ||||
|     separate GridFS collection. This means that deleting a document | ||||
|   | ||||
| @@ -232,7 +232,7 @@ custom manager methods as you like:: | ||||
|     BlogPost(title='test1', published=False).save() | ||||
|     BlogPost(title='test2', published=True).save() | ||||
|     assert len(BlogPost.objects) == 2 | ||||
|     assert len(BlogPost.live_posts) == 1 | ||||
|     assert len(BlogPost.live_posts()) == 1 | ||||
|  | ||||
| Custom QuerySets | ||||
| ================ | ||||
| @@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a | ||||
| :class:`~mongoengine.Document`\ s ``meta`` dictionary:: | ||||
|  | ||||
|     class AwesomerQuerySet(QuerySet): | ||||
|         pass | ||||
|  | ||||
|         def get_awesome(self): | ||||
|             return self.filter(awesome=True) | ||||
|  | ||||
|     class Page(Document): | ||||
|         meta = {'queryset_class': AwesomerQuerySet} | ||||
|  | ||||
|     # To call: | ||||
|     Page.objects.get_awesome() | ||||
|  | ||||
| .. versionadded:: 0.4 | ||||
|  | ||||
| Aggregation | ||||
|   | ||||
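The corrected assertion above calls `live_posts()` as a method. The `BlogPost` definition is not shown in the hunk, but a typical declaration with `@queryset_manager` would look roughly like this sketch:

```python
from mongoengine import (Document, StringField, BooleanField,
                         queryset_manager, connect)

connect('example_db')  # assumes a local mongod

class BlogPost(Document):
    title = StringField()
    published = BooleanField(default=False)

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # a custom manager receives the document class and a base queryset
        return queryset.filter(published=True)

BlogPost(title='test1', published=False).save()
BlogPost(title='test2', published=True).save()
assert len(BlogPost.live_posts()) == 1  # matches the corrected docs above
```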
| @@ -12,7 +12,7 @@ from signals import * | ||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||
|            queryset.__all__ + signals.__all__) | ||||
|  | ||||
| VERSION = (0, 6, 6) | ||||
| VERSION = (0, 6, 19) | ||||
|  | ||||
|  | ||||
| def get_version(): | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| import warnings | ||||
| from collections import defaultdict | ||||
|  | ||||
| from queryset import QuerySet, QuerySetManager | ||||
| from queryset import DoesNotExist, MultipleObjectsReturned | ||||
| @@ -25,7 +26,15 @@ class InvalidDocumentError(Exception): | ||||
|  | ||||
| class ValidationError(AssertionError): | ||||
|     """Validation exception. | ||||
|  | ||||
|     May represent an error validating a field or a | ||||
|     document containing fields with validation errors. | ||||
|  | ||||
|     :ivar errors: A dictionary of errors for fields within this | ||||
|         document or list, or None if the error is for an | ||||
|         individual field. | ||||
|     """ | ||||
|  | ||||
|     errors = {} | ||||
|     field_name = None | ||||
|     _message = None | ||||
| @@ -43,9 +52,11 @@ class ValidationError(AssertionError): | ||||
|  | ||||
|     def __getattribute__(self, name): | ||||
|         message = super(ValidationError, self).__getattribute__(name) | ||||
|         if name == 'message' and self.field_name: | ||||
|             return message + ' ("%s")' % self.field_name | ||||
|         else: | ||||
|         if name == 'message': | ||||
|             if self.field_name: | ||||
|                 message = '%s' % message | ||||
|             if self.errors: | ||||
|                 message = '%s(%s)' % (message, self._format_errors()) | ||||
|         return message | ||||
|  | ||||
|     def _get_message(self): | ||||
| @@ -57,6 +68,13 @@ class ValidationError(AssertionError): | ||||
|     message = property(_get_message, _set_message) | ||||
|  | ||||
|     def to_dict(self): | ||||
|         """Returns a dictionary of all errors within a document | ||||
|  | ||||
|         Keys are field names or list indices and values are the | ||||
|         validation error messages, or a nested dictionary of | ||||
|         errors for an embedded document or list. | ||||
|         """ | ||||
|  | ||||
|         def build_dict(source): | ||||
|             errors_dict = {} | ||||
|             if not source: | ||||
| @@ -73,6 +91,24 @@ class ValidationError(AssertionError): | ||||
|             return {} | ||||
|         return build_dict(self.errors) | ||||
|  | ||||
|     def _format_errors(self): | ||||
|         """Returns a string listing all errors within a document""" | ||||
|  | ||||
|         def generate_key(value, prefix=''): | ||||
|             if isinstance(value, list): | ||||
|                 value = ' '.join([generate_key(k) for k in value]) | ||||
|             if isinstance(value, dict): | ||||
|                 value = ' '.join( | ||||
|                         [generate_key(v, k) for k, v in value.iteritems()]) | ||||
|  | ||||
|             results = "%s.%s" % (prefix, value) if prefix else value | ||||
|             return results | ||||
|  | ||||
|         error_dict = defaultdict(list) | ||||
|         for k, v in self.to_dict().iteritems(): | ||||
|             error_dict[generate_key(v)].append(k) | ||||
|         return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()]) | ||||
|  | ||||
|  | ||||
| _document_registry = {} | ||||
|  | ||||
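A sketch of consuming the `to_dict()` output documented above when document validation fails; the `Person` model is hypothetical:

```python
from mongoengine import Document, StringField, ValidationError

class Person(Document):
    name = StringField(required=True)

try:
    Person().validate()  # 'name' is required but missing
except ValidationError as e:
    errors = e.to_dict()
    # keys are field names (or list indices), values are the messages
    assert 'name' in errors
```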
| @@ -191,16 +227,18 @@ class BaseField(object): | ||||
|         pass | ||||
|  | ||||
|     def _validate(self, value): | ||||
|  | ||||
|         from mongoengine import Document, EmbeddedDocument | ||||
|         # check choices | ||||
|         if self.choices: | ||||
|             is_cls = isinstance(value, (Document, EmbeddedDocument)) | ||||
|             value_to_check = value.__class__ if is_cls else value | ||||
|             err_msg = 'an instance' if is_cls else 'one' | ||||
|             if isinstance(self.choices[0], (list, tuple)): | ||||
|                 option_keys = [option_key for option_key, option_value in self.choices] | ||||
|                 if value not in option_keys: | ||||
|                     self.error('Value must be one of %s' % unicode(option_keys)) | ||||
|             else: | ||||
|                 if value not in self.choices: | ||||
|                     self.error('Value must be one of %s' % unicode(self.choices)) | ||||
|                 if value_to_check not in option_keys: | ||||
|                     self.error('Value must be %s of %s' % (err_msg, unicode(option_keys))) | ||||
|             elif value_to_check not in self.choices: | ||||
|                 self.error('Value must be %s of %s' % (err_msg, unicode(self.choices))) | ||||
|  | ||||
|         # check validation argument | ||||
|         if self.validation is not None: | ||||
| @@ -233,8 +271,10 @@ class ComplexBaseField(BaseField): | ||||
|         if instance is None: | ||||
|             # Document class being used rather than a document object | ||||
|             return self | ||||
|  | ||||
|         if not self._dereference and instance._initialised: | ||||
|         from fields import GenericReferenceField, ReferenceField | ||||
|         dereference = self.field is None or isinstance(self.field, | ||||
|             (GenericReferenceField, ReferenceField)) | ||||
|         if not self._dereference and instance._initialised and dereference: | ||||
|             from dereference import DeReference | ||||
|             self._dereference = DeReference()  # Cached | ||||
|             instance._data[self.name] = self._dereference( | ||||
| @@ -368,12 +408,12 @@ class ComplexBaseField(BaseField): | ||||
|                 sequence = enumerate(value) | ||||
|             for k, v in sequence: | ||||
|                 try: | ||||
|                     self.field.validate(v) | ||||
|                 except (ValidationError, AssertionError), error: | ||||
|                     if hasattr(error, 'errors'): | ||||
|                         errors[k] = error.errors | ||||
|                     else: | ||||
|                     self.field._validate(v) | ||||
|                 except ValidationError, error: | ||||
|                     errors[k] = error.errors or error | ||||
|                 except (ValueError, AssertionError), error: | ||||
|                     errors[k] = error | ||||
|  | ||||
|             if errors: | ||||
|                 field_class = self.field.__class__.__name__ | ||||
|                 self.error('Invalid %s item (%s)' % (field_class, value), | ||||
| @@ -401,47 +441,6 @@ class ComplexBaseField(BaseField): | ||||
|     owner_document = property(_get_owner_document, _set_owner_document) | ||||
|  | ||||
|  | ||||
| class BaseDynamicField(BaseField): | ||||
|     """Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         """Convert a Python type to a MongoDBcompatible type. | ||||
|         """ | ||||
|  | ||||
|         if isinstance(value, basestring): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_mongo'): | ||||
|             return value.to_mongo() | ||||
|  | ||||
|         if not isinstance(value, (dict, list, tuple)): | ||||
|             return value | ||||
|  | ||||
|         is_list = False | ||||
|         if not hasattr(value, 'items'): | ||||
|             is_list = True | ||||
|             value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|  | ||||
|         data = {} | ||||
|         for k, v in value.items(): | ||||
|             data[k] = self.to_mongo(v) | ||||
|  | ||||
|         if is_list:  # Convert back to a list | ||||
|             value = [v for k, v in sorted(data.items(), key=operator.itemgetter(0))] | ||||
|         else: | ||||
|             value = data | ||||
|         return value | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return member_name | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if isinstance(value, basestring): | ||||
|             from mongoengine.fields import StringField | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|  | ||||
| class ObjectIdField(BaseField): | ||||
|     """An field wrapper around MongoDB's ObjectIds. | ||||
|     """ | ||||
| @@ -650,8 +649,13 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|                     del(attrs['meta']['collection']) | ||||
|                 if base._get_collection_name(): | ||||
|                     collection = base._get_collection_name() | ||||
|                 # Propagate index options. | ||||
|                 for key in ('index_background', 'index_drop_dups', 'index_opts'): | ||||
|  | ||||
|                 # Propagate inherited values | ||||
|                 keys_to_propogate = ( | ||||
|                     'index_background', 'index_drop_dups', 'index_opts', | ||||
|                     'allow_inheritance', 'queryset_class', 'db_alias', | ||||
|                 ) | ||||
|                 for key in keys_to_propogate: | ||||
|                     if key in base._meta: | ||||
|                         base_meta[key] = base._meta[key] | ||||
|  | ||||
| @@ -660,11 +664,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|                     abstract_base_indexes += base._meta.get('indexes', []) | ||||
|                 else: | ||||
|                     base_indexes += base._meta.get('indexes', []) | ||||
|                 # Propagate 'allow_inheritance' | ||||
|                 if 'allow_inheritance' in base._meta: | ||||
|                     base_meta['allow_inheritance'] = base._meta['allow_inheritance'] | ||||
|                 if 'queryset_class' in base._meta: | ||||
|                     base_meta['queryset_class'] = base._meta['queryset_class'] | ||||
|             try: | ||||
|                 base_meta['objects'] = base.__getattribute__(base, 'objects') | ||||
|             except TypeError: | ||||
| @@ -672,6 +671,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|             except AttributeError: | ||||
|                 pass | ||||
|  | ||||
|         # defaults | ||||
|         meta = { | ||||
|             'abstract': False, | ||||
|             'collection': collection, | ||||
| @@ -711,7 +711,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|             meta['queryset_class'] = manager.queryset_class | ||||
|         new_class.objects = manager | ||||
|  | ||||
|         indicies = meta['indexes'] + abstract_base_indexes | ||||
|         indicies = list(meta['indexes']) + abstract_base_indexes | ||||
|         user_indexes = [QuerySet._build_index_spec(new_class, spec) | ||||
|                         for spec in indicies] + base_indexes | ||||
|         new_class._meta['indexes'] = user_indexes | ||||
| @@ -805,6 +805,7 @@ class BaseDocument(object): | ||||
|                     dynamic_data[key] = value | ||||
|         else: | ||||
|             for key, value in values.items(): | ||||
|                 key = self._reverse_db_field_map.get(key, key) | ||||
|                 setattr(self, key, value) | ||||
|  | ||||
|         # Set any get_fieldname_display methods | ||||
| @@ -825,7 +826,8 @@ class BaseDocument(object): | ||||
|  | ||||
|             field = None | ||||
|             if not hasattr(self, name) and not name.startswith('_'): | ||||
|                 field = BaseDynamicField(db_field=name) | ||||
|                 from fields import DynamicField | ||||
|                 field = DynamicField(db_field=name) | ||||
|                 field.name = name | ||||
|                 self._dynamic_fields[name] = field | ||||
|  | ||||
| @@ -838,13 +840,6 @@ class BaseDocument(object): | ||||
|                 if hasattr(self, '_changed_fields'): | ||||
|                     self._mark_as_changed(name) | ||||
|  | ||||
|         # Handle None values for required fields | ||||
|         if value is None and name in getattr(self, '_fields', {}): | ||||
|             self._data[name] = value | ||||
|             if hasattr(self, '_changed_fields'): | ||||
|                 self._mark_as_changed(name) | ||||
|             return | ||||
|  | ||||
|         if not self._created and name in self._meta.get('shard_key', tuple()): | ||||
|             from queryset import OperationError | ||||
|             raise OperationError("Shard Keys are immutable. Tried to update %s" % name) | ||||
| @@ -909,8 +904,7 @@ class BaseDocument(object): | ||||
|                 errors[field.name] = ValidationError('Field is required', | ||||
|                                                      field_name=field.name) | ||||
|         if errors: | ||||
|             raise ValidationError('Errors encountered validating document', | ||||
|                                   errors=errors) | ||||
|             raise ValidationError('ValidationError', errors=errors) | ||||
|  | ||||
|     def to_mongo(self): | ||||
|         """Return data dictionary ready for use with MongoDB. | ||||
| @@ -947,8 +941,8 @@ class BaseDocument(object): | ||||
|         """ | ||||
|         # get the class name from the document, falling back to the given | ||||
|         # class if unavailable | ||||
|         class_name = son.get(u'_cls', cls._class_name) | ||||
|         data = dict((str(key), value) for key, value in son.items()) | ||||
|         class_name = son.get('_cls', cls._class_name) | ||||
|         data = dict(("%s" % key, value) for key, value in son.items()) | ||||
|  | ||||
|         if '_types' in data: | ||||
|             del data['_types'] | ||||
| @@ -961,11 +955,18 @@ class BaseDocument(object): | ||||
|             cls = get_document(class_name) | ||||
|  | ||||
|         changed_fields = [] | ||||
|         errors_dict = {} | ||||
|  | ||||
|         for field_name, field in cls._fields.items(): | ||||
|             if field.db_field in data: | ||||
|                 value = data[field.db_field] | ||||
|                 try: | ||||
|                     data[field_name] = (value if value is None | ||||
|                                     else field.to_python(value)) | ||||
|                     if field_name != field.db_field: | ||||
|                         del data[field.db_field] | ||||
|                 except (AttributeError, ValueError), e: | ||||
|                     errors_dict[field_name] = e | ||||
|             elif field.default: | ||||
|                 default = field.default | ||||
|                 if callable(default): | ||||
| @@ -973,7 +974,13 @@ class BaseDocument(object): | ||||
|                 if isinstance(default, BaseDocument): | ||||
|                     changed_fields.append(field_name) | ||||
|  | ||||
|         if errors_dict: | ||||
|             errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()]) | ||||
|             raise InvalidDocumentError(""" | ||||
| Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, errors)) | ||||
|  | ||||
|         obj = cls(**data) | ||||
|  | ||||
|         obj._changed_fields = changed_fields | ||||
|         obj._created = False | ||||
|         return obj | ||||
| @@ -1005,9 +1012,10 @@ class BaseDocument(object): | ||||
|             field_list.update(self._dynamic_fields) | ||||
|  | ||||
|         for field_name in field_list: | ||||
|  | ||||
|             db_field_name = self._db_field_map.get(field_name, field_name) | ||||
|             key = '%s.' % db_field_name | ||||
|             field = getattr(self, field_name, None) | ||||
|             field = self._data.get(field_name, None) | ||||
|             if hasattr(field, 'id'): | ||||
|                 if field.id in inspected: | ||||
|                     continue | ||||
| @@ -1044,13 +1052,16 @@ class BaseDocument(object): | ||||
|             for path in set_fields: | ||||
|                 parts = path.split('.') | ||||
|                 d = doc | ||||
|                 new_path = [] | ||||
|                 for p in parts: | ||||
|                     if hasattr(d, '__getattr__'): | ||||
|                         d = getattr(p, d) | ||||
|                     if isinstance(d, DBRef): | ||||
|                         break | ||||
|                     elif p.isdigit(): | ||||
|                         d = d[int(p)] | ||||
|                     else: | ||||
|                     elif hasattr(d, 'get'): | ||||
|                         d = d.get(p) | ||||
|                     new_path.append(p) | ||||
|                 path = '.'.join(new_path) | ||||
|                 set_data[path] = d | ||||
|         else: | ||||
|             set_data = doc | ||||
| @@ -1107,7 +1118,11 @@ class BaseDocument(object): | ||||
|         inspected = inspected or [] | ||||
|         geo_indices = [] | ||||
|         inspected.append(cls) | ||||
|  | ||||
|         from fields import EmbeddedDocumentField, GeoPointField | ||||
|         for field in cls._fields.values(): | ||||
|             if not isinstance(field, (EmbeddedDocumentField, GeoPointField)): | ||||
|                 continue | ||||
|             if hasattr(field, 'document_type'): | ||||
|                 field_cls = field.document_type | ||||
|                 if field_cls in inspected: | ||||
| @@ -1212,15 +1227,15 @@ class BaseList(list): | ||||
|     def __init__(self, list_items, instance, name): | ||||
|         self._instance = instance | ||||
|         self._name = name | ||||
|         super(BaseList, self).__init__(list_items) | ||||
|         return super(BaseList, self).__init__(list_items) | ||||
|  | ||||
|     def __setitem__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseList, self).__setitem__(*args, **kwargs) | ||||
|         return super(BaseList, self).__setitem__(*args, **kwargs) | ||||
|  | ||||
|     def __delitem__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseList, self).__delitem__(*args, **kwargs) | ||||
|         return super(BaseList, self).__delitem__(*args, **kwargs) | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self.observer = None | ||||
| @@ -1274,23 +1289,23 @@ class BaseDict(dict): | ||||
|     def __init__(self, dict_items, instance, name): | ||||
|         self._instance = instance | ||||
|         self._name = name | ||||
|         super(BaseDict, self).__init__(dict_items) | ||||
|         return super(BaseDict, self).__init__(dict_items) | ||||
|  | ||||
|     def __setitem__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).__setitem__(*args, **kwargs) | ||||
|         return super(BaseDict, self).__setitem__(*args, **kwargs) | ||||
|  | ||||
|     def __delete__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).__delete__(*args, **kwargs) | ||||
|         return super(BaseDict, self).__delete__(*args, **kwargs) | ||||
|  | ||||
|     def __delitem__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).__delitem__(*args, **kwargs) | ||||
|         return super(BaseDict, self).__delitem__(*args, **kwargs) | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).__delattr__(*args, **kwargs) | ||||
|         return super(BaseDict, self).__delattr__(*args, **kwargs) | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self.instance = None | ||||
| @@ -1303,19 +1318,19 @@ class BaseDict(dict): | ||||
|  | ||||
|     def clear(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).clear(*args, **kwargs) | ||||
|         return super(BaseDict, self).clear(*args, **kwargs) | ||||
|  | ||||
|     def pop(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).pop(*args, **kwargs) | ||||
|         return super(BaseDict, self).pop(*args, **kwargs) | ||||
|  | ||||
|     def popitem(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).popitem(*args, **kwargs) | ||||
|         return super(BaseDict, self).popitem(*args, **kwargs) | ||||
|  | ||||
|     def update(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         super(BaseDict, self).update(*args, **kwargs) | ||||
|         return super(BaseDict, self).update(*args, **kwargs) | ||||
|  | ||||
|     def _mark_as_changed(self): | ||||
|         if hasattr(self._instance, '_mark_as_changed'): | ||||
|   | ||||
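The `BaseDict`/`BaseList` changes above add `return` to the overridden methods, so the standard container API behaves as expected; for example `pop` now yields the removed value. A sketch with a hypothetical document:

```python
from mongoengine import Document, DictField, connect

connect('example_db')  # assumes a local mongod

class Config(Document):
    values = DictField()

doc = Config(values={'a': 1}).save()
# BaseDict.pop now returns the popped value instead of None,
# while still marking the field as changed.
assert doc.values.pop('a') == 1
```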
| @@ -63,7 +63,10 @@ def register_connection(alias, name, host='localhost', port=27017, | ||||
|             'password': uri_dict.get('password'), | ||||
|             'read_preference': read_preference, | ||||
|         }) | ||||
|         if "replicaSet" in host: | ||||
|             conn_settings['replicaSet'] = True | ||||
|  | ||||
|     conn_settings.update(kwargs) | ||||
|     _connection_settings[alias] = conn_settings | ||||
|  | ||||
|  | ||||
| @@ -112,7 +115,11 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||
|             # Discard port since it can't be used on ReplicaSetConnection | ||||
|             conn_settings.pop('port', None) | ||||
|             # Discard replicaSet if not base string | ||||
|             if not isinstance(conn_settings['replicaSet'], basestring): | ||||
|                 conn_settings.pop('replicaSet', None) | ||||
|             connection_class = ReplicaSetConnection | ||||
|  | ||||
|         try: | ||||
|             _connections[alias] = connection_class(**conn_settings) | ||||
|         except Exception, e: | ||||
|   | ||||
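The connection changes above route host URIs containing `replicaSet` to `ReplicaSetConnection`. A sketch of a caller exercising that path; the hosts, database, and set name are placeholders, and a running replica set is assumed:

```python
from mongoengine import connect

# A "replicaSet" option in the host URI triggers the new
# ReplicaSetConnection code path added above.
connect('mydb',
        host='mongodb://localhost:27017,localhost:27018/mydb?replicaSet=rs0')
```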
| @@ -112,6 +112,10 @@ class DeReference(object): | ||||
|                     for ref in references: | ||||
|                         if '_cls' in ref: | ||||
|                             doc = get_document(ref["_cls"])._from_son(ref) | ||||
|                         elif doc_type is None: | ||||
|                             doc = get_document( | ||||
|                                 ''.join(x.capitalize() | ||||
|                                         for x in col.split('_')))._from_son(ref) | ||||
|                         else: | ||||
|                             doc = doc_type._from_son(ref) | ||||
|                         object_map[doc.id] = doc | ||||
| @@ -162,7 +166,7 @@ class DeReference(object): | ||||
|             else: | ||||
|                 data[k] = v | ||||
|  | ||||
|             if k in self.object_map: | ||||
|             if k in self.object_map and not is_list: | ||||
|                 data[k] = self.object_map[k] | ||||
|             elif hasattr(v, '_fields'): | ||||
|                 for field_name, field in v._fields.iteritems(): | ||||
|   | ||||
| @@ -66,6 +66,7 @@ class User(Document): | ||||
|                                 verbose_name=_('date joined')) | ||||
|  | ||||
|     meta = { | ||||
|         'allow_inheritance': True, | ||||
|         'indexes': [ | ||||
|             {'fields': ['username'], 'unique': True} | ||||
|         ] | ||||
|   | ||||
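With `allow_inheritance` now set in the `User` meta above (per the 0.6.8 changelog, "Updated Django User document, now allows inheritance"), the auth document can be subclassed. A sketch assuming a configured Django project; the `Profile` model and its field are hypothetical:

```python
from mongoengine import StringField
from mongoengine.django.auth import User

class Profile(User):
    # inherits the auth fields; stored alongside User documents
    bio = StringField()
```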
| @@ -55,7 +55,7 @@ class SessionStore(SessionBase): | ||||
|  | ||||
|     def save(self, must_create=False): | ||||
|         if self.session_key is None: | ||||
|             self.create() | ||||
|             self._session_key = self._get_new_session_key() | ||||
|         s = MongoSession(session_key=self.session_key) | ||||
|         s.session_data = self.encode(self._get_session(no_load=must_create)) | ||||
|         s.expire_date = self.get_expiry_date() | ||||
|   | ||||
| @@ -10,7 +10,7 @@ class MongoTestCase(TestCase): | ||||
|     """ | ||||
|     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME | ||||
|     def __init__(self, methodName='runtest'): | ||||
|         self.db = connect(self.db_name) | ||||
|         self.db = connect(self.db_name).get_db() | ||||
|         super(MongoTestCase, self).__init__(methodName) | ||||
|  | ||||
|     def _post_teardown(self): | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| import pymongo | ||||
|  | ||||
| from bson.dbref import DBRef | ||||
|  | ||||
| from mongoengine import signals | ||||
| @@ -39,6 +40,11 @@ class EmbeddedDocument(BaseDocument): | ||||
|         else: | ||||
|             super(EmbeddedDocument, self).__delattr__(*args, **kwargs) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         if isinstance(other, self.__class__): | ||||
|             return self._data == other._data | ||||
|         return False | ||||
|  | ||||
|  | ||||
| class Document(BaseDocument): | ||||
|     """The base class used for defining the structure and properties of | ||||
| @@ -74,8 +80,14 @@ class Document(BaseDocument): | ||||
|     names. Index direction may be specified by prefixing the field names with | ||||
|     a **+** or **-** sign. | ||||
|  | ||||
|     Automatic index creation can be disabled by specifying | ||||
|     attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to | ||||
|     False then indexes will not be created by MongoEngine.  This is useful in | ||||
|     production systems where index creation is performed as part of a deployment | ||||
|     system. | ||||
|  | ||||
|     By default, _types will be added to the start of every index (that | ||||
|     doesn't contain a list) if allow_inheritence is True. This can be | ||||
|     doesn't contain a list) if allow_inheritance is True. This can be | ||||
|     disabled by either setting types to False on the specific index or | ||||
|     by setting index_types to False on the meta dictionary for the document. | ||||
|     """ | ||||
| @@ -147,8 +159,9 @@ class Document(BaseDocument): | ||||
|                 :meth:`~pymongo.collection.Collection.save` OR | ||||
|                 :meth:`~pymongo.collection.Collection.insert` | ||||
|                 which will be used as options for the resultant ``getLastError`` command. | ||||
|                 For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers | ||||
|                 have recorded the write and will force an fsync on each server being written to. | ||||
|                 For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||
|                 wait until at least two servers have recorded the write and will force an | ||||
|                 fsync on each server being written to. | ||||
|         :param cascade: Sets the flag for cascading saves.  You can set a default by setting | ||||
|             "cascade" in the document __meta__ | ||||
|         :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves | ||||
| @@ -213,6 +226,7 @@ class Document(BaseDocument): | ||||
|                 if cascade_kwargs:  # Allow granular control over cascades | ||||
|                     kwargs.update(cascade_kwargs) | ||||
|                 kwargs['_refs'] = _refs | ||||
|                 #self._changed_fields = [] | ||||
|                 self.cascade_save(**kwargs) | ||||
|  | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
| @@ -226,17 +240,24 @@ class Document(BaseDocument): | ||||
|         self._changed_fields = [] | ||||
|         self._created = False | ||||
|         signals.post_save.send(self.__class__, document=self, created=created) | ||||
|         return self | ||||
|  | ||||
|     def cascade_save(self, *args, **kwargs): | ||||
|         """Recursively saves any references / generic references on an object""" | ||||
|         from fields import ReferenceField, GenericReferenceField | ||||
|         _refs = kwargs.get('_refs', []) or [] | ||||
|  | ||||
|         for name, cls in self._fields.items(): | ||||
|  | ||||
|             if not isinstance(cls, (ReferenceField, GenericReferenceField)): | ||||
|                 continue | ||||
|  | ||||
|             ref = getattr(self, name) | ||||
|             if not ref: | ||||
|                 continue | ||||
|             if isinstance(ref, DBRef): | ||||
|                 continue | ||||
|  | ||||
|             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) | ||||
|             if ref and ref_id not in _refs: | ||||
|                 _refs.append(ref_id) | ||||
| @@ -351,10 +372,10 @@ class DynamicDocument(Document): | ||||
|     way as an ordinary document but has expando style properties.  Any data | ||||
|     passed or set against the :class:`~mongoengine.DynamicDocument` that is | ||||
|     not a field is automatically converted into a | ||||
|     :class:`~mongoengine.BaseDynamicField` and data can be attributed to that | ||||
|     :class:`~mongoengine.DynamicField` and data can be attributed to that | ||||
|     field. | ||||
|  | ||||
|     ..note:: | ||||
|     .. note:: | ||||
|  | ||||
|         There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||
|     """ | ||||
|   | ||||
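As the corrected note above says, any attribute set on a `DynamicDocument` that is not a declared field is handled by `DynamicField`, with the caveat that names may not start with `_`. A sketch with hypothetical names:

```python
from mongoengine import DynamicDocument, StringField, connect

connect('example_db')  # assumes a local mongod

class Page(DynamicDocument):
    title = StringField()

page = Page(title='Home')
page.tags = ['mongodb', 'mongoengine']  # undeclared; becomes a DynamicField
page.save()
```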
| @@ -4,9 +4,9 @@ import decimal | ||||
| import gridfs | ||||
| import re | ||||
| import uuid | ||||
| import warnings | ||||
|  | ||||
| from bson import Binary, DBRef, SON, ObjectId | ||||
|  | ||||
| from base import (BaseField, ComplexBaseField, ObjectIdField, | ||||
|                   ValidationError, get_document, BaseDocument) | ||||
| from queryset import DO_NOTHING, QuerySet | ||||
| @@ -30,7 +30,7 @@ except ImportError: | ||||
| __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', | ||||
|            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', | ||||
|            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', | ||||
|            'DecimalField', 'ComplexDateTimeField', 'URLField', | ||||
|            'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField', | ||||
|            'GenericReferenceField', 'FileField', 'BinaryField', | ||||
|            'SortedListField', 'EmailField', 'GeoPointField', 'ImageField', | ||||
|            'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField'] | ||||
| @@ -49,10 +49,13 @@ class StringField(BaseField): | ||||
|         super(StringField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         return unicode(value) | ||||
|         if isinstance(value, unicode): | ||||
|             return value | ||||
|         else: | ||||
|             return value.decode('utf-8') | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (str, unicode)): | ||||
|         if not isinstance(value, basestring): | ||||
|             self.error('StringField only accepts string values') | ||||
|  | ||||
|         if self.max_length is not None and len(value) > self.max_length: | ||||
| @@ -164,6 +167,9 @@ class IntField(BaseField): | ||||
|             self.error('Integer value is too large') | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return int(value) | ||||
|  | ||||
|  | ||||
| @@ -182,7 +188,7 @@ class FloatField(BaseField): | ||||
|         if isinstance(value, int): | ||||
|             value = float(value) | ||||
|         if not isinstance(value, float): | ||||
|             self.error('FoatField only accepts float values') | ||||
|             self.error('FloatField only accepts float values') | ||||
|  | ||||
|         if self.min_value is not None and value < self.min_value: | ||||
|             self.error('Float value is too small') | ||||
| @@ -191,6 +197,9 @@ class FloatField(BaseField): | ||||
|             self.error('Float value is too large') | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return float(value) | ||||
|  | ||||
|  | ||||
| @@ -369,7 +378,7 @@ class ComplexDateTimeField(StringField): | ||||
|         return self._convert_from_string(data) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         value = self._convert_from_datetime(value) | ||||
|         value = self._convert_from_datetime(value) if value else value | ||||
|         return super(ComplexDateTimeField, self).__set__(instance, value) | ||||
|  | ||||
|     def validate(self, value): | ||||
| @@ -441,6 +450,9 @@ class GenericEmbeddedDocumentField(BaseField): | ||||
|     :class:`~mongoengine.EmbeddedDocument` to be stored. | ||||
|  | ||||
|     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. | ||||
|  | ||||
|     .. note:: You can use the choices param to limit the acceptable | ||||
|     EmbeddedDocument types | ||||
|     """ | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
| @@ -470,10 +482,56 @@ class GenericEmbeddedDocumentField(BaseField): | ||||
|         return data | ||||
|  | ||||
|  | ||||
| class DynamicField(BaseField): | ||||
|     """A truly dynamic field type capable of handling different and varying | ||||
|     types of data. | ||||
|  | ||||
|     Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         """Convert a Python type to a MongoDBcompatible type. | ||||
|         """ | ||||
|  | ||||
|         if isinstance(value, basestring): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, 'to_mongo'): | ||||
|             return value.to_mongo() | ||||
|  | ||||
|         if not isinstance(value, (dict, list, tuple)): | ||||
|             return value | ||||
|  | ||||
|         is_list = False | ||||
|         if not hasattr(value, 'items'): | ||||
|             is_list = True | ||||
|             value = dict([(k, v) for k, v in enumerate(value)]) | ||||
|  | ||||
|         data = {} | ||||
|         for k, v in value.items(): | ||||
|             data[k] = self.to_mongo(v) | ||||
|  | ||||
|         if is_list:  # Convert back to a list | ||||
|             value = [v for k, v in sorted(data.items(), key=itemgetter(0))] | ||||
|         else: | ||||
|             value = data | ||||
|         return value | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return member_name | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if isinstance(value, basestring): | ||||
|             from mongoengine.fields import StringField | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|  | ||||
| class ListField(ComplexBaseField): | ||||
|     """A list field that wraps a standard field, allowing multiple instances | ||||
|     of the field to be used as a list in the database. | ||||
|  | ||||
|     If using with ReferenceFields see: :ref:`one-to-many-with-listfields` | ||||
|  | ||||
|     .. note:: | ||||
|         Required means it cannot be empty - as the default for ListFields is [] | ||||
|     """ | ||||
| @@ -612,6 +670,18 @@ class ReferenceField(BaseField): | ||||
|       * NULLIFY     - Updates the reference to null. | ||||
|       * CASCADE     - Deletes the documents associated with the reference. | ||||
|       * DENY        - Prevent the deletion of the reference object. | ||||
|       * PULL        - Pull the reference from a :class:`~mongoengine.ListField` of references | ||||
|  | ||||
|     Alternative syntax for registering delete rules (useful when implementing | ||||
|     bi-directional delete rules) | ||||
|  | ||||
|     .. code-block:: python | ||||
|  | ||||
|         class Bar(Document): | ||||
|             content = StringField() | ||||
|             foo = ReferenceField('Foo') | ||||
|  | ||||
|         Bar.register_delete_rule(Foo, 'bar', NULLIFY) | ||||
|  | ||||
|     .. versionchanged:: 0.5 added `reverse_delete_rule` | ||||
|     """ | ||||
| @@ -698,9 +768,11 @@ class GenericReferenceField(BaseField): | ||||
|     """A reference to *any* :class:`~mongoengine.document.Document` subclass | ||||
|     that will be automatically dereferenced on access (lazily). | ||||
|  | ||||
|     ..note ::  Any documents used as a generic reference must be registered in the | ||||
|     .. note:: Any documents used as a generic reference must be registered in the | ||||
|         document registry.  Importing the model will automatically register it. | ||||
|  | ||||
|     .. note:: You can use the ``choices`` parameter to limit the acceptable Document types. | ||||
|  | ||||
|     .. versionadded:: 0.3 | ||||
|     """ | ||||
|  | ||||
| @@ -735,6 +807,9 @@ class GenericReferenceField(BaseField): | ||||
|         if document is None: | ||||
|             return None | ||||
|  | ||||
|         if isinstance(document, (dict, SON)): | ||||
|             return document | ||||
|  | ||||
|         id_field_name = document.__class__._meta['id_field'] | ||||
|         id_field = document.__class__._fields[id_field_name] | ||||
|  | ||||
| @@ -770,13 +845,9 @@ class BinaryField(BaseField): | ||||
|     def to_mongo(self, value): | ||||
|         return Binary(value) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         # Returns str not unicode as this is binary data | ||||
|         return str(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, str): | ||||
|             self.error('BinaryField only accepts string values') | ||||
|         if not isinstance(value, (basestring, Binary)): | ||||
|             self.error('BinaryField only accepts string or bson Binary values') | ||||
|  | ||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||
|             self.error('Binary value is too long') | ||||
| @@ -829,6 +900,15 @@ class GridFSProxy(object): | ||||
|         self_dict['_fs'] = None | ||||
|         return self_dict | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) | ||||
|  | ||||
|     def __cmp__(self, other): | ||||
|         if not isinstance(other, GridFSProxy): | ||||
|             return -1 | ||||
|         return cmp((self.grid_id, self.collection_name, self.db_alias), | ||||
|                    (other.grid_id, other.collection_name, other.db_alias)) | ||||
|  | ||||
|     @property | ||||
|     def fs(self): | ||||
|         if not self._fs: | ||||
| @@ -875,10 +955,14 @@ class GridFSProxy(object): | ||||
|         self.newfile.writelines(lines) | ||||
|  | ||||
|     def read(self, size=-1): | ||||
|         try: | ||||
|             return self.get().read(size) | ||||
|         except: | ||||
|         gridout = self.get() | ||||
|         if gridout is None: | ||||
|             return None | ||||
|         else: | ||||
|             try: | ||||
|                 return gridout.read(size) | ||||
|             except: | ||||
|                 return "" | ||||
|  | ||||
|     def delete(self): | ||||
|         # Delete file from GridFS, FileField still remains | ||||
| @@ -923,19 +1007,20 @@ class FileField(BaseField): | ||||
|  | ||||
|         # Check if a file already exists for this model | ||||
|         grid_file = instance._data.get(self.name) | ||||
|         self.grid_file = grid_file | ||||
|         if isinstance(self.grid_file, self.proxy_class): | ||||
|             if not self.grid_file.key: | ||||
|                 self.grid_file.key = self.name | ||||
|                 self.grid_file.instance = instance | ||||
|             return self.grid_file | ||||
|         return self.proxy_class(key=self.name, instance=instance, | ||||
|         if not isinstance(grid_file, self.proxy_class): | ||||
|             grid_file = self.proxy_class(key=self.name, instance=instance, | ||||
|                                          db_alias=self.db_alias, | ||||
|                                          collection_name=self.collection_name) | ||||
|             instance._data[self.name] = grid_file | ||||
|  | ||||
|         if not grid_file.key: | ||||
|             grid_file.key = self.name | ||||
|             grid_file.instance = instance | ||||
|         return grid_file | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         key = self.name | ||||
|         if isinstance(value, file) or isinstance(value, str): | ||||
|         if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, basestring): | ||||
|             # using "FileField() = file/string" notation | ||||
|             grid_file = instance._data.get(self.name) | ||||
|             # If a file already exists, delete it | ||||
| @@ -1203,7 +1288,7 @@ class SequenceField(IntField): | ||||
|             instance._data[self.name] = value | ||||
|             instance._mark_as_changed(self.name) | ||||
|  | ||||
|         return value | ||||
|         return int(value) if value else None | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|  | ||||
| @@ -1223,17 +1308,40 @@ class UUIDField(BaseField): | ||||
|  | ||||
|     .. versionadded:: 0.6 | ||||
|     """ | ||||
|     _binary = None | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|     def __init__(self, binary=None, **kwargs): | ||||
|         """ | ||||
|         Store UUID data in the database | ||||
|  | ||||
|         :param binary: (optional) boolean store as binary. | ||||
|  | ||||
|         .. versionchanged:: 0.6.19 | ||||
|         """ | ||||
|         if binary is None: | ||||
|             binary = False | ||||
|             msg = ("UUIDFields will soon default to store as binary, please " | ||||
|                   "configure binary=False if you wish to store as a string") | ||||
|             warnings.warn(msg, FutureWarning) | ||||
|         self._binary = binary | ||||
|         super(UUIDField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if not self._binary: | ||||
|             if not isinstance(value, basestring): | ||||
|                 value = unicode(value) | ||||
|             return uuid.UUID(value) | ||||
|         return value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if not self._binary: | ||||
|             return unicode(value) | ||||
|         return value | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return None | ||||
|         return self.to_mongo(value) | ||||
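|  | ||||
|     # Editor's illustrative sketch (not part of the change): the ``binary`` | ||||
|     # flag selects the stored representation, e.g. | ||||
|     # | ||||
|     #     class User(Document): | ||||
|     #         token = UUIDField(binary=False)  # stored as a unicode string | ||||
|     #         guid = UUIDField(binary=True)    # stored in MongoDB's native form | ||||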
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, uuid.UUID): | ||||
|   | ||||
| @@ -4,13 +4,15 @@ import copy | ||||
| import itertools | ||||
| import operator | ||||
|  | ||||
| from functools import partial | ||||
|  | ||||
| import pymongo | ||||
| from bson.code import Code | ||||
|  | ||||
| from mongoengine import signals | ||||
|  | ||||
| __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', | ||||
|            'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] | ||||
|            'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL'] | ||||
|  | ||||
|  | ||||
| # The maximum number of items to display in a QuerySet.__repr__ | ||||
| @@ -21,6 +23,7 @@ DO_NOTHING = 0 | ||||
| NULLIFY = 1 | ||||
| CASCADE = 2 | ||||
| DENY = 3 | ||||
| PULL = 4 | ||||
|  | ||||
|  | ||||
| class DoesNotExist(Exception): | ||||
| @@ -340,6 +343,7 @@ class QuerySet(object): | ||||
|         self._timeout = True | ||||
|         self._class_check = True | ||||
|         self._slave_okay = False | ||||
|         self._iter = False | ||||
|         self._scalar = [] | ||||
|  | ||||
|         # If inheritance is allowed, only return instances and instances of | ||||
| @@ -394,61 +398,6 @@ class QuerySet(object): | ||||
|             unique=index_spec.get('unique', False)) | ||||
|         return self | ||||
|  | ||||
|     @classmethod | ||||
|     def _build_index_spec(cls, doc_cls, spec): | ||||
|         """Build a PyMongo index spec from a MongoEngine index spec. | ||||
|         """ | ||||
|         if isinstance(spec, basestring): | ||||
|             spec = {'fields': [spec]} | ||||
|         if isinstance(spec, (list, tuple)): | ||||
|             spec = {'fields': spec} | ||||
|  | ||||
|         index_list = [] | ||||
|         use_types = doc_cls._meta.get('allow_inheritance', True) | ||||
|         for key in spec['fields']: | ||||
|             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * | ||||
|             direction = pymongo.ASCENDING | ||||
|             if key.startswith("-"): | ||||
|                 direction = pymongo.DESCENDING | ||||
|             elif key.startswith("*"): | ||||
|                 direction = pymongo.GEO2D | ||||
|             if key.startswith(("+", "-", "*")): | ||||
|                 key = key[1:] | ||||
|  | ||||
|             # Use real field name, do it manually because we need field | ||||
|             # objects for the next part (list field checking) | ||||
|             parts = key.split('.') | ||||
|             fields = QuerySet._lookup_field(doc_cls, parts) | ||||
|             parts = [field.db_field for field in fields] | ||||
|             key = '.'.join(parts) | ||||
|             index_list.append((key, direction)) | ||||
|  | ||||
|             # Check if a list field is being used, don't use _types if it is | ||||
|             if use_types and not all(f._index_with_types for f in fields): | ||||
|                 use_types = False | ||||
|  | ||||
|         # If _types is being used, prepend it to every specified index | ||||
|         index_types = doc_cls._meta.get('index_types', True) | ||||
|         allow_inheritance = doc_cls._meta.get('allow_inheritance') | ||||
|         if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: | ||||
|             index_list.insert(0, ('_types', 1)) | ||||
|  | ||||
|         spec['fields'] = index_list | ||||
|  | ||||
|         if spec.get('sparse', False) and len(spec['fields']) > 1: | ||||
|             raise ValueError( | ||||
|                 'Sparse indexes can only have one field in them. ' | ||||
|                 'See https://jira.mongodb.org/browse/SERVER-2193') | ||||
|  | ||||
|         return spec | ||||
|  | ||||
|     @classmethod | ||||
|     def _reset_already_indexed(cls, document=None): | ||||
|         """Helper to reset already indexed, can be useful for testing purposes""" | ||||
|         if document: | ||||
|             cls.__already_indexed.discard(document) | ||||
|         cls.__already_indexed.clear() | ||||
|  | ||||
|     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): | ||||
|         """Filter the selected documents by calling the | ||||
|         :class:`~mongoengine.queryset.QuerySet` with a query. | ||||
| @@ -481,24 +430,15 @@ class QuerySet(object): | ||||
|         """Returns all documents.""" | ||||
|         return self.__call__() | ||||
|  | ||||
|     @property | ||||
|     def _collection(self): | ||||
|         """Property that returns the collection object. This allows us to | ||||
|         perform operations only if the collection is accessed. | ||||
|     def _ensure_indexes(self): | ||||
|         """Checks the document meta data and ensures all the indexes exist. | ||||
|  | ||||
|         .. note:: You can disable automatic index creation by setting | ||||
|                   `auto_create_index` to False in the document's meta data | ||||
|         """ | ||||
|         if self._document not in QuerySet.__already_indexed: | ||||
|  | ||||
|             # Ensure collection exists | ||||
|             db = self._document._get_db() | ||||
|             if self._collection_obj.name not in db.collection_names(): | ||||
|                 self._document._collection = None | ||||
|                 self._collection_obj = self._document._get_collection() | ||||
|  | ||||
|             QuerySet.__already_indexed.add(self._document) | ||||
|  | ||||
|         background = self._document._meta.get('index_background', False) | ||||
|         drop_dups = self._document._meta.get('index_drop_dups', False) | ||||
|             index_opts = self._document._meta.get('index_options', {}) | ||||
|         index_opts = self._document._meta.get('index_opts', {}) | ||||
|         index_types = self._document._meta.get('index_types', True) | ||||
|  | ||||
|         # determine if an index which we are creating includes | ||||
| @@ -543,6 +483,86 @@ class QuerySet(object): | ||||
|             self._collection.ensure_index(index_spec, | ||||
|                 background=background, **index_opts) | ||||
|  | ||||
|     @classmethod | ||||
|     def _build_index_spec(cls, doc_cls, spec): | ||||
|         """Build a PyMongo index spec from a MongoEngine index spec. | ||||
|         """ | ||||
|         if isinstance(spec, basestring): | ||||
|             spec = {'fields': [spec]} | ||||
|         if isinstance(spec, (list, tuple)): | ||||
|             spec = {'fields': spec} | ||||
|  | ||||
|         index_list = [] | ||||
|         direction = None | ||||
|         use_types = doc_cls._meta.get('allow_inheritance', True) | ||||
|         for key in spec['fields']: | ||||
|             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * | ||||
|             direction = pymongo.ASCENDING | ||||
|             if key.startswith("-"): | ||||
|                 direction = pymongo.DESCENDING | ||||
|             elif key.startswith("*"): | ||||
|                 direction = pymongo.GEO2D | ||||
|             if key.startswith(("+", "-", "*")): | ||||
|                 key = key[1:] | ||||
|  | ||||
|             # Use real field name, do it manually because we need field | ||||
|             # objects for the next part (list field checking) | ||||
|             parts = key.split('.') | ||||
|             if parts in (['pk'], ['id'], ['_id']): | ||||
|                 key = '_id' | ||||
|             else: | ||||
|                 fields = QuerySet._lookup_field(doc_cls, parts) | ||||
|                 parts = [field if field == '_id' else field.db_field for field in fields] | ||||
|                 key = '.'.join(parts) | ||||
|             index_list.append((key, direction)) | ||||
|  | ||||
|             # If the index is sparse, don't include _types | ||||
|             if spec.get('sparse', False): | ||||
|                 use_types = False | ||||
|  | ||||
|             # Check if a list field is being used, don't use _types if it is | ||||
|             if use_types and not all(f._index_with_types for f in fields): | ||||
|                 use_types = False | ||||
|  | ||||
|         # If _types is being used, prepend it to every specified index | ||||
|         index_types = doc_cls._meta.get('index_types', True) | ||||
|         allow_inheritance = doc_cls._meta.get('allow_inheritance') | ||||
|         if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: | ||||
|             index_list.insert(0, ('_types', 1)) | ||||
|  | ||||
|         spec['fields'] = index_list | ||||
|         if spec.get('sparse', False) and len(spec['fields']) > 1: | ||||
|             raise ValueError( | ||||
|                 'Sparse indexes can only have one field in them. ' | ||||
|                 'See https://jira.mongodb.org/browse/SERVER-2193') | ||||
|  | ||||
|         return spec | ||||
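|  | ||||
|     # Editor's sketch (illustrative): given | ||||
|     # | ||||
|     #     class BlogPost(Document): | ||||
|     #         meta = {'indexes': ['-date']} | ||||
|     # | ||||
|     # _build_index_spec resolves '-date' to | ||||
|     #     {'fields': [('_types', 1), ('date', pymongo.DESCENDING)]} | ||||
|     # (the '_types' entry is prepended while inheritance is allowed). | ||||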
|  | ||||
|     @classmethod | ||||
|     def _reset_already_indexed(cls, document=None): | ||||
|         """Helper to reset already indexed, can be useful for testing purposes""" | ||||
|         if document: | ||||
|             cls.__already_indexed.discard(document) | ||||
|         cls.__already_indexed.clear() | ||||
|  | ||||
|  | ||||
|     @property | ||||
|     def _collection(self): | ||||
|         """Property that returns the collection object. This allows us to | ||||
|         perform operations only if the collection is accessed. | ||||
|         """ | ||||
|         if self._document not in QuerySet.__already_indexed: | ||||
|             # Ensure collection exists | ||||
|             db = self._document._get_db() | ||||
|             if self._collection_obj.name not in db.collection_names(): | ||||
|                 self._document._collection = None | ||||
|                 self._collection_obj = self._document._get_collection() | ||||
|  | ||||
|             QuerySet.__already_indexed.add(self._document) | ||||
|  | ||||
|             if self._document._meta.get('auto_create_index', True): | ||||
|                 self._ensure_indexes() | ||||
|  | ||||
|         return self._collection_obj | ||||
|  | ||||
|     @property | ||||
| @@ -603,6 +623,7 @@ class QuerySet(object): | ||||
|                         "Can't use index on unsubscriptable field (%s)" % err) | ||||
|                 fields.append(field_name) | ||||
|                 continue | ||||
|  | ||||
|             if field is None: | ||||
|                 # Look up first field from the document | ||||
|                 if field_name == 'pk': | ||||
| @@ -611,8 +632,8 @@ class QuerySet(object): | ||||
|                 if field_name in document._fields: | ||||
|                     field = document._fields[field_name] | ||||
|                 elif document._dynamic: | ||||
|                     from base import BaseDynamicField | ||||
|                     field = BaseDynamicField(db_field=field_name) | ||||
|                     from fields import DynamicField | ||||
|                     field = DynamicField(db_field=field_name) | ||||
|                 else: | ||||
|                     raise InvalidQueryError('Cannot resolve field "%s"' | ||||
|                                                 % field_name) | ||||
| @@ -620,6 +641,9 @@ class QuerySet(object): | ||||
|                 from mongoengine.fields import ReferenceField, GenericReferenceField | ||||
|                 if isinstance(field, (ReferenceField, GenericReferenceField)): | ||||
|                     raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) | ||||
|                 if hasattr(getattr(field, 'field', None), 'lookup_member'): | ||||
|                     new_field = field.field.lookup_member(field_name) | ||||
|                 else: | ||||
|                     # Look up subfield on the previous field | ||||
|                     new_field = field.lookup_member(field_name) | ||||
|                 from base import ComplexBaseField | ||||
| @@ -680,7 +704,7 @@ class QuerySet(object): | ||||
|                 cleaned_fields = [] | ||||
|                 for field in fields: | ||||
|                     append_field = True | ||||
|                     if isinstance(field, str): | ||||
|                     if isinstance(field, basestring): | ||||
|                         parts.append(field) | ||||
|                         append_field = False | ||||
|                     else: | ||||
| @@ -781,15 +805,19 @@ class QuerySet(object): | ||||
|         dictionary of default values for the new document may be provided as a | ||||
|         keyword argument called :attr:`defaults`. | ||||
|  | ||||
|         .. note:: This requires two separate operations and therefore a | ||||
|             race condition exists.  Because there are no transactions in | ||||
|             MongoDB, other approaches should be investigated to ensure you | ||||
|             don't accidentally duplicate data when using this method. | ||||
|  | ||||
|         :param write_options: optional extra keyword arguments used if we | ||||
|             have to create a new document. | ||||
|             Passes any write_options onto :meth:`~mongoengine.Document.save` | ||||
|  | ||||
|         .. versionadded:: 0.3 | ||||
|  | ||||
|         :param auto_save: whether the object is saved automatically if not found. | ||||
|  | ||||
|         .. versionadded:: 0.6 | ||||
|         .. versionchanged:: 0.6 - added `auto_save` | ||||
|         .. versionadded:: 0.3 | ||||
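|  | ||||
|         A sketch of typical usage (``Person`` is a hypothetical model): | ||||
|  | ||||
|         .. code-block:: python | ||||
|  | ||||
|             person, created = Person.objects.get_or_create( | ||||
|                 name="Ross", defaults={'age': 30}) | ||||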
|         """ | ||||
|         defaults = query.get('defaults', {}) | ||||
|         if 'defaults' in query: | ||||
| @@ -824,11 +852,21 @@ class QuerySet(object): | ||||
|             result = None | ||||
|         return result | ||||
|  | ||||
|     def insert(self, doc_or_docs, load_bulk=True): | ||||
|     def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None): | ||||
|         """bulk insert documents | ||||
|  | ||||
|         If ``safe=True`` and the operation is unsuccessful, an | ||||
|         :class:`~mongoengine.OperationError` will be raised. | ||||
|  | ||||
|         :param doc_or_docs: a document or list of documents to be inserted | ||||
|         :param load_bulk: (optional) if True, returns the list of document instances | ||||
|         :param safe: check if the operation succeeded before returning | ||||
|         :param write_options: Extra keyword arguments are passed down to | ||||
|                 :meth:`~pymongo.collection.Collection.insert`, where they are | ||||
|                 used as options for the resultant ``getLastError`` command. | ||||
|                 For example, ``insert(..., write_options={'w': 2, 'fsync': True})`` | ||||
|                 will wait until at least two servers have recorded the write and | ||||
|                 will force an fsync on each server being written to. | ||||
|  | ||||
|         By default returns document instances, set ``load_bulk`` to False to | ||||
|         return just ``ObjectIds`` | ||||
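|  | ||||
|         A sketch of bulk insertion (``Comment`` is a hypothetical model): | ||||
|  | ||||
|         .. code-block:: python | ||||
|  | ||||
|             comments = [Comment(text='first'), Comment(text='second')] | ||||
|             ids = Comment.objects.insert(comments, load_bulk=False, safe=True) | ||||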
| @@ -837,6 +875,10 @@ class QuerySet(object): | ||||
|         """ | ||||
|         from document import Document | ||||
|  | ||||
|         if not write_options: | ||||
|             write_options = {} | ||||
|         write_options.update({'safe': safe}) | ||||
|  | ||||
|         docs = doc_or_docs | ||||
|         return_one = False | ||||
|         if isinstance(docs, Document) or issubclass(docs.__class__, Document): | ||||
| @@ -854,7 +896,13 @@ class QuerySet(object): | ||||
|             raw.append(doc.to_mongo()) | ||||
|  | ||||
|         signals.pre_bulk_insert.send(self._document, documents=docs) | ||||
|         ids = self._collection.insert(raw) | ||||
|         try: | ||||
|             ids = self._collection.insert(raw, **write_options) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             if u'duplicate key' in unicode(err): | ||||
|                 message = u'Tried to save duplicate unique keys (%s)' | ||||
|             raise OperationError(message % unicode(err)) | ||||
|  | ||||
|         if not load_bulk: | ||||
|             signals.post_bulk_insert.send( | ||||
| @@ -907,6 +955,7 @@ class QuerySet(object): | ||||
|     def next(self): | ||||
|         """Wrap the result in a :class:`~mongoengine.Document` object. | ||||
|         """ | ||||
|         self._iter = True | ||||
|         try: | ||||
|             if self._limit == 0: | ||||
|                 raise StopIteration | ||||
| @@ -923,6 +972,7 @@ class QuerySet(object): | ||||
|  | ||||
|         .. versionadded:: 0.3 | ||||
|         """ | ||||
|         self._iter = False | ||||
|         self._cursor.rewind() | ||||
|  | ||||
|     def count(self): | ||||
| @@ -1273,11 +1323,17 @@ class QuerySet(object): | ||||
|             document_cls, field_name = rule_entry | ||||
|             rule = doc._meta['delete_rules'][rule_entry] | ||||
|             if rule == CASCADE: | ||||
|                 document_cls.objects(**{field_name + '__in': self}).delete(safe=safe) | ||||
|                 ref_q = document_cls.objects(**{field_name + '__in': self}) | ||||
|                 if doc != document_cls or (doc == document_cls and ref_q.count() > 0): | ||||
|                     ref_q.delete(safe=safe) | ||||
|             elif rule == NULLIFY: | ||||
|                 document_cls.objects(**{field_name + '__in': self}).update( | ||||
|                         safe_update=safe, | ||||
|                         **{'unset__%s' % field_name: 1}) | ||||
|             elif rule == PULL: | ||||
|                 document_cls.objects(**{field_name + '__in': self}).update( | ||||
|                         safe_update=safe, | ||||
|                         **{'pull_all__%s' % field_name: self}) | ||||
|  | ||||
|         self._collection.remove(self._query, safe=safe) | ||||
|  | ||||
| @@ -1318,7 +1374,7 @@ class QuerySet(object): | ||||
|                 cleaned_fields = [] | ||||
|                 for field in fields: | ||||
|                     append_field = True | ||||
|                     if isinstance(field, str): | ||||
|                     if isinstance(field, basestring): | ||||
|                         # Convert the S operator to $ | ||||
|                         if field == 'S': | ||||
|                             field = '$' | ||||
| @@ -1332,18 +1388,34 @@ class QuerySet(object): | ||||
|                 # Convert value to proper value | ||||
|                 field = cleaned_fields[-1] | ||||
|  | ||||
|                 if op in (None, 'set', 'push', 'pull', 'addToSet'): | ||||
|                 if op in (None, 'set', 'push', 'pull'): | ||||
|                     if field.required or value is not None: | ||||
|                         value = field.prepare_query_value(op, value) | ||||
|                 elif op in ('pushAll', 'pullAll'): | ||||
|                     value = [field.prepare_query_value(op, v) for v in value] | ||||
|                 elif op == 'addToSet': | ||||
|                     if isinstance(value, (list, tuple, set)): | ||||
|                         value = [field.prepare_query_value(op, v) for v in value] | ||||
|                     elif field.required or value is not None: | ||||
|                         value = field.prepare_query_value(op, value) | ||||
|  | ||||
|             key = '.'.join(parts) | ||||
|  | ||||
|             if not op: | ||||
|                 raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value") | ||||
|  | ||||
|             if op: | ||||
|             if 'pull' in op and '.' in key: | ||||
|                 # Dot operators don't work on pull operations; | ||||
|                 # nested dict syntax is used instead | ||||
|                 if op == 'pullAll': | ||||
|                     raise InvalidQueryError("pullAll operations only support a single field depth") | ||||
|  | ||||
|                 parts.reverse() | ||||
|                 for key in parts: | ||||
|                     value = {key: value} | ||||
|             elif op == 'addToSet' and isinstance(value, list): | ||||
|                 value = {key: {"$each": value}} | ||||
|             else: | ||||
|                 value = {key: value} | ||||
|             key = '$' + op | ||||
|  | ||||
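| # Editor's sketch (illustrative): passing a list to ``addToSet`` now emits | ||||
| # MongoDB's ``$each`` form, e.g. | ||||
| # | ||||
| #     Post.objects(id=post.id).update(add_to_set__tags=['db', 'mongo']) | ||||
| #     # -> update document: {'$addToSet': {'tags': {'$each': ['db', 'mongo']}}} | ||||
|  | ||||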
| @@ -1435,8 +1507,6 @@ class QuerySet(object): | ||||
|         def lookup(obj, name): | ||||
|             chunks = name.split('__') | ||||
|             for chunk in chunks: | ||||
|                 if hasattr(obj, '_db_field_map'): | ||||
|                     chunk = obj._db_field_map.get(chunk, chunk) | ||||
|                 obj = getattr(obj, chunk) | ||||
|             return obj | ||||
|  | ||||
| @@ -1648,10 +1718,11 @@ class QuerySet(object): | ||||
|     def _item_frequencies_map_reduce(self, field, normalize=False): | ||||
|         map_func = """ | ||||
|             function() { | ||||
|                 path = '{{~%(field)s}}'.split('.'); | ||||
|                 field = this; | ||||
|                 var path = '{{~%(field)s}}'.split('.'); | ||||
|                 var field = this; | ||||
|  | ||||
|                 for (p in path) { | ||||
|                     if (field) | ||||
|                     if (typeof field != 'undefined') | ||||
|                        field = field[path[p]]; | ||||
|                     else | ||||
|                        break; | ||||
| @@ -1660,7 +1731,7 @@ class QuerySet(object): | ||||
|                     field.forEach(function(item) { | ||||
|                         emit(item, 1); | ||||
|                     }); | ||||
|                 } else if (field) { | ||||
|                 } else if (typeof field != 'undefined') { | ||||
|                     emit(field, 1); | ||||
|                 } else { | ||||
|                     emit(null, 1); | ||||
| @@ -1684,12 +1755,12 @@ class QuerySet(object): | ||||
|             if isinstance(key, float): | ||||
|                 if int(key) == key: | ||||
|                     key = int(key) | ||||
|                 key = str(key) | ||||
|             frequencies[key] = f.value | ||||
|             frequencies[key] = int(f.value) | ||||
|  | ||||
|         if normalize: | ||||
|             count = sum(frequencies.values()) | ||||
|             frequencies = dict([(k, v / count) for k, v in frequencies.items()]) | ||||
|             frequencies = dict([(k, float(v) / count) | ||||
|                                 for k, v in frequencies.items()]) | ||||
|  | ||||
|         return frequencies | ||||
|  | ||||
| @@ -1697,12 +1768,11 @@ class QuerySet(object): | ||||
|         """Uses exec_js to execute""" | ||||
|         freq_func = """ | ||||
|             function(path) { | ||||
|                 path = path.split('.'); | ||||
|                 var path = path.split('.'); | ||||
|  | ||||
|                 if (options.normalize) { | ||||
|                 var total = 0.0; | ||||
|                 db[collection].find(query).forEach(function(doc) { | ||||
|                         field = doc; | ||||
|                     var field = doc; | ||||
|                     for (p in path) { | ||||
|                         if (field) | ||||
|                             field = field[path[p]]; | ||||
| @@ -1715,13 +1785,11 @@ class QuerySet(object): | ||||
|                        total++; | ||||
|                     } | ||||
|                 }); | ||||
|                 } | ||||
|  | ||||
|                 var frequencies = {}; | ||||
|                 var types = {}; | ||||
|                 var inc = 1.0; | ||||
|                 if (options.normalize) { | ||||
|                     inc /= total; | ||||
|                 } | ||||
|  | ||||
|                 db[collection].find(query).forEach(function(doc) { | ||||
|                     field = doc; | ||||
|                     for (p in path) { | ||||
| @@ -1736,34 +1804,48 @@ class QuerySet(object): | ||||
|                         }); | ||||
|                     } else { | ||||
|                         var item = field; | ||||
|                         types[item] = item; | ||||
|                         frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); | ||||
|                     } | ||||
|                 }); | ||||
|                 return frequencies; | ||||
|                 return [total, frequencies, types]; | ||||
|             } | ||||
|         """ | ||||
|         data = self.exec_js(freq_func, field, normalize=normalize) | ||||
|         if 'undefined' in data: | ||||
|             data[None] = data['undefined'] | ||||
|             del(data['undefined']) | ||||
|         return data | ||||
|         total, data, types = self.exec_js(freq_func, field) | ||||
|         values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) | ||||
|  | ||||
|         if normalize: | ||||
|             values = dict([(k, float(v) / total) for k, v in values.items()]) | ||||
|  | ||||
|         frequencies = {} | ||||
|         for k, v in values.iteritems(): | ||||
|             if isinstance(k, float): | ||||
|                 if int(k) == k: | ||||
|                     k = int(k) | ||||
|  | ||||
|             frequencies[k] = v | ||||
|  | ||||
|         return frequencies | ||||
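|  | ||||
|     # Editor's note (illustrative): with ``normalize=True`` the counts become | ||||
|     # fractions that sum to 1, e.g. | ||||
|     # | ||||
|     #     {'ross': 2, 'guido': 2}  ->  {'ross': 0.5, 'guido': 0.5} | ||||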
|  | ||||
|     def __repr__(self): | ||||
|         limit = REPR_OUTPUT_SIZE + 1 | ||||
|         start = (0 if self._skip is None else self._skip) | ||||
|         if self._limit is None: | ||||
|             stop = start + limit | ||||
|         if self._limit is not None: | ||||
|             if self._limit - start > limit: | ||||
|                 stop = start + limit | ||||
|             else: | ||||
|                 stop = self._limit | ||||
|         """Provides the string representation of the QuerySet | ||||
|  | ||||
|         .. versionchanged:: 0.6.13 Now doesn't modify the cursor | ||||
|         """ | ||||
|  | ||||
|         if self._iter: | ||||
|             return '.. queryset mid-iteration ..' | ||||
|  | ||||
|         data = [] | ||||
|         for i in xrange(REPR_OUTPUT_SIZE + 1): | ||||
|             try: | ||||
|             data = list(self[start:stop]) | ||||
|         except pymongo.errors.InvalidOperation: | ||||
|             return ".. queryset mid-iteration .." | ||||
|                 data.append(self.next()) | ||||
|             except StopIteration: | ||||
|                 break | ||||
|         if len(data) > REPR_OUTPUT_SIZE: | ||||
|             data[-1] = "...(remaining elements truncated)..." | ||||
|  | ||||
|         self.rewind() | ||||
|         return repr(data) | ||||
|  | ||||
|     def select_related(self, max_depth=1): | ||||
| @@ -1779,6 +1861,17 @@ class QuerySet(object): | ||||
|  | ||||
|  | ||||
| class QuerySetManager(object): | ||||
|     """ | ||||
|     The default QuerySet Manager. | ||||
|  | ||||
|     Custom QuerySet Manager functions can extend this class and users can | ||||
|     add extra queryset functionality.  Any custom manager methods must accept | ||||
|     a :class:`~mongoengine.Document` class as their first argument and a | ||||
|     :class:`~mongoengine.queryset.QuerySet` as their second argument. | ||||
|  | ||||
|     The method should return a :class:`~mongoengine.queryset.QuerySet`, | ||||
|     usually the same one that was passed in, modified in some way. | ||||
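|  | ||||
|     A sketch of a custom manager (``live_posts`` is hypothetical): | ||||
|  | ||||
|     .. code-block:: python | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             published = BooleanField(default=False) | ||||
|  | ||||
|             @queryset_manager | ||||
|             def live_posts(doc_cls, queryset): | ||||
|                 return queryset.filter(published=True) | ||||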
|     """ | ||||
|  | ||||
|     get_queryset = None | ||||
|  | ||||
| @@ -1799,10 +1892,13 @@ class QuerySetManager(object): | ||||
|         queryset_class = owner._meta['queryset_class'] or QuerySet | ||||
|         queryset = queryset_class(owner, owner._get_collection()) | ||||
|         if self.get_queryset: | ||||
|             if self.get_queryset.func_code.co_argcount == 1: | ||||
|             arg_count = self.get_queryset.func_code.co_argcount | ||||
|             if arg_count == 1: | ||||
|                 queryset = self.get_queryset(queryset) | ||||
|             else: | ||||
|             elif arg_count == 2: | ||||
|                 queryset = self.get_queryset(owner, queryset) | ||||
|             else: | ||||
|                 queryset = partial(self.get_queryset, owner, queryset) | ||||
|         return queryset | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -5,7 +5,7 @@ | ||||
| %define srcname mongoengine | ||||
|  | ||||
| Name:           python-%{srcname} | ||||
| Version:        0.6.6 | ||||
| Version:        0.6.19 | ||||
| Release:        1%{?dist} | ||||
| Summary:        A Python Document-Object Mapper for working with MongoDB | ||||
|  | ||||
| @@ -51,24 +51,4 @@ rm -rf $RPM_BUILD_ROOT | ||||
| # %{python_sitearch}/* | ||||
|  | ||||
| %changelog | ||||
| * Wed Apr 24 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5 | ||||
| - 0.6.6 released | ||||
| * Wed Apr 18 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5 | ||||
| - 0.6.5 released | ||||
| * Wed Apr 18 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5 | ||||
| - 0.6.4 released | ||||
| * Wed Mar 24 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5 | ||||
| - 0.6.3 released | ||||
| * Wed Mar 22 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5 | ||||
| - 0.6.2 released | ||||
| * Wed Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6.5 | ||||
| - 0.6.1 released | ||||
| * Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6 | ||||
| - 0.6 released | ||||
| * Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1 | ||||
| - Update to latest dev version | ||||
| - Add PIL dependency for ImageField | ||||
| * Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1 | ||||
| - Update version | ||||
| * Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1 | ||||
| - Initial version | ||||
| * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html | ||||
							
								
								
									
setup.cfg (new file, 13 lines)
							| @@ -0,0 +1,13 @@ | ||||
| [aliases] | ||||
| test = nosetests | ||||
|  | ||||
| [nosetests] | ||||
| verbosity = 2 | ||||
| detailed-errors = 1 | ||||
| #with-coverage = 1 | ||||
| #cover-erase = 1 | ||||
| #cover-html = 1 | ||||
| #cover-html-dir = ../htmlcov | ||||
| #cover-package = mongoengine | ||||
| where = tests | ||||
| #tests = test_bugfix.py | ||||
							
								
								
									
setup.py (5 lines changed)
							| @@ -35,7 +35,7 @@ CLASSIFIERS = [ | ||||
|  | ||||
| setup(name='mongoengine', | ||||
|       version=VERSION, | ||||
|       packages=find_packages(), | ||||
|       packages=find_packages(exclude=('tests',)), | ||||
|       author='Harry Marr', | ||||
|       author_email='harry.marr@{nospam}gmail.com', | ||||
|       maintainer="Ross Lawley", | ||||
| @@ -48,6 +48,5 @@ setup(name='mongoengine', | ||||
|       platforms=['any'], | ||||
|       classifiers=CLASSIFIERS, | ||||
|       install_requires=['pymongo'], | ||||
|       test_suite='tests', | ||||
|       tests_require=['blinker', 'django>=1.3', 'PIL'] | ||||
|       tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] | ||||
| ) | ||||
|   | ||||
| @@ -1,8 +1,11 @@ | ||||
| import unittest | ||||
| import datetime | ||||
| import pymongo | ||||
| import unittest | ||||
| 
 | ||||
| import mongoengine.connection | ||||
| 
 | ||||
| from bson.tz_util import utc | ||||
| 
 | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db, get_connection, ConnectionError | ||||
| 
 | ||||
| @@ -65,6 +68,31 @@ class ConnectionTest(unittest.TestCase): | ||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||
|         self.assertEqual(db.name, 'mongoenginetest2') | ||||
| 
 | ||||
|     def test_connection_kwargs(self): | ||||
|         """Ensure that connection kwargs get passed to pymongo. | ||||
|         """ | ||||
|         connect('mongoenginetest', alias='t1', tz_aware=True) | ||||
|         conn = get_connection('t1') | ||||
| 
 | ||||
|         self.assertTrue(conn.tz_aware) | ||||
| 
 | ||||
|         connect('mongoenginetest2', alias='t2') | ||||
|         conn = get_connection('t2') | ||||
|         self.assertFalse(conn.tz_aware) | ||||
| 
 | ||||
|     def test_datetime(self): | ||||
|         connect('mongoenginetest', tz_aware=True) | ||||
|         d = datetime.datetime(2010, 5, 5, tzinfo=utc) | ||||
| 
 | ||||
|         class DateDoc(Document): | ||||
|             the_date = DateTimeField(required=True) | ||||
| 
 | ||||
|         DateDoc.drop_collection() | ||||
|         DateDoc(the_date=d).save() | ||||
| 
 | ||||
|         date_doc = DateDoc.objects.first() | ||||
|         self.assertEqual(d, date_doc.the_date) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -810,3 +810,56 @@ class FieldTest(unittest.TestCase): | ||||
|         room = Room.objects.first().select_related() | ||||
|         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) | ||||
|         self.assertEquals(room.staffs_with_position[1]['staff'], bob) | ||||
| 
 | ||||
|     def test_document_reload_no_inheritance(self): | ||||
|         class Foo(Document): | ||||
|             meta = {'allow_inheritance': False} | ||||
|             bar = ReferenceField('Bar') | ||||
|             baz = ReferenceField('Baz') | ||||
| 
 | ||||
|         class Bar(Document): | ||||
|             meta = {'allow_inheritance': False} | ||||
|             msg = StringField(required=True, default='Blammo!') | ||||
| 
 | ||||
|         class Baz(Document): | ||||
|             meta = {'allow_inheritance': False} | ||||
|             msg = StringField(required=True, default='Kaboom!') | ||||
| 
 | ||||
|         Foo.drop_collection() | ||||
|         Bar.drop_collection() | ||||
|         Baz.drop_collection() | ||||
| 
 | ||||
|         bar = Bar() | ||||
|         bar.save() | ||||
|         baz = Baz() | ||||
|         baz.save() | ||||
|         foo = Foo() | ||||
|         foo.bar = bar | ||||
|         foo.baz = baz | ||||
|         foo.save() | ||||
|         foo.reload() | ||||
| 
 | ||||
|         self.assertEquals(type(foo.bar), Bar) | ||||
|         self.assertEquals(type(foo.baz), Baz) | ||||
| 
 | ||||
|     def test_list_lookup_not_checked_in_map(self): | ||||
|         """Ensure we dereference list data correctly | ||||
|         """ | ||||
|         class Comment(Document): | ||||
|             id = IntField(primary_key=True) | ||||
|             text = StringField() | ||||
| 
 | ||||
|         class Message(Document): | ||||
|             id = IntField(primary_key=True) | ||||
|             comments = ListField(ReferenceField(Comment)) | ||||
| 
 | ||||
|         Comment.drop_collection() | ||||
|         Message.drop_collection() | ||||
| 
 | ||||
|         c1 = Comment(id=0, text='zero').save() | ||||
|         c2 = Comment(id=1, text='one').save() | ||||
|         Message(id=1, comments=[c1, c2]).save() | ||||
| 
 | ||||
|         msg = Message.objects.get(id=1) | ||||
|         self.assertEqual(0, msg.comments[0].id) | ||||
|         self.assertEqual(1, msg.comments[1].id) | ||||
| @@ -103,3 +103,8 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): | ||||
|         MongoSession.drop_collection() | ||||
|         super(MongoDBSessionTest, self).setUp() | ||||
| 
 | ||||
|     def test_first_save(self): | ||||
|         session = SessionStore() | ||||
|         session['test'] = True | ||||
|         session.save() | ||||
|         self.assertTrue('test' in session) | ||||
| @@ -1,3 +1,4 @@ | ||||
| import os | ||||
| import pickle | ||||
| import pymongo | ||||
| import bson | ||||
| @@ -6,13 +7,15 @@ import warnings | ||||
| 
 | ||||
| from datetime import datetime | ||||
| 
 | ||||
| from fixtures import Base, Mixin, PickleEmbedded, PickleTest | ||||
| from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest | ||||
| 
 | ||||
| from mongoengine import * | ||||
| from mongoengine.base import NotRegistered, InvalidDocumentError | ||||
| from mongoengine.queryset import InvalidQueryError | ||||
| from mongoengine.connection import get_db | ||||
| 
 | ||||
| TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | ||||
| 
 | ||||
| 
 | ||||
| class DocumentTest(unittest.TestCase): | ||||
| 
 | ||||
| @@ -661,6 +664,49 @@ class DocumentTest(unittest.TestCase): | ||||
| 
 | ||||
|         BlogPost.drop_collection() | ||||
| 
 | ||||
|     def test_db_field_load(self): | ||||
|         """Ensure we load data correctly | ||||
|         """ | ||||
|         class Person(Document): | ||||
|             name = StringField(required=True) | ||||
|             _rank = StringField(required=False, db_field="rank") | ||||
| 
 | ||||
|             @property | ||||
|             def rank(self): | ||||
|                 return self._rank or "Private" | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         Person(name="Jack", _rank="Corporal").save() | ||||
| 
 | ||||
|         Person(name="Fred").save() | ||||
| 
 | ||||
|         self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal") | ||||
|         self.assertEquals(Person.objects.get(name="Fred").rank, "Private") | ||||
| 
 | ||||
|     def test_db_embedded_doc_field_load(self): | ||||
|         """Ensure we load embedded document data correctly | ||||
|         """ | ||||
|         class Rank(EmbeddedDocument): | ||||
|             title = StringField(required=True) | ||||
| 
 | ||||
|         class Person(Document): | ||||
|             name = StringField(required=True) | ||||
|             rank_ = EmbeddedDocumentField(Rank, required=False, db_field='rank') | ||||
| 
 | ||||
|             @property | ||||
|             def rank(self): | ||||
|                 return self.rank_.title if self.rank_ is not None else "Private" | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         Person(name="Jack", rank_=Rank(title="Corporal")).save() | ||||
| 
 | ||||
|         Person(name="Fred").save() | ||||
| 
 | ||||
|         self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal") | ||||
|         self.assertEquals(Person.objects.get(name="Fred").rank, "Private") | ||||
| 
 | ||||
|     def test_explicit_geo2d_index(self): | ||||
|         """Ensure that geo2d indexes work when created via meta[indexes] | ||||
|         """ | ||||
| @@ -741,6 +787,28 @@ class DocumentTest(unittest.TestCase): | ||||
|         self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1']) | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|     def test_disable_index_creation(self): | ||||
|         """Tests setting auto_create_index to False on the connection will | ||||
|         disable any index generation. | ||||
|         """ | ||||
|         class User(Document): | ||||
|             meta = { | ||||
|                 'indexes': ['user_guid'], | ||||
|                 'auto_create_index': False | ||||
|             } | ||||
|             user_guid = StringField(required=True) | ||||
| 
 | ||||
| 
 | ||||
|         User.drop_collection() | ||||
| 
 | ||||
|         u = User(user_guid='123') | ||||
|         u.save() | ||||
| 
 | ||||
|         self.assertEquals(1, User.objects.count()) | ||||
|         info = User.objects._collection.index_information() | ||||
|         self.assertEqual(info.keys(), ['_id_']) | ||||
|         User.drop_collection() | ||||
| 
 | ||||
|     def test_embedded_document_index(self): | ||||
|         """Tests settings an index on an embedded document | ||||
|         """ | ||||
| @@ -804,15 +872,26 @@ class DocumentTest(unittest.TestCase): | ||||
| 
 | ||||
|     def test_geo_indexes_recursion(self): | ||||
| 
 | ||||
|         class User(Document): | ||||
|             channel = ReferenceField('Channel') | ||||
|         class Location(Document): | ||||
|             name = StringField() | ||||
|             location = GeoPointField() | ||||
| 
 | ||||
|         class Channel(Document): | ||||
|             user = ReferenceField('User') | ||||
|             location = GeoPointField() | ||||
|         class Parent(Document): | ||||
|             name = StringField() | ||||
|             location = ReferenceField(Location) | ||||
| 
 | ||||
|         self.assertEquals(len(User._geo_indices()), 2) | ||||
|         Location.drop_collection() | ||||
|         Parent.drop_collection() | ||||
| 
 | ||||
|         list(Parent.objects) | ||||
| 
 | ||||
|         collection = Parent._get_collection() | ||||
|         info = collection.index_information() | ||||
| 
 | ||||
|         self.assertFalse('location_2d' in info) | ||||
| 
 | ||||
|         self.assertEquals(len(Parent._geo_indices()), 0) | ||||
|         self.assertEquals(len(Location._geo_indices()), 1) | ||||
| 
 | ||||
|     def test_covered_index(self): | ||||
|         """Ensure that covered indexes can be used | ||||
| @@ -842,6 +921,26 @@ class DocumentTest(unittest.TestCase): | ||||
|         query_plan = Test.objects(a=1).only('a').exclude('id').explain() | ||||
|         self.assertTrue(query_plan['indexOnly']) | ||||
| 
 | ||||
|     def test_index_on_id(self): | ||||
| 
 | ||||
|         class BlogPost(Document): | ||||
|             meta = { | ||||
|                 'indexes': [ | ||||
|                     ['categories', 'id'] | ||||
|                 ], | ||||
|                 'allow_inheritance': False | ||||
|             } | ||||
| 
 | ||||
|             title = StringField(required=True) | ||||
|             description = StringField(required=True) | ||||
|             categories = ListField() | ||||
| 
 | ||||
|         BlogPost.drop_collection() | ||||
| 
 | ||||
|         indexes = BlogPost.objects._collection.index_information() | ||||
|         self.assertEquals(indexes['categories_1__id_1']['key'], | ||||
|                                  [('categories', 1), ('_id', 1)]) | ||||
| 
 | ||||
|     def test_hint(self): | ||||
| 
 | ||||
|         class BlogPost(Document): | ||||
| @@ -1217,6 +1316,22 @@ class DocumentTest(unittest.TestCase): | ||||
|         comment.date = datetime.now() | ||||
|         comment.validate() | ||||
| 
 | ||||
|     def test_embedded_db_field_validate(self): | ||||
| 
 | ||||
|         class SubDoc(EmbeddedDocument): | ||||
|             val = IntField() | ||||
| 
 | ||||
|         class Doc(Document): | ||||
|             e = EmbeddedDocumentField(SubDoc, db_field='eb') | ||||
| 
 | ||||
|         Doc.drop_collection() | ||||
| 
 | ||||
|         Doc(e=SubDoc(val=15)).save() | ||||
| 
 | ||||
|         doc = Doc.objects.first() | ||||
|         doc.validate() | ||||
|         self.assertEquals([None, 'e'], doc._data.keys()) | ||||
| 
 | ||||
|     def test_save(self): | ||||
|         """Ensure that a document may be saved in the database. | ||||
|         """ | ||||
| @@ -1286,6 +1401,30 @@ class DocumentTest(unittest.TestCase): | ||||
|         p0.name = 'wpjunior' | ||||
|         p0.save() | ||||
| 
 | ||||
|     def test_save_max_recursion_not_hit_with_file_field(self): | ||||
| 
 | ||||
|         class Foo(Document): | ||||
|             name = StringField() | ||||
|             picture = FileField() | ||||
|             bar = ReferenceField('self') | ||||
| 
 | ||||
|         Foo.drop_collection() | ||||
| 
 | ||||
|         a = Foo(name='hello') | ||||
|         a.save() | ||||
| 
 | ||||
|         a.bar = a | ||||
|         a.picture = open(TEST_IMAGE_PATH, 'rb') | ||||
|         a.save() | ||||
| 
 | ||||
|         # Confirm can save and it resets the changed fields without hitting | ||||
|         # max recursion error | ||||
|         b = Foo.objects.with_id(a.id) | ||||
|         b.name='world' | ||||
|         b.save() | ||||
| 
 | ||||
|         # assertEquals takes a message as its third argument, so compare pairwise | ||||
|         self.assertEquals(b.picture, b.bar.picture) | ||||
|         self.assertEquals(b.picture, b.bar.bar.picture) | ||||
| 
 | ||||
|     def test_save_cascades(self): | ||||
| 
 | ||||
|         class Person(Document): | ||||
| @@ -1549,6 +1688,77 @@ class DocumentTest(unittest.TestCase): | ||||
|         site = Site.objects.first() | ||||
|         self.assertEqual(site.page.log_message, "Error: Dummy message") | ||||
| 
 | ||||
|     def test_circular_reference_deltas(self): | ||||
| 
 | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             owns = ListField(ReferenceField('Organization')) | ||||
| 
 | ||||
|         class Organization(Document): | ||||
|             name = StringField() | ||||
|             owner = ReferenceField('Person') | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
|         Organization.drop_collection() | ||||
| 
 | ||||
|         person = Person(name="owner") | ||||
|         person.save() | ||||
|         organization = Organization(name="company") | ||||
|         organization.save() | ||||
| 
 | ||||
|         person.owns.append(organization) | ||||
|         organization.owner = person | ||||
| 
 | ||||
|         person.save() | ||||
|         organization.save() | ||||
| 
 | ||||
|         p = Person.objects[0].select_related() | ||||
|         o = Organization.objects.first() | ||||
|         self.assertEquals(p.owns[0], o) | ||||
|         self.assertEquals(o.owner, p) | ||||
| 
 | ||||
|     def test_circular_reference_deltas_2(self): | ||||
| 
 | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             owns = ListField(ReferenceField('Organization')) | ||||
|             employer = ReferenceField('Organization') | ||||
|  | ||||
|         class Organization(Document): | ||||
|             name = StringField() | ||||
|             owner = ReferenceField('Person') | ||||
|             employees = ListField(ReferenceField('Person')) | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
|         Organization.drop_collection() | ||||
| 
 | ||||
|         person = Person( name="owner" ) | ||||
|         person.save() | ||||
| 
 | ||||
|         employee = Person( name="employee" ) | ||||
|         employee.save() | ||||
| 
 | ||||
|         organization = Organization( name="company" ) | ||||
|         organization.save() | ||||
| 
 | ||||
|         person.owns.append(organization) | ||||
|         organization.owner = person | ||||
| 
 | ||||
|         organization.employees.append(employee) | ||||
|         employee.employer = organization | ||||
| 
 | ||||
|         person.save() | ||||
|         organization.save() | ||||
|         employee.save() | ||||
| 
 | ||||
|         p = Person.objects.get(name="owner") | ||||
|         e = Person.objects.get(name="employee") | ||||
|         o = Organization.objects.first() | ||||
| 
 | ||||
|         self.assertEquals(p.owns[0], o) | ||||
|         self.assertEquals(o.owner, p) | ||||
|         self.assertEquals(e.employer, o) | ||||
| 
 | ||||
|     def test_delta(self): | ||||
| 
 | ||||
|         class Doc(Document): | ||||
| @@ -2376,6 +2586,22 @@ class DocumentTest(unittest.TestCase): | ||||
| 
 | ||||
|         self.assertRaises(InvalidDocumentError, throw_invalid_document_error) | ||||
| 
 | ||||
|     def test_invalid_son(self): | ||||
|         """Raise an error if loading invalid data""" | ||||
|         class Occurrence(EmbeddedDocument): | ||||
|             number = IntField() | ||||
| 
 | ||||
|         class Word(Document): | ||||
|             stem = StringField() | ||||
|             count = IntField(default=1) | ||||
|             forms = ListField(StringField(), default=list) | ||||
|             occurs = ListField(EmbeddedDocumentField(Occurrence), default=list) | ||||
| 
 | ||||
|         def raise_invalid_document(): | ||||
|             Word._from_son({'stem': [1,2,3], 'forms': 1, 'count': 'one', 'occurs': {"hello": None}}) | ||||
| 
 | ||||
|         self.assertRaises(InvalidDocumentError, raise_invalid_document) | ||||
| 
 | ||||
|     def test_reverse_delete_rule_cascade_and_nullify(self): | ||||
|         """Ensure that a referenced document is also deleted upon deletion. | ||||
|         """ | ||||
| @@ -2438,6 +2664,40 @@ class DocumentTest(unittest.TestCase): | ||||
|         author.delete() | ||||
|         self.assertEqual(len(BlogPost.objects), 0) | ||||
| 
 | ||||
|     def test_two_way_reverse_delete_rule(self): | ||||
|         """Ensure that Bi-Directional relationships work with | ||||
|         reverse_delete_rule | ||||
|         """ | ||||
| 
 | ||||
|         class Bar(Document): | ||||
|             content = StringField() | ||||
|             foo = ReferenceField('Foo') | ||||
| 
 | ||||
|         class Foo(Document): | ||||
|             content = StringField() | ||||
|             bar = ReferenceField(Bar) | ||||
| 
 | ||||
|         Bar.register_delete_rule(Foo, 'bar', NULLIFY) | ||||
|         Foo.register_delete_rule(Bar, 'foo', NULLIFY) | ||||
| 
 | ||||
| 
 | ||||
|         Bar.drop_collection() | ||||
|         Foo.drop_collection() | ||||
| 
 | ||||
|         b = Bar(content="Hello") | ||||
|         b.save() | ||||
| 
 | ||||
|         f = Foo(content="world", bar=b) | ||||
|         f.save() | ||||
| 
 | ||||
|         b.foo = f | ||||
|         b.save() | ||||
| 
 | ||||
|         f.delete() | ||||
| 
 | ||||
|         self.assertEqual(len(Bar.objects), 1)  # the Bar itself is not deleted | ||||
|         self.assertEqual(Bar.objects.get().foo, None) | ||||
| 
 | ||||
|     def test_invalid_reverse_delete_rules_raise_errors(self): | ||||
| 
 | ||||
|         def throw_invalid_document_error(): | ||||
| @@ -2739,7 +2999,7 @@ class DocumentTest(unittest.TestCase): | ||||
|         self.assertEqual(User.objects.first(), bob) | ||||
|         self.assertEqual(Book.objects.first(), hp) | ||||
| 
 | ||||
|         # DeRefecence | ||||
|         # DeReference | ||||
|         class AuthorBooks(Document): | ||||
|             author = ReferenceField(User) | ||||
|             book = ReferenceField(Book) | ||||
| @@ -2767,6 +3027,18 @@ class DocumentTest(unittest.TestCase): | ||||
|         self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()]) | ||||
|         self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()]) | ||||
| 
 | ||||
|     def test_db_alias_propagates(self): | ||||
|         """db_alias propagates? | ||||
|         """ | ||||
|         class A(Document): | ||||
|             name = StringField() | ||||
|             meta = {"db_alias": "testdb-1", "allow_inheritance": True} | ||||
| 
 | ||||
|         class B(A): | ||||
|             pass | ||||
| 
 | ||||
|         self.assertEquals('testdb-1', B._meta.get('db_alias')) | ||||
| 
 | ||||
|     def test_db_ref_usage(self): | ||||
|         """ DB Ref usage in __raw__ queries """ | ||||
| 
 | ||||
| @@ -2839,5 +3111,114 @@ class DocumentTest(unittest.TestCase): | ||||
|                                         } | ||||
|                                     ) ]), "1,2") | ||||
| 
 | ||||
| 
 | ||||
| class ValidatorErrorTest(unittest.TestCase): | ||||
| 
 | ||||
|     def test_to_dict(self): | ||||
|         """Ensure a ValidationError handles error to_dict correctly. | ||||
|         """ | ||||
|         error = ValidationError('root') | ||||
|         self.assertEquals(error.to_dict(), {}) | ||||
| 
 | ||||
|         # 1st level error schema | ||||
|         error.errors = {'1st': ValidationError('bad 1st'), } | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertEquals(error.to_dict()['1st'], 'bad 1st') | ||||
| 
 | ||||
|         # 2nd level error schema | ||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||
|             '2nd': ValidationError('bad 2nd'), | ||||
|         })} | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertTrue(isinstance(error.to_dict()['1st'], dict)) | ||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||
|         self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd') | ||||
| 
 | ||||
|         # moar levels | ||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||
|             '2nd': ValidationError('bad 2nd', errors={ | ||||
|                 '3rd': ValidationError('bad 3rd', errors={ | ||||
|                     '4th': ValidationError('Inception'), | ||||
|                 }), | ||||
|             }), | ||||
|         })} | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||
|         self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) | ||||
|         self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) | ||||
|         self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'], | ||||
|                          'Inception') | ||||
| 
 | ||||
|         self.assertEquals(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") | ||||
| 
 | ||||
|     def test_model_validation(self): | ||||
| 
 | ||||
|         class User(Document): | ||||
|             username = StringField(primary_key=True) | ||||
|             name = StringField(required=True) | ||||
| 
 | ||||
|         try: | ||||
|             User().validate() | ||||
|         except ValidationError, e: | ||||
|             expected_error_message = """ValidationError(Field is required: ['username', 'name'])""" | ||||
|             self.assertEquals(e.message, expected_error_message) | ||||
|             self.assertEquals(e.to_dict(), { | ||||
|                 'username': 'Field is required', | ||||
|                 'name': 'Field is required'}) | ||||
| 
 | ||||
|     def test_spaces_in_keys(self): | ||||
| 
 | ||||
|         class Embedded(DynamicEmbeddedDocument): | ||||
|             pass | ||||
| 
 | ||||
|         class Doc(DynamicDocument): | ||||
|             pass | ||||
| 
 | ||||
|         Doc.drop_collection() | ||||
|         doc = Doc() | ||||
|         setattr(doc, 'hello world', 1) | ||||
|         doc.save() | ||||
| 
 | ||||
|         one = Doc.objects.filter(**{'hello world': 1}).count() | ||||
|         self.assertEqual(1, one) | ||||
| 
 | ||||
| 
 | ||||
|     def test_fields_rewrite(self): | ||||
|         class BasePerson(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|             meta = {'abstract': True} | ||||
| 
 | ||||
|         class Person(BasePerson): | ||||
|             name = StringField(required=True) | ||||
| 
 | ||||
| 
 | ||||
|         p = Person(age=15) | ||||
|         self.assertRaises(ValidationError, p.validate) | ||||
| 
 | ||||
|     def test_cascaded_save_wrong_reference(self): | ||||
| 
 | ||||
|         class ADocument(Document): | ||||
|             val = IntField() | ||||
| 
 | ||||
|         class BDocument(Document): | ||||
|             a = ReferenceField(ADocument) | ||||
| 
 | ||||
|         ADocument.drop_collection() | ||||
|         BDocument.drop_collection() | ||||
| 
 | ||||
|         a = ADocument() | ||||
|         a.val = 15 | ||||
|         a.save() | ||||
| 
 | ||||
|         b = BDocument() | ||||
|         b.a = a | ||||
|         b.save() | ||||
| 
 | ||||
|         a.delete() | ||||
| 
 | ||||
|         b = BDocument.objects.first() | ||||
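|         # the cascading save should tolerate the dangling reference left by a.delete() | ||||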
|         b.save(cascade=True) | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -2,7 +2,11 @@ import datetime | ||||
| import os | ||||
| import unittest | ||||
| import uuid | ||||
| import StringIO | ||||
| import tempfile | ||||
| import gridfs | ||||
| 
 | ||||
| from bson import Binary | ||||
| from decimal import Decimal | ||||
| 
 | ||||
| from mongoengine import * | ||||
| @@ -18,6 +22,10 @@ class FieldTest(unittest.TestCase): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
| 
 | ||||
|     def tearDown(self): | ||||
|         self.db.drop_collection('fs.files') | ||||
|         self.db.drop_collection('fs.chunks') | ||||
| 
 | ||||
|     def test_default_values(self): | ||||
|         """Ensure that default field values are used when creating a document. | ||||
|         """ | ||||
| @@ -75,7 +83,6 @@ class FieldTest(unittest.TestCase): | ||||
| 
 | ||||
|         # Retrieve data from db and verify it. | ||||
|         ret = HandleNoneFields.objects.all()[0] | ||||
| 
 | ||||
|         self.assertEqual(ret.str_fld, None) | ||||
|         self.assertEqual(ret.int_fld, None) | ||||
|         self.assertEqual(ret.flt_fld, None) | ||||
| @@ -121,6 +128,19 @@ class FieldTest(unittest.TestCase): | ||||
| 
 | ||||
|         self.assertRaises(ValidationError, ret.validate) | ||||
| 
 | ||||
|     def test_int_and_float_ne_operator(self): | ||||
|         class TestDocument(Document): | ||||
|             int_fld = IntField() | ||||
|             float_fld = FloatField() | ||||
| 
 | ||||
|         TestDocument.drop_collection() | ||||
| 
 | ||||
|         TestDocument(int_fld=None, float_fld=None).save() | ||||
|         TestDocument(int_fld=1, float_fld=1).save() | ||||
| 
 | ||||
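|         # __ne=None should match only the document whose fields are actually set | ||||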
|         self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count()) | ||||
|         self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count()) | ||||
| 
 | ||||
|     def test_object_id_validation(self): | ||||
|         """Ensure that invalid values cannot be assigned to string fields. | ||||
|         """ | ||||
| @@ -252,26 +272,55 @@ class FieldTest(unittest.TestCase): | ||||
|         person.admin = 'Yes' | ||||
|         self.assertRaises(ValidationError, person.validate) | ||||
| 
 | ||||
|     def test_uuid_validation(self): | ||||
|         """Ensure that invalid values cannot be assigned to UUID fields. | ||||
|     def test_uuid_field_string(self): | ||||
|         """Test UUID fields storing as String | ||||
|         """ | ||||
|         class Person(Document): | ||||
|             api_key = UUIDField() | ||||
|             api_key = UUIDField(binary=False) | ||||
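|             # binary=False stores the UUID as its string representation | ||||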
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         uu = uuid.uuid4() | ||||
|         Person(api_key=uu).save() | ||||
|         self.assertEqual(1, Person.objects(api_key=uu).count()) | ||||
| 
 | ||||
|         person = Person() | ||||
|         # any uuid type is valid | ||||
|         person.api_key = uuid.uuid4() | ||||
|         person.validate() | ||||
|         person.api_key = uuid.uuid1() | ||||
|         valid = (uuid.uuid4(), uuid.uuid1()) | ||||
|         for api_key in valid: | ||||
|             person.api_key = api_key | ||||
|             person.validate() | ||||
| 
 | ||||
|         # last g cannot belong to an hex number | ||||
|         person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g' | ||||
|         invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', | ||||
|                    '9d159858-549b-4975-9f98-dd2f987c113') | ||||
|         for api_key in invalid: | ||||
|             person.api_key = api_key | ||||
|             self.assertRaises(ValidationError, person.validate) | ||||
|         # short strings don't validate | ||||
|         person.api_key = '9d159858-549b-4975-9f98-dd2f987c113' | ||||
| 
 | ||||
|     def test_uuid_field_binary(self): | ||||
|         """Test UUID fields storing as Binary object | ||||
|         """ | ||||
|         class Person(Document): | ||||
|             api_key = UUIDField(binary=True) | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         uu = uuid.uuid4() | ||||
|         Person(api_key=uu).save() | ||||
|         self.assertEqual(1, Person.objects(api_key=uu).count()) | ||||
| 
 | ||||
|         person = Person() | ||||
|         valid = (uuid.uuid4(), uuid.uuid1()) | ||||
|         for api_key in valid: | ||||
|             person.api_key = api_key | ||||
|             person.validate() | ||||
| 
 | ||||
|         invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', | ||||
|                    '9d159858-549b-4975-9f98-dd2f987c113') | ||||
|         for api_key in invalid: | ||||
|             person.api_key = api_key | ||||
|             self.assertRaises(ValidationError, person.validate) | ||||
| 
 | ||||
| 
 | ||||
|     def test_datetime_validation(self): | ||||
|         """Ensure that invalid values cannot be assigned to datetime fields. | ||||
|         """ | ||||
| @@ -339,24 +388,6 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertNotEquals(log.date, d1) | ||||
|         self.assertEquals(log.date, d2) | ||||
| 
 | ||||
|         # Pre UTC microseconds above 1000 is wonky. | ||||
|         # log.date has an invalid microsecond value so I can't construct | ||||
|         # a date to compare. | ||||
|         # | ||||
|         # However, the timedelta is predicable with pre UTC timestamps | ||||
|         # It always adds 16 seconds and [777216-776217] microseconds | ||||
|         for i in xrange(1001, 3113, 33): | ||||
|             d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) | ||||
|             log.date = d1 | ||||
|             log.save() | ||||
|             log.reload() | ||||
|             self.assertNotEquals(log.date, d1) | ||||
| 
 | ||||
|             delta = log.date - d1 | ||||
|             self.assertEquals(delta.seconds, 16) | ||||
|             microseconds = 777216 - (i % 1000) | ||||
|             self.assertEquals(delta.microseconds, microseconds) | ||||
| 
 | ||||
|         LogEntry.drop_collection() | ||||
| 
 | ||||
|     def test_complexdatetime_storage(self): | ||||
| @@ -906,6 +937,61 @@ class FieldTest(unittest.TestCase): | ||||
| 
 | ||||
|         Extensible.drop_collection() | ||||
| 
 | ||||
|     def test_embedded_mapfield_db_field(self): | ||||
| 
 | ||||
|         class Embedded(EmbeddedDocument): | ||||
|             number = IntField(default=0, db_field='i') | ||||
| 
 | ||||
|         class Test(Document): | ||||
|             my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field='x') | ||||
| 
 | ||||
|         Test.drop_collection() | ||||
| 
 | ||||
|         test = Test() | ||||
|         test.my_map['DICTIONARY_KEY'] = Embedded(number=1) | ||||
|         test.save() | ||||
| 
 | ||||
|         Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) | ||||
| 
 | ||||
|         test = Test.objects.get() | ||||
|         self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2) | ||||
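|         # check the raw document to confirm the db_field names 'x' and 'i' are used | ||||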
|         doc = self.db.test.find_one() | ||||
|         self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) | ||||
| 
 | ||||
|     def test_map_field_lookup(self): | ||||
|         """Ensure MapField lookups succeed on Fields without a lookup method""" | ||||
| 
 | ||||
|         class Log(Document): | ||||
|             name = StringField() | ||||
|             visited = MapField(DateTimeField()) | ||||
| 
 | ||||
|         Log.drop_collection() | ||||
|         Log(name="wilson", visited={'friends': datetime.datetime.now()}).save() | ||||
| 
 | ||||
|         self.assertEqual(1, Log.objects( | ||||
|                                 visited__friends__exists=True).count()) | ||||
| 
 | ||||
|     def test_embedded_db_field(self): | ||||
| 
 | ||||
|         class Embedded(EmbeddedDocument): | ||||
|             number = IntField(default=0, db_field='i') | ||||
| 
 | ||||
|         class Test(Document): | ||||
|             embedded = EmbeddedDocumentField(Embedded, db_field='x') | ||||
| 
 | ||||
|         Test.drop_collection() | ||||
| 
 | ||||
|         test = Test() | ||||
|         test.embedded = Embedded(number=1) | ||||
|         test.save() | ||||
| 
 | ||||
|         Test.objects.update_one(inc__embedded__number=1) | ||||
| 
 | ||||
|         test = Test.objects.get() | ||||
|         self.assertEqual(test.embedded.number, 2) | ||||
|         doc = self.db.test.find_one() | ||||
|         self.assertEqual(doc['x']['i'], 2) | ||||
| 
 | ||||
|     def test_embedded_document_validation(self): | ||||
|         """Ensure that invalid embedded documents cannot be assigned to | ||||
|         embedded document fields. | ||||
| @@ -1300,6 +1386,74 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertEquals(repr(Person.objects(city=None)), | ||||
|                             "[<Person: Person object>]") | ||||
| 
 | ||||
| 
 | ||||
|     def test_generic_reference_choices(self): | ||||
|         """Ensure that a GenericReferenceField can handle choices | ||||
|         """ | ||||
|         class Link(Document): | ||||
|             title = StringField() | ||||
| 
 | ||||
|         class Post(Document): | ||||
|             title = StringField() | ||||
| 
 | ||||
|         class Bookmark(Document): | ||||
|             bookmark_object = GenericReferenceField(choices=(Post,)) | ||||
| 
 | ||||
|         Link.drop_collection() | ||||
|         Post.drop_collection() | ||||
|         Bookmark.drop_collection() | ||||
| 
 | ||||
|         link_1 = Link(title="Pitchfork") | ||||
|         link_1.save() | ||||
| 
 | ||||
|         post_1 = Post(title="Behind the Scenes of the Pavement Reunion") | ||||
|         post_1.save() | ||||
| 
 | ||||
|         bm = Bookmark(bookmark_object=link_1) | ||||
|         self.assertRaises(ValidationError, bm.validate) | ||||
| 
 | ||||
|         bm = Bookmark(bookmark_object=post_1) | ||||
|         bm.save() | ||||
| 
 | ||||
|         bm = Bookmark.objects.first() | ||||
|         self.assertEqual(bm.bookmark_object, post_1) | ||||
| 
 | ||||
|     def test_generic_reference_list_choices(self): | ||||
|         """Ensure that a ListField properly dereferences generic references and | ||||
|         respects choices. | ||||
|         """ | ||||
|         class Link(Document): | ||||
|             title = StringField() | ||||
| 
 | ||||
|         class Post(Document): | ||||
|             title = StringField() | ||||
| 
 | ||||
|         class User(Document): | ||||
|             bookmarks = ListField(GenericReferenceField(choices=(Post,))) | ||||
| 
 | ||||
|         Link.drop_collection() | ||||
|         Post.drop_collection() | ||||
|         User.drop_collection() | ||||
| 
 | ||||
|         link_1 = Link(title="Pitchfork") | ||||
|         link_1.save() | ||||
| 
 | ||||
|         post_1 = Post(title="Behind the Scenes of the Pavement Reunion") | ||||
|         post_1.save() | ||||
| 
 | ||||
|         user = User(bookmarks=[link_1]) | ||||
|         self.assertRaises(ValidationError, user.validate) | ||||
| 
 | ||||
|         user = User(bookmarks=[post_1]) | ||||
|         user.save() | ||||
| 
 | ||||
|         user = User.objects.first() | ||||
|         self.assertEqual(user.bookmarks, [post_1]) | ||||
| 
 | ||||
|         Link.drop_collection() | ||||
|         Post.drop_collection() | ||||
|         User.drop_collection() | ||||
| 
 | ||||
|     def test_binary_fields(self): | ||||
|         """Ensure that binary fields can be stored and retrieved. | ||||
|         """ | ||||
| @@ -1317,7 +1471,7 @@ class FieldTest(unittest.TestCase): | ||||
| 
 | ||||
|         attachment_1 = Attachment.objects().first() | ||||
|         self.assertEqual(MIME_TYPE, attachment_1.content_type) | ||||
|         self.assertEqual(BLOB, attachment_1.blob) | ||||
|         self.assertEqual(BLOB, str(attachment_1.blob)) | ||||
| 
 | ||||
|         Attachment.drop_collection() | ||||
| 
 | ||||
| @@ -1344,7 +1498,7 @@ class FieldTest(unittest.TestCase): | ||||
| 
 | ||||
|         attachment_required = AttachmentRequired() | ||||
|         self.assertRaises(ValidationError, attachment_required.validate) | ||||
|         attachment_required.blob = '\xe6\x00\xc4\xff\x07' | ||||
|         attachment_required.blob = Binary('\xe6\x00\xc4\xff\x07') | ||||
|         attachment_required.validate() | ||||
| 
 | ||||
|         attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07') | ||||
| @@ -1356,6 +1510,18 @@ class FieldTest(unittest.TestCase): | ||||
|         AttachmentRequired.drop_collection() | ||||
|         AttachmentSizeLimit.drop_collection() | ||||
| 
 | ||||
|     def test_binary_field_primary(self): | ||||
| 
 | ||||
|         class Attachment(Document): | ||||
|             id = BinaryField(primary_key=True) | ||||
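|             # the raw bytes are stored as the document's _id | ||||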
| 
 | ||||
|         Attachment.drop_collection() | ||||
| 
 | ||||
|         att = Attachment(id=uuid.uuid4().bytes).save() | ||||
|         att.delete() | ||||
| 
 | ||||
|         self.assertEqual(0, Attachment.objects.count()) | ||||
| 
 | ||||
|     def test_choices_validation(self): | ||||
|         """Ensure that value is in a container of allowed values. | ||||
|         """ | ||||
| @@ -1456,13 +1622,13 @@ class FieldTest(unittest.TestCase): | ||||
|         """Ensure that file fields can be written to and their data retrieved | ||||
|         """ | ||||
|         class PutFile(Document): | ||||
|             file = FileField() | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         class StreamFile(Document): | ||||
|             file = FileField() | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         class SetFile(Document): | ||||
|             file = FileField() | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         text = 'Hello, World!' | ||||
|         more_text = 'Foo Bar' | ||||
| @@ -1473,53 +1639,68 @@ class FieldTest(unittest.TestCase): | ||||
|         SetFile.drop_collection() | ||||
| 
 | ||||
|         putfile = PutFile() | ||||
|         putfile.file.put(text, content_type=content_type) | ||||
|         putfile.the_file.put(text, content_type=content_type) | ||||
|         putfile.save() | ||||
|         putfile.validate() | ||||
|         result = PutFile.objects.first() | ||||
|         self.assertTrue(putfile == result) | ||||
|         self.assertEquals(result.file.read(), text) | ||||
|         self.assertEquals(result.file.content_type, content_type) | ||||
|         result.file.delete() # Remove file from GridFS | ||||
|         self.assertEquals(result.the_file.read(), text) | ||||
|         self.assertEquals(result.the_file.content_type, content_type) | ||||
|         result.the_file.delete() # Remove file from GridFS | ||||
|         PutFile.objects.delete() | ||||
| 
 | ||||
|         # Ensure file-like objects are stored | ||||
|         putfile = PutFile() | ||||
|         putstring = StringIO.StringIO() | ||||
|         putstring.write(text) | ||||
|         putstring.seek(0) | ||||
|         putfile.the_file.put(putstring, content_type=content_type) | ||||
|         putfile.save() | ||||
|         putfile.validate() | ||||
|         result = PutFile.objects.first() | ||||
|         self.assertTrue(putfile == result) | ||||
|         self.assertEquals(result.the_file.read(), text) | ||||
|         self.assertEquals(result.the_file.content_type, content_type) | ||||
|         result.the_file.delete() | ||||
| 
 | ||||
|         streamfile = StreamFile() | ||||
|         streamfile.file.new_file(content_type=content_type) | ||||
|         streamfile.file.write(text) | ||||
|         streamfile.file.write(more_text) | ||||
|         streamfile.file.close() | ||||
|         streamfile.the_file.new_file(content_type=content_type) | ||||
|         streamfile.the_file.write(text) | ||||
|         streamfile.the_file.write(more_text) | ||||
|         streamfile.the_file.close() | ||||
|         streamfile.save() | ||||
|         streamfile.validate() | ||||
|         result = StreamFile.objects.first() | ||||
|         self.assertTrue(streamfile == result) | ||||
|         self.assertEquals(result.file.read(), text + more_text) | ||||
|         self.assertEquals(result.file.content_type, content_type) | ||||
|         result.file.seek(0) | ||||
|         self.assertEquals(result.file.tell(), 0) | ||||
|         self.assertEquals(result.file.read(len(text)), text) | ||||
|         self.assertEquals(result.file.tell(), len(text)) | ||||
|         self.assertEquals(result.file.read(len(more_text)), more_text) | ||||
|         self.assertEquals(result.file.tell(), len(text + more_text)) | ||||
|         result.file.delete() | ||||
|         self.assertEquals(result.the_file.read(), text + more_text) | ||||
|         self.assertEquals(result.the_file.content_type, content_type) | ||||
|         result.the_file.seek(0) | ||||
|         self.assertEquals(result.the_file.tell(), 0) | ||||
|         self.assertEquals(result.the_file.read(len(text)), text) | ||||
|         self.assertEquals(result.the_file.tell(), len(text)) | ||||
|         self.assertEquals(result.the_file.read(len(more_text)), more_text) | ||||
|         self.assertEquals(result.the_file.tell(), len(text + more_text)) | ||||
|         result.the_file.delete() | ||||
| 
 | ||||
|         # Ensure deleted file returns None | ||||
|         self.assertTrue(result.file.read() == None) | ||||
|         self.assertTrue(result.the_file.read() == None) | ||||
| 
 | ||||
|         setfile = SetFile() | ||||
|         setfile.file = text | ||||
|         setfile.the_file = text | ||||
|         setfile.save() | ||||
|         setfile.validate() | ||||
|         result = SetFile.objects.first() | ||||
|         self.assertTrue(setfile == result) | ||||
|         self.assertEquals(result.file.read(), text) | ||||
|         self.assertEquals(result.the_file.read(), text) | ||||
| 
 | ||||
|         # Try replacing file with new one | ||||
|         result.file.replace(more_text) | ||||
|         result.the_file.replace(more_text) | ||||
|         result.save() | ||||
|         result.validate() | ||||
|         result = SetFile.objects.first() | ||||
|         self.assertTrue(setfile == result) | ||||
|         self.assertEquals(result.file.read(), more_text) | ||||
|         result.file.delete() | ||||
|         self.assertEquals(result.the_file.read(), more_text) | ||||
|         result.the_file.delete() | ||||
| 
 | ||||
|         PutFile.drop_collection() | ||||
|         StreamFile.drop_collection() | ||||
| @@ -1527,28 +1708,71 @@ class FieldTest(unittest.TestCase): | ||||
| 
 | ||||
|         # Make sure FileField is optional and not required | ||||
|         class DemoFile(Document): | ||||
|             file = FileField() | ||||
|             the_file = FileField() | ||||
|         DemoFile.objects.create() | ||||
| 
 | ||||
| 
 | ||||
|     def test_file_field_no_default(self): | ||||
| 
 | ||||
|         class GridDocument(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         GridDocument.drop_collection() | ||||
| 
 | ||||
|         with tempfile.TemporaryFile() as f: | ||||
|             f.write("Hello World!") | ||||
|             f.flush() | ||||
| 
 | ||||
|             # Test without default | ||||
|             doc_a = GridDocument() | ||||
|             doc_a.save() | ||||
| 
 | ||||
| 
 | ||||
|             doc_b = GridDocument.objects.with_id(doc_a.id) | ||||
|             doc_b.the_file.replace(f, filename='doc_b') | ||||
|             doc_b.save() | ||||
|             self.assertNotEquals(doc_b.the_file.grid_id, None) | ||||
| 
 | ||||
|             # Test it matches | ||||
|             doc_c = GridDocument.objects.with_id(doc_b.id) | ||||
|             self.assertEquals(doc_b.the_file.grid_id, doc_c.the_file.grid_id) | ||||
| 
 | ||||
|             # Test with default | ||||
|             doc_d = GridDocument(the_file='') | ||||
|             doc_d.save() | ||||
| 
 | ||||
|             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||
|             self.assertEquals(doc_d.the_file.grid_id, doc_e.the_file.grid_id) | ||||
| 
 | ||||
|             doc_e.the_file.replace(f, filename='doc_e') | ||||
|             doc_e.save() | ||||
| 
 | ||||
|             doc_f = GridDocument.objects.with_id(doc_e.id) | ||||
|             self.assertEquals(doc_e.the_file.grid_id, doc_f.the_file.grid_id) | ||||
| 
 | ||||
|         db = GridDocument._get_db() | ||||
|         grid_fs = gridfs.GridFS(db) | ||||
|         self.assertEquals(['doc_b', 'doc_e'], grid_fs.list()) | ||||
| 
 | ||||
|     def test_file_uniqueness(self): | ||||
|         """Ensure that each instance of a FileField is unique | ||||
|         """ | ||||
|         class TestFile(Document): | ||||
|             name = StringField() | ||||
|             file = FileField() | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         # First instance | ||||
|         testfile = TestFile() | ||||
|         testfile.name = "Hello, World!" | ||||
|         testfile.file.put('Hello, World!') | ||||
|         testfile.save() | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put('Hello, World!') | ||||
|         test_file.save() | ||||
| 
 | ||||
|         # Second instance | ||||
|         testfiledupe = TestFile() | ||||
|         data = testfiledupe.file.read() # Should be None | ||||
|         test_file_dupe = TestFile() | ||||
|         data = test_file_dupe.the_file.read() # Should be None | ||||
| 
 | ||||
|         self.assertTrue(testfile.name != testfiledupe.name) | ||||
|         self.assertTrue(testfile.file.read() != data) | ||||
|         self.assertTrue(test_file.name != test_file_dupe.name) | ||||
|         self.assertTrue(test_file.the_file.read() != data) | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
| @@ -1556,17 +1780,25 @@ class FieldTest(unittest.TestCase): | ||||
|         """Ensure that a boolean test of a FileField indicates its presence | ||||
|         """ | ||||
|         class TestFile(Document): | ||||
|             file = FileField() | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         testfile = TestFile() | ||||
|         self.assertFalse(bool(testfile.file)) | ||||
|         testfile.file = 'Hello, World!' | ||||
|         testfile.file.content_type = 'text/plain' | ||||
|         testfile.save() | ||||
|         self.assertTrue(bool(testfile.file)) | ||||
|         test_file = TestFile() | ||||
|         self.assertFalse(bool(test_file.the_file)) | ||||
|         test_file.the_file = 'Hello, World!' | ||||
|         test_file.the_file.content_type = 'text/plain' | ||||
|         test_file.save() | ||||
|         self.assertTrue(bool(test_file.the_file)) | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
|     def test_file_cmp(self): | ||||
|         """Test comparing against other types""" | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         test_file = TestFile() | ||||
|         self.assertFalse(test_file.the_file in [{"test": 1}]) | ||||
| 
 | ||||
|     def test_image_field(self): | ||||
| 
 | ||||
|         class TestImage(Document): | ||||
| @@ -1630,30 +1862,30 @@ class FieldTest(unittest.TestCase): | ||||
| 
 | ||||
| 
 | ||||
|     def test_file_multidb(self): | ||||
|         register_connection('testfiles', 'testfiles') | ||||
|         register_connection('test_files', 'test_files') | ||||
|         class TestFile(Document): | ||||
|             name = StringField() | ||||
|             file = FileField(db_alias="testfiles", | ||||
|             the_file = FileField(db_alias="test_files", | ||||
|                                  collection_name="macumba") | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
|         # delete old filesystem | ||||
|         get_db("testfiles").macumba.files.drop() | ||||
|         get_db("testfiles").macumba.chunks.drop() | ||||
|         get_db("test_files").macumba.files.drop() | ||||
|         get_db("test_files").macumba.chunks.drop() | ||||
| 
 | ||||
|         # First instance | ||||
|         testfile = TestFile() | ||||
|         testfile.name = "Hello, World!" | ||||
|         testfile.file.put('Hello, World!', | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put('Hello, World!', | ||||
|                           name="hello.txt") | ||||
|         testfile.save() | ||||
|         test_file.save() | ||||
| 
 | ||||
|         data = get_db("testfiles").macumba.files.find_one() | ||||
|         data = get_db("test_files").macumba.files.find_one() | ||||
|         self.assertEquals(data.get('name'), 'hello.txt') | ||||
| 
 | ||||
|         testfile = TestFile.objects.first() | ||||
|         self.assertEquals(testfile.file.read(), | ||||
|         test_file = TestFile.objects.first() | ||||
|         self.assertEquals(test_file.the_file.read(), | ||||
|                           'Hello, World!') | ||||
| 
 | ||||
|     def test_geo_indexes(self): | ||||
| @@ -1828,6 +2060,8 @@ class FieldTest(unittest.TestCase): | ||||
|             name = StringField() | ||||
|             like = GenericEmbeddedDocumentField() | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         person = Person(name='Test User') | ||||
|         person.like = Car(name='Fiat') | ||||
|         person.save() | ||||
| @@ -1841,6 +2075,59 @@ class FieldTest(unittest.TestCase): | ||||
|         person = Person.objects.first() | ||||
|         self.assertTrue(isinstance(person.like, Dish)) | ||||
| 
 | ||||
|     def test_generic_embedded_document_choices(self): | ||||
|         """Ensure you can limit GenericEmbeddedDocument choices | ||||
|         """ | ||||
|         class Car(EmbeddedDocument): | ||||
|             name = StringField() | ||||
| 
 | ||||
|         class Dish(EmbeddedDocument): | ||||
|             food = StringField(required=True) | ||||
|             number = IntField() | ||||
| 
 | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             like = GenericEmbeddedDocumentField(choices=(Dish,)) | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         person = Person(name='Test User') | ||||
|         person.like = Car(name='Fiat') | ||||
|         self.assertRaises(ValidationError, person.validate) | ||||
| 
 | ||||
|         person.like = Dish(food="arroz", number=15) | ||||
|         person.save() | ||||
| 
 | ||||
|         person = Person.objects.first() | ||||
|         self.assertTrue(isinstance(person.like, Dish)) | ||||
| 
 | ||||
|     def test_generic_list_embedded_document_choices(self): | ||||
|         """Ensure you can limit GenericEmbeddedDocument choices inside a list | ||||
|         field | ||||
|         """ | ||||
|         class Car(EmbeddedDocument): | ||||
|             name = StringField() | ||||
| 
 | ||||
|         class Dish(EmbeddedDocument): | ||||
|             food = StringField(required=True) | ||||
|             number = IntField() | ||||
| 
 | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,))) | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         person = Person(name='Test User') | ||||
|         person.likes = [Car(name='Fiat')] | ||||
|         self.assertRaises(ValidationError, person.validate) | ||||
| 
 | ||||
|         person.likes = [Dish(food="arroz", number=15)] | ||||
|         person.save() | ||||
| 
 | ||||
|         person = Person.objects.first() | ||||
|         self.assertTrue(isinstance(person.likes[0], Dish)) | ||||
| 
 | ||||
|     def test_recursive_validation(self): | ||||
|         """Ensure that a validation result to_dict is available. | ||||
|         """ | ||||
| @@ -1880,49 +2167,11 @@ class FieldTest(unittest.TestCase): | ||||
|         self.assertTrue(1 in error_dict['comments']) | ||||
|         self.assertTrue('content' in error_dict['comments'][1]) | ||||
|         self.assertEquals(error_dict['comments'][1]['content'], | ||||
|                           u'Field is required ("content")') | ||||
|                           'Field is required') | ||||
| 
 | ||||
|         post.comments[1].content = 'here we go' | ||||
|         post.validate() | ||||
| 
 | ||||
| 
 | ||||
| class ValidatorErrorTest(unittest.TestCase): | ||||
| 
 | ||||
|     def test_to_dict(self): | ||||
|         """Ensure a ValidationError handles error to_dict correctly. | ||||
|         """ | ||||
|         error = ValidationError('root') | ||||
|         self.assertEquals(error.to_dict(), {}) | ||||
| 
 | ||||
|         # 1st level error schema | ||||
|         error.errors = {'1st': ValidationError('bad 1st'), } | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertEquals(error.to_dict()['1st'], 'bad 1st') | ||||
| 
 | ||||
|         # 2nd level error schema | ||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||
|             '2nd': ValidationError('bad 2nd'), | ||||
|         })} | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertTrue(isinstance(error.to_dict()['1st'], dict)) | ||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||
|         self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd') | ||||
| 
 | ||||
|         # moar levels | ||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||
|             '2nd': ValidationError('bad 2nd', errors={ | ||||
|                 '3rd': ValidationError('bad 3rd', errors={ | ||||
|                     '4th': ValidationError('Inception'), | ||||
|                 }), | ||||
|             }), | ||||
|         })} | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||
|         self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) | ||||
|         self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) | ||||
|         self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'], | ||||
|                           'Inception') | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -480,7 +480,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual(person.name, "User C") | ||||
| 
 | ||||
|     def test_bulk_insert(self): | ||||
|         """Ensure that query by array position works. | ||||
|         """Ensure that bulk insert works | ||||
|         """ | ||||
| 
 | ||||
|         class Comment(EmbeddedDocument): | ||||
| @@ -490,7 +490,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|             comments = ListField(EmbeddedDocumentField(Comment)) | ||||
| 
 | ||||
|         class Blog(Document): | ||||
|             title = StringField() | ||||
|             title = StringField(unique=True) | ||||
|             tags = ListField(StringField()) | ||||
|             posts = ListField(EmbeddedDocumentField(Post)) | ||||
| 
 | ||||
| @@ -563,6 +563,81 @@ class QuerySetTest(unittest.TestCase): | ||||
|         obj_id = Blog.objects.insert(blog1, load_bulk=False) | ||||
|         self.assertEquals(obj_id.__class__.__name__, 'ObjectId') | ||||
| 
 | ||||
|         Blog.drop_collection() | ||||
|         post3 = Post(comments=[comment1, comment1]) | ||||
|         blog1 = Blog(title="foo", posts=[post1, post2]) | ||||
|         blog2 = Blog(title="bar", posts=[post2, post3]) | ||||
|         blog3 = Blog(title="baz", posts=[post1, post2]) | ||||
|         Blog.objects.insert([blog1, blog2]) | ||||
| 
 | ||||
|         def throw_operation_error_not_unique(): | ||||
|             Blog.objects.insert([blog2, blog3], safe=True) | ||||
| 
 | ||||
|         self.assertRaises(OperationError, throw_operation_error_not_unique) | ||||
|         self.assertEqual(Blog.objects.count(), 2) | ||||
| 
 | ||||
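|         # with continue_on_error the duplicate blog2 is skipped but blog3 still inserts | ||||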
|         Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True}) | ||||
|         self.assertEqual(Blog.objects.count(), 3) | ||||
| 
 | ||||
|     def test_get_changed_fields_query_count(self): | ||||
| 
 | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             owns = ListField(ReferenceField('Organization')) | ||||
|             projects = ListField(ReferenceField('Project')) | ||||
| 
 | ||||
|         class Organization(Document): | ||||
|             name = StringField() | ||||
|             owner = ReferenceField('Person') | ||||
|             employees = ListField(ReferenceField('Person')) | ||||
| 
 | ||||
|         class Project(Document): | ||||
|             name = StringField() | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
|         Organization.drop_collection() | ||||
|         Project.drop_collection() | ||||
| 
 | ||||
|         r1 = Project(name="r1").save() | ||||
|         r2 = Project(name="r2").save() | ||||
|         r3 = Project(name="r3").save() | ||||
|         p1 = Person(name="p1", projects=[r1, r2]).save() | ||||
|         p2 = Person(name="p2", projects=[r2]).save() | ||||
|         o1 = Organization(name="o1", employees=[p1]).save() | ||||
| 
 | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
| 
 | ||||
|             fresh_o1 = Organization.objects.get(id=o1.id) | ||||
|             self.assertEqual(1, q) | ||||
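|             # computing the changed fields must not trigger extra dereferencing queries | ||||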
|             fresh_o1._get_changed_fields() | ||||
|             self.assertEqual(1, q) | ||||
| 
 | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
| 
 | ||||
|             fresh_o1 = Organization.objects.get(id=o1.id) | ||||
|             fresh_o1.save() | ||||
| 
 | ||||
|             self.assertEquals(q, 2) | ||||
| 
 | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
| 
 | ||||
|             fresh_o1 = Organization.objects.get(id=o1.id) | ||||
|             fresh_o1.save(cascade=False) | ||||
| 
 | ||||
|             self.assertEquals(q, 2) | ||||
| 
 | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
| 
 | ||||
|             fresh_o1 = Organization.objects.get(id=o1.id) | ||||
|             fresh_o1.employees.append(p2) | ||||
|             fresh_o1.save(cascade=False) | ||||
| 
 | ||||
|             self.assertEquals(q, 3) | ||||
| 
 | ||||
|     def test_slave_okay(self): | ||||
|         """Ensures that a query can take slave_okay syntax | ||||
|         """ | ||||
| @@ -619,17 +694,38 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual(people1, people2) | ||||
|         self.assertEqual(people1, people3) | ||||
| 
 | ||||
|     def test_repr_iteration(self): | ||||
|         """Ensure that QuerySet __repr__ can handle loops | ||||
|         """ | ||||
|         self.Person(name='Person 1').save() | ||||
|         self.Person(name='Person 2').save() | ||||
|     def test_repr(self): | ||||
|         """Test repr behavior isnt destructive""" | ||||
| 
 | ||||
|         queryset = self.Person.objects | ||||
|         self.assertEquals('[<Person: Person object>, <Person: Person object>]', repr(queryset)) | ||||
|         for person in queryset: | ||||
|             self.assertEquals('.. queryset mid-iteration ..', repr(queryset)) | ||||
|         class Doc(Document): | ||||
|             number = IntField() | ||||
| 
 | ||||
|             def __repr__(self): | ||||
|                return "<Doc: %s>" % self.number | ||||
| 
 | ||||
|         Doc.drop_collection() | ||||
| 
 | ||||
|         for i in xrange(1000): | ||||
|             Doc(number=i).save() | ||||
| 
 | ||||
|         docs = Doc.objects.order_by('number') | ||||
| 
 | ||||
|         self.assertEquals(docs.count(), 1000) | ||||
|         self.assertEquals(len(docs), 1000) | ||||
| 
 | ||||
|         docs_string = "%s" % docs | ||||
|         self.assertTrue("Doc: 0" in docs_string) | ||||
| 
 | ||||
|         self.assertEquals(docs.count(), 1000) | ||||
|         self.assertEquals(len(docs), 1000) | ||||
| 
 | ||||
|         # Limit and skip | ||||
|         self.assertEquals('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4]) | ||||
| 
 | ||||
|         self.assertEquals(docs.count(), 3) | ||||
|         self.assertEquals(len(docs), 3) | ||||
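|         # while iterating, repr returns a placeholder rather than consuming the cursor | ||||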
|         for doc in docs: | ||||
|             self.assertEqual('.. queryset mid-iteration ..', repr(docs)) | ||||
| 
 | ||||
|     def test_regex_query_shortcuts(self): | ||||
|         """Ensure that contains, startswith, endswith, etc work. | ||||
| @@ -1327,6 +1423,37 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.Person.objects(name='Test User').delete() | ||||
|         self.assertEqual(1, BlogPost.objects.count()) | ||||
| 
 | ||||
|     def test_reverse_delete_rule_cascade_self_referencing(self): | ||||
|         """Ensure self-referencing CASCADE deletes do not result in infinite loop | ||||
|         """ | ||||
|         class Category(Document): | ||||
|             name = StringField() | ||||
|             parent = ReferenceField('self', reverse_delete_rule=CASCADE) | ||||
| 
 | ||||
|         num_children = 3 | ||||
|         base = Category(name='Root') | ||||
|         base.save() | ||||
| 
 | ||||
|         # Create a simple parent-child tree | ||||
|         for i in range(num_children): | ||||
|             child_name = 'Child-%i' % i | ||||
|             child = Category(name=child_name, parent=base) | ||||
|             child.save() | ||||
| 
 | ||||
|             for i in range(num_children): | ||||
|                 child_child_name = 'Child-Child-%i' % i | ||||
|                 child_child = Category(name=child_child_name, parent=child) | ||||
|                 child_child.save() | ||||
| 
 | ||||
|         tree_size = 1 + num_children + (num_children * num_children) | ||||
|         self.assertEquals(tree_size, Category.objects.count()) | ||||
|         self.assertEquals(num_children, Category.objects(parent=base).count()) | ||||
| 
 | ||||
|         # The delete should effectively wipe out the Category collection | ||||
|         # without resulting in infinite parent-child cascade recursion | ||||
|         base.delete() | ||||
|         self.assertEquals(0, Category.objects.count()) | ||||
| 
 | ||||
|     def test_reverse_delete_rule_nullify(self): | ||||
|         """Ensure nullification of references to deleted documents. | ||||
|         """ | ||||
| @@ -1371,6 +1498,36 @@ class QuerySetTest(unittest.TestCase): | ||||
| 
 | ||||
|         self.assertRaises(OperationError, self.Person.objects.delete) | ||||
| 
 | ||||
|     def test_reverse_delete_rule_pull(self): | ||||
|         """Ensure pulling of references to deleted documents. | ||||
|         """ | ||||
|         class BlogPost(Document): | ||||
|             content = StringField() | ||||
|             authors = ListField(ReferenceField(self.Person, | ||||
|                 reverse_delete_rule=PULL)) | ||||
| 
 | ||||
|         BlogPost.drop_collection() | ||||
|         self.Person.drop_collection() | ||||
| 
 | ||||
|         me = self.Person(name='Test User') | ||||
|         me.save() | ||||
| 
 | ||||
|         someoneelse = self.Person(name='Some-one Else') | ||||
|         someoneelse.save() | ||||
| 
 | ||||
|         post = BlogPost(content='Watching TV', authors=[me, someoneelse]) | ||||
|         post.save() | ||||
| 
 | ||||
|         another = BlogPost(content='Chilling Out', authors=[someoneelse]) | ||||
|         another.save() | ||||
| 
 | ||||
|         someoneelse.delete() | ||||
|         post.reload() | ||||
|         another.reload() | ||||
| 
 | ||||
|         self.assertEqual(post.authors, [me]) | ||||
|         self.assertEqual(another.authors, []) | ||||
| 
 | ||||
|     def test_update(self): | ||||
|         """Ensure that atomic updates work properly. | ||||
|         """ | ||||
| @@ -1421,7 +1578,7 @@ class QuerySetTest(unittest.TestCase): | ||||
| 
 | ||||
|         BlogPost.drop_collection() | ||||
| 
 | ||||
|     def test_update_push_and_pull(self): | ||||
|     def test_update_push_and_pull_add_to_set(self): | ||||
|         """Ensure that the 'pull' update operation works correctly. | ||||
|         """ | ||||
|         class BlogPost(Document): | ||||
| @@ -1454,6 +1611,52 @@ class QuerySetTest(unittest.TestCase): | ||||
|         post.reload() | ||||
|         self.assertEqual(post.tags, ["code", "mongodb"]) | ||||
| 
 | ||||
|     def test_add_to_set_each(self): | ||||
|         class Item(Document): | ||||
|             name = StringField(required=True) | ||||
|             description = StringField(max_length=50) | ||||
|             parents = ListField(ReferenceField('self')) | ||||
| 
 | ||||
|         Item.drop_collection() | ||||
| 
 | ||||
|         item = Item(name='test item').save() | ||||
|         parent_1 = Item(name='parent 1').save() | ||||
|         parent_2 = Item(name='parent 2').save() | ||||
| 
 | ||||
|         item.update(add_to_set__parents=[parent_1, parent_2, parent_1]) | ||||
|         item.reload() | ||||
| 
 | ||||
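|         # add_to_set with a list behaves like $addToSet with $each, so the | ||||
|         # duplicate parent_1 is only stored once | ||||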
|         self.assertEqual([parent_1, parent_2], item.parents) | ||||
| 
 | ||||
|     def test_pull_nested(self): | ||||
| 
 | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
| 
 | ||||
|         class Collaborator(EmbeddedDocument): | ||||
|             user = StringField() | ||||
| 
 | ||||
|             def __unicode__(self): | ||||
|                 return '%s' % self.user | ||||
| 
 | ||||
|         class Site(Document): | ||||
|             name = StringField(max_length=75, unique=True, required=True) | ||||
|             collaborators = ListField(EmbeddedDocumentField(Collaborator)) | ||||
| 
 | ||||
| 
 | ||||
|         Site.drop_collection() | ||||
| 
 | ||||
|         c = Collaborator(user='Esteban') | ||||
|         s = Site(name="test", collaborators=[c]) | ||||
|         s.save() | ||||
| 
 | ||||
|         Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') | ||||
|         self.assertEqual(Site.objects.first().collaborators, []) | ||||
| 
 | ||||
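|         # pull_all expects a list of whole values; matching on an embedded | ||||
|         # subfield raises InvalidQueryError | ||||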
|         def pull_all(): | ||||
|             Site.objects(id=s.id).update_one(pull_all__collaborators__user=['Ross']) | ||||
| 
 | ||||
|         self.assertRaises(InvalidQueryError, pull_all) | ||||
| 
 | ||||
|     def test_update_one_pop_generic_reference(self): | ||||
| 
 | ||||
| @@ -1849,9 +2052,9 @@ class QuerySetTest(unittest.TestCase): | ||||
| 
 | ||||
|         # Check item_frequencies works for non-list fields | ||||
|         def test_assertions(f): | ||||
|             self.assertEqual(set(['1', '2']), set(f.keys())) | ||||
|             self.assertEqual(f['1'], 1) | ||||
|             self.assertEqual(f['2'], 2) | ||||
|             self.assertEqual(set([1, 2]), set(f.keys())) | ||||
|             self.assertEqual(f[1], 1) | ||||
|             self.assertEqual(f[2], 2) | ||||
| 
 | ||||
|         exec_js = BlogPost.objects.item_frequencies('hits') | ||||
|         map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) | ||||
| @@ -1951,7 +2154,6 @@ class QuerySetTest(unittest.TestCase): | ||||
|             data = EmbeddedDocumentField(Data, required=True) | ||||
|             extra = EmbeddedDocumentField(Extra) | ||||
| 
 | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         p = Person() | ||||
| @@ -1969,6 +2171,52 @@ class QuerySetTest(unittest.TestCase): | ||||
|         ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) | ||||
|         self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) | ||||
| 
 | ||||
|     def test_item_frequencies_with_0_values(self): | ||||
|         class Test(Document): | ||||
|             val = IntField() | ||||
| 
 | ||||
|         Test.drop_collection() | ||||
|         t = Test() | ||||
|         t.val = 0 | ||||
|         t.save() | ||||
| 
 | ||||
|         ot = Test.objects.item_frequencies('val', map_reduce=True) | ||||
|         self.assertEquals(ot, {0: 1}) | ||||
|         ot = Test.objects.item_frequencies('val', map_reduce=False) | ||||
|         self.assertEquals(ot, {0: 1}) | ||||
| 
 | ||||
|     def test_item_frequencies_with_False_values(self): | ||||
|         class Test(Document): | ||||
|             val = BooleanField() | ||||
| 
 | ||||
|         Test.drop_collection() | ||||
|         t = Test() | ||||
|         t.val = False | ||||
|         t.save() | ||||
| 
 | ||||
|         ot = Test.objects.item_frequencies('val', map_reduce=True) | ||||
|         self.assertEquals(ot, {False: 1}) | ||||
|         ot = Test.objects.item_frequencies('val', map_reduce=False) | ||||
|         self.assertEquals(ot, {False: 1}) | ||||
| 
 | ||||
|     def test_item_frequencies_normalize(self): | ||||
|         class Test(Document): | ||||
|             val = IntField() | ||||
| 
 | ||||
|         Test.drop_collection() | ||||
| 
 | ||||
|         for i in xrange(50): | ||||
|             Test(val=1).save() | ||||
| 
 | ||||
|         for i in xrange(20): | ||||
|             Test(val=2).save() | ||||
| 
 | ||||
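|         # 70 documents in total, so the frequencies normalize to 50/70 and 20/70 | ||||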
|         freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True) | ||||
|         self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70}) | ||||
| 
 | ||||
|         freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True) | ||||
|         self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70}) | ||||
| 
 | ||||
|     def test_average(self): | ||||
|         """Ensure that field can be averaged correctly. | ||||
|         """ | ||||
| @@ -2038,28 +2286,29 @@ class QuerySetTest(unittest.TestCase): | ||||
|             date = DateTimeField(default=datetime.now) | ||||
| 
 | ||||
|             @queryset_manager | ||||
|             def objects(doc_cls, queryset): | ||||
|                 return queryset(deleted=False) | ||||
|             def objects(cls, qryset): | ||||
|                 opts = {"deleted": False} | ||||
|                 return qryset(**opts) | ||||
| 
 | ||||
|             @queryset_manager | ||||
|             def music_posts(doc_cls, queryset): | ||||
|                 return queryset(tags='music', deleted=False).order_by('-date') | ||||
|             def music_posts(doc_cls, queryset, deleted=False): | ||||
|                 return queryset(tags='music', | ||||
|                                 deleted=deleted).order_by('date') | ||||
| 
 | ||||
|         BlogPost.drop_collection() | ||||
| 
 | ||||
|         post1 = BlogPost(tags=['music', 'film']) | ||||
|         post1.save() | ||||
|         post2 = BlogPost(tags=['music']) | ||||
|         post2.save() | ||||
|         post3 = BlogPost(tags=['film', 'actors']) | ||||
|         post3.save() | ||||
|         post4 = BlogPost(tags=['film', 'actors'], deleted=True) | ||||
|         post4.save() | ||||
|         post1 = BlogPost(tags=['music', 'film']).save() | ||||
|         post2 = BlogPost(tags=['music']).save() | ||||
|         post3 = BlogPost(tags=['film', 'actors']).save() | ||||
|         post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save() | ||||
| 
 | ||||
|         self.assertEqual([p.id for p in BlogPost.objects], | ||||
|         self.assertEqual([p.id for p in BlogPost.objects()], | ||||
|                          [post1.id, post2.id, post3.id]) | ||||
|         self.assertEqual([p.id for p in BlogPost.music_posts], | ||||
|                          [post2.id, post1.id]) | ||||
|         self.assertEqual([p.id for p in BlogPost.music_posts()], | ||||
|                          [post1.id, post2.id]) | ||||
| 
 | ||||
|         self.assertEqual([p.id for p in BlogPost.music_posts(True)], | ||||
|                          [post4.id]) | ||||
| 
 | ||||
|         BlogPost.drop_collection() | ||||
| 
 | ||||
| @@ -2899,6 +3148,19 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual(plist[1], (20, False)) | ||||
|         self.assertEqual(plist[2], (30, True)) | ||||
| 
 | ||||
|     def test_scalar_primary_key(self): | ||||
| 
 | ||||
|         class SettingValue(Document): | ||||
|             key = StringField(primary_key=True) | ||||
|             value = StringField() | ||||
| 
 | ||||
|         SettingValue.drop_collection() | ||||
|         s = SettingValue(key="test", value="test value") | ||||
|         s.save() | ||||
| 
 | ||||
|         val = SettingValue.objects.scalar('key', 'value') | ||||
|         self.assertEqual(list(val), [('test', 'test value')]) | ||||
| 
 | ||||
|     def test_scalar_cursor_behaviour(self): | ||||
|         """Ensure that a query returns a valid set of results. | ||||
|         """ | ||||
tests/test_replicaset_connection.py (new file, 32 lines)
							| @@ -0,0 +1,32 @@ | ||||
| import unittest | ||||
| import pymongo | ||||
| from pymongo import ReadPreference, ReplicaSetConnection | ||||
|  | ||||
| import mongoengine | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db, get_connection, ConnectionError | ||||
|  | ||||
|  | ||||
| class ConnectionTest(unittest.TestCase): | ||||
|  | ||||
|     def tearDown(self): | ||||
|         mongoengine.connection._connection_settings = {} | ||||
|         mongoengine.connection._connections = {} | ||||
|         mongoengine.connection._dbs = {} | ||||
|  | ||||
|     def test_replicaset_uri_passes_read_preference(self): | ||||
|         """Requires a replica set called "rs" on port 27017 | ||||
|         """ | ||||
|  | ||||
|         try: | ||||
|             conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY) | ||||
|         except ConnectionError, e: | ||||
|             return | ||||
|  | ||||
|         if not isinstance(conn, ReplicaSetConnection): | ||||
|             return | ||||
|  | ||||
|         self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||