Compare commits


66 Commits

Author SHA1 Message Date
Ross Lawley
9cc6164026 Version bump 2012-08-07 10:05:01 +01:00
Ross Lawley
475488b9f2 Added support for distinct and db_alias (MongoEngine/mongoengine#59) 2012-08-07 10:04:05 +01:00
Ross Lawley
95b1783834 Updated changelog 2012-08-07 09:31:51 +01:00
Anthony Nemitz
12c8b5c0b9 Make chained querysets work if constraining the same fields.
Refs hmarr/mongoengine#554
2012-08-07 08:59:56 +01:00
Ross Lawley
f99b7a811b Fixed error in Binary Field 2012-08-07 08:53:58 +01:00
Ross Lawley
2801b38c75 Version Bump 2012-08-03 14:36:31 +01:00
Ross Lawley
dc3fea875e Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-03 12:56:27 +01:00
Ross Lawley
aab8c2b687 Merge pull request #57 from filipd/patch-1
Added reference to the official repository in the README
2012-08-03 04:56:55 -07:00
Filip Dupanović
3577773af3 Added reference to the official repository 2012-08-03 14:55:18 +03:00
Ross Lawley
8ac9e6dc19 Updated the documents 2012-08-02 14:11:02 +01:00
Ross Lawley
4b3cea9e78 Added Binary support to UUID (MongoEngine/mongoengine#47) 2012-08-01 16:03:33 +01:00
Ross Lawley
2420b5e937 Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) 2012-08-01 15:14:56 +01:00
Ross Lawley
f23a976bea Added Tommi Komulainen to the contributors list
refs MongoEngine/mongoengine#48
2012-08-01 15:01:21 +01:00
Ross Lawley
4226cd08f1 Updated Changelog 2012-08-01 15:00:14 +01:00
Ross Lawley
7a230f1693 Merge branch 'master' of github.com:MongoEngine/mongoengine 2012-08-01 14:58:06 +01:00
Ross Lawley
a43d0d4612 Fixed BinaryField python value issue (MongoEngine/mongoengine#48) 2012-08-01 14:57:46 +01:00
Ross Lawley
78a40a0c70 Merge pull request #41 from wpjunior/patch-13
Small fix in SequenceField
2012-08-01 06:26:58 -07:00
Ross Lawley
2c69d8f0b0 Updated License 2012-08-01 13:54:24 +01:00
Ross Lawley
0018c38b83 Fixed queryset manager issue (MongoEngine/mongoengine#52) 2012-08-01 13:51:51 +01:00
Ross Lawley
8df81571fc Fixed FileField comparision
Refs hmarr/mongoengine#547
2012-08-01 13:28:28 +01:00
Ross Lawley
48f988acd7 Merge pull request #44 from faulkner/fix-notes
Proper syntax for RST notes (so they actually render).
2012-07-26 08:17:45 -07:00
Ross Lawley
6526923345 Fixed recursion loading bug in _get_changed_fields
fixes hmarr/mongoengine#548
2012-07-26 16:00:32 +01:00
Ross Lawley
24fd1acce6 Version bump 2012-07-26 14:14:10 +01:00
Ross Lawley
cbb9235dc5 Merge branch 'master' of github.com:hmarr/mongoengine 2012-07-25 15:12:34 +01:00
Ross Lawley
19ec2c9bc9 Merge pull request #545 from maxcountryman/patch-1
Correcting typo in DynamicField docstring
2012-07-25 07:12:07 -07:00
Ross Lawley
6459d4c0b6 Fixed issue with custom queryset manager expecting explict variable names
If using / expecting kwargs you have to call the queryset manager
explicitly.
2012-07-25 14:55:10 +01:00
Chris Faulkner
1304f2721f Proper syntax for RST notes (so they actually render). 2012-07-24 14:06:43 -07:00
Wilson Júnior
8bde0c0e53 Small fix in SequenceField 2012-07-23 12:31:47 -03:00
Ross Lawley
598ffd3e5c Fixed documentation 2012-07-23 15:32:02 +01:00
Max Countryman
601f0eb168 Correcting typo in DynamicField docstring 2012-07-20 19:12:43 -07:00
Ross Lawley
3070e0bf5d Fix for inheritance bug and db_alias 2012-07-20 10:34:08 +01:00
Ross Lawley
83c11a9834 Version bump 2012-07-19 16:12:21 +01:00
Ross Lawley
5c912b930e Removed tests testing MongoDB not mongoengine 2012-07-19 16:03:29 +01:00
Ross Lawley
1b17fb0ae7 Updated validation error messages
refs hmarr/mongoengine#539
2012-07-19 15:04:12 +01:00
Ross Lawley
d83e67c121 Added support for null / zero / false values in item_frequencies
refs /MongoEngine/mongoengine#40
2012-07-19 12:08:07 +01:00
Ross Lawley
ae39ed94c9 Fixed cascade save edge case
refs MongoEngine/mongoengine#40
2012-07-19 11:52:26 +01:00
Ross Lawley
1e51180d42 Fixed geo index creation bug
fixes MongoEngine/mongoengine#36
2012-07-19 11:39:52 +01:00
Ross Lawley
87ba69d02e Updated changelog 2012-07-19 10:35:37 +01:00
Ross Lawley
8879d5560b Added support for args / kwargs and queryset_manager
Closes MongoEngine/mongoengine#37
2012-07-19 10:32:33 +01:00
Ross Lawley
c1621ee39c Merge pull request #39 from wpjunior/tests2
More one test
2012-07-18 06:10:26 -07:00
Ross Lawley
b0aa98edb4 Deref list custom id fix 2012-07-18 14:09:24 +01:00
Wilson Júnior
a7a2fe0216 added more tests 2012-07-18 06:37:23 -03:00
Ross Lawley
8e50f5fa3c Version bump 2012-07-11 16:59:24 +01:00
Ross Lawley
31793520bf Updated changelog / AUTHORS
refs hmarr/mongoengine#529
2012-07-11 16:38:15 +01:00
Ross Lawley
0b6b0368c5 Merge branch 'master' of https://github.com/elasticsales/mongoengine 2012-07-11 16:36:35 +01:00
Ross Lawley
d1d30a9280 Added test and updated changelog
refs hmarr/mongoengine#527
2012-07-11 16:34:28 +01:00
Ross Lawley
420c6f2d1e Merge branch 'patch-10' of https://github.com/wpjunior/mongoengine 2012-07-11 16:33:16 +01:00
Ross Lawley
34f06c4971 Updated changelog / AUTHORS
refs hmarr/mongoengine#524
2012-07-11 16:27:43 +01:00
Ross Lawley
9cc4bbd49d Merge branch 'patch-1' of https://github.com/daevaorn/mongoengine 2012-07-11 16:26:50 +01:00
Ross Lawley
f66b312869 Updated api docs
fixes hmarr/mongoengine#526
2012-07-11 16:25:40 +01:00
Ross Lawley
2405ba8708 Updated Changelog / AUTHORS
refs hmarr/mongoengine#531
2012-07-11 16:11:13 +01:00
Ross Lawley
a91b6bff8b Merge branch 'master' of https://github.com/agonzalezro/mongoengine 2012-07-11 16:09:33 +01:00
Ross Lawley
450dc11a68 Unicode fixes
refs hmarr/mongoengine#533 MongoEngine/mongoengine#32
2012-07-11 16:01:24 +01:00
Ross Lawley
1ce2f84ce5 Updated docs regarding fields
refs hmarr/mongoengine#535
2012-07-11 15:56:34 +01:00
Ross Lawley
f55b241cfa Trying to bump travis 2012-07-11 15:45:31 +01:00
Ross Lawley
34d08ce8ef Only dereference fields than need it
Fixes MongoEngine/mongoengine#31
2012-07-11 15:23:27 +01:00
Ross Lawley
4f5aa8c43b Fixed config / added test 2012-07-11 14:29:35 +01:00
Ross Lawley
27b375060d Updated changelog / AUTHORS
refs MongoEngine/mongoengine#32
2012-07-11 14:25:38 +01:00
Ross Lawley
cbfdc401f7 Merge pull request #32 from jaimeirurzun/master
Fix _transform_update to accept unicode fields

Thanks jaimeirurzun
2012-07-11 06:24:29 -07:00
Ross Lawley
b58bf3e0ce Added support for addToSet and each
fixes MongoEngine/mongoengine#33
2012-07-11 14:22:50 +01:00
Jaime Irurzun
1fff7e9aca Fix _transform_update to accept unicode fields 2012-07-10 10:40:14 +01:00
Álex González
494b981b13 Default value for direction 2012-07-02 10:05:25 +02:00
Álex González
dd93995bd0 Forced cast to list 2012-07-02 10:01:22 +02:00
Thomas Steinacher
b3bb4add9c Fix error dict with nested validation. 2012-06-27 13:46:06 -07:00
Wilson Júnior
d305e71c27 Fixes for __ne operator in IntField and FloatField 2012-06-25 15:53:42 -03:00
Alexander Koshelev
0d92baa670 Exclude tests from installation 2012-06-24 03:08:49 +04:00
20 changed files with 796 additions and 277 deletions

View File

@@ -109,4 +109,10 @@ that much better:
 * Meir Kriheli
 * Andrey Fedoseev
 * aparajita
 * Tristan Escalada
+* Alexander Koshelev
+* Jaime Irurzun
+* Alexandre González
+* Thomas Steinacher
+* Tommi Komulainen
+* Peter Landry

View File

@@ -1,5 +1,5 @@
-Copyright (c) 2009-2010 Harry Marr
+Copyright (c) 2009-2012 See AUTHORS
 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation
 files (the "Software"), to deal in the Software without
@@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the
 Software is furnished to do so, subject to the following
 conditions:
 The above copyright notice and this permission notice shall be
 included in all copies or substantial portions of the Software.
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND

View File

@@ -2,6 +2,7 @@
 MongoEngine
 ===========
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
+:Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
 :Maintainer: Ross Lawley (http://github.com/rozza)

View File

@@ -47,25 +47,28 @@ Querying
 Fields
 ======
-.. autoclass:: mongoengine.StringField
-.. autoclass:: mongoengine.URLField
-.. autoclass:: mongoengine.EmailField
-.. autoclass:: mongoengine.IntField
-.. autoclass:: mongoengine.FloatField
-.. autoclass:: mongoengine.DecimalField
-.. autoclass:: mongoengine.DateTimeField
-.. autoclass:: mongoengine.ComplexDateTimeField
-.. autoclass:: mongoengine.ListField
-.. autoclass:: mongoengine.SortedListField
-.. autoclass:: mongoengine.DictField
-.. autoclass:: mongoengine.MapField
-.. autoclass:: mongoengine.ObjectIdField
-.. autoclass:: mongoengine.ReferenceField
-.. autoclass:: mongoengine.GenericReferenceField
-.. autoclass:: mongoengine.EmbeddedDocumentField
-.. autoclass:: mongoengine.GenericEmbeddedDocumentField
-.. autoclass:: mongoengine.BooleanField
-.. autoclass:: mongoengine.FileField
-.. autoclass:: mongoengine.BinaryField
-.. autoclass:: mongoengine.GeoPointField
-.. autoclass:: mongoengine.SequenceField
+.. autoclass:: mongoengine.BinaryField
+.. autoclass:: mongoengine.BooleanField
+.. autoclass:: mongoengine.ComplexDateTimeField
+.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.DecimalField
+.. autoclass:: mongoengine.DictField
+.. autoclass:: mongoengine.DynamicField
+.. autoclass:: mongoengine.EmailField
+.. autoclass:: mongoengine.EmbeddedDocumentField
+.. autoclass:: mongoengine.FileField
+.. autoclass:: mongoengine.FloatField
+.. autoclass:: mongoengine.GenericEmbeddedDocumentField
+.. autoclass:: mongoengine.GenericReferenceField
+.. autoclass:: mongoengine.GeoPointField
+.. autoclass:: mongoengine.ImageField
+.. autoclass:: mongoengine.IntField
+.. autoclass:: mongoengine.ListField
+.. autoclass:: mongoengine.MapField
+.. autoclass:: mongoengine.ObjectIdField
+.. autoclass:: mongoengine.ReferenceField
+.. autoclass:: mongoengine.SequenceField
+.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.StringField
+.. autoclass:: mongoengine.URLField
+.. autoclass:: mongoengine.UUIDField

View File

@@ -2,8 +2,55 @@
 Changelog
 =========
+
+Changes in 0.6.20
+=================
+- Added support for distinct and db_alias (MongoEngine/mongoengine#59)
+- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
+- Fixed BinaryField lookup re (MongoEngine/mongoengine#48)
+
+Changes in 0.6.19
+=================
+- Added Binary support to UUID (MongoEngine/mongoengine#47)
+- Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46)
+- Fixed BinaryField python value issue (MongoEngine/mongoengine#48)
+- Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41)
+- Fixed queryset manager issue (MongoEngine/mongoengine#52)
+- Fixed FileField comparision (hmarr/mongoengine#547)
+
+Changes in 0.6.18
+=================
+- Fixed recursion loading bug in _get_changed_fields
+
+Changes in 0.6.17
+=================
+- Fixed issue with custom queryset manager expecting explict variable names
+
+Changes in 0.6.16
+=================
+- Fixed issue where db_alias wasn't inherited
+
+Changes in 0.6.15
+=================
+- Updated validation error messages
+- Added support for null / zero / false values in item_frequencies
+- Fixed cascade save edge case
+- Fixed geo index creation through reference fields
+- Added support for args / kwargs when using @queryset_manager
+- Deref list custom id fix
+
+Changes in 0.6.14
+=================
+- Fixed error dict with nested validation
+- Fixed Int/Float fields and not equals None
+- Exclude tests from installation
+- Allow tuples for index meta
+- Fixed use of str in instance checks
+- Fixed unicode support in transform update
+- Added support for add_to_set and each
+
 Changes in 0.6.13
-================
+=================
 - Fixed EmbeddedDocument db_field validation issue
 - Fixed StringField unicode issue
 - Fixes __repr__ modifying the cursor
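
As a quick illustration of the 0.6.20 chained-queryset change listed above, here is a minimal sketch; the Page document and its tags field are assumptions made only for this example. Repeated filters on the same field are now combined in the generated query rather than the later filter overwriting the earlier one:

    class Page(Document):
        tags = ListField(StringField())

    # Both constraints on `tags` are kept; previously the second filter
    # replaced the first one in the resulting MongoDB query.
    Page.objects(tags='mongodb').filter(tags='python')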

View File

@@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
 to retrieve the value (such as in the above example). The field types available
 are as follows:

-* :class:`~mongoengine.StringField`
-* :class:`~mongoengine.URLField`
-* :class:`~mongoengine.EmailField`
-* :class:`~mongoengine.IntField`
-* :class:`~mongoengine.FloatField`
-* :class:`~mongoengine.DecimalField`
-* :class:`~mongoengine.DateTimeField`
-* :class:`~mongoengine.ComplexDateTimeField`
-* :class:`~mongoengine.ListField`
-* :class:`~mongoengine.SortedListField`
-* :class:`~mongoengine.DictField`
-* :class:`~mongoengine.MapField`
-* :class:`~mongoengine.ObjectIdField`
-* :class:`~mongoengine.ReferenceField`
-* :class:`~mongoengine.GenericReferenceField`
-* :class:`~mongoengine.EmbeddedDocumentField`
-* :class:`~mongoengine.GenericEmbeddedDocumentField`
-* :class:`~mongoengine.BooleanField`
-* :class:`~mongoengine.FileField`
-* :class:`~mongoengine.BinaryField`
-* :class:`~mongoengine.GeoPointField`
-* :class:`~mongoengine.SequenceField`
+* :class:`~mongoengine.BinaryField`
+* :class:`~mongoengine.BooleanField`
+* :class:`~mongoengine.ComplexDateTimeField`
+* :class:`~mongoengine.DateTimeField`
+* :class:`~mongoengine.DecimalField`
+* :class:`~mongoengine.DictField`
+* :class:`~mongoengine.DynamicField`
+* :class:`~mongoengine.EmailField`
+* :class:`~mongoengine.EmbeddedDocumentField`
+* :class:`~mongoengine.FileField`
+* :class:`~mongoengine.FloatField`
+* :class:`~mongoengine.GenericEmbeddedDocumentField`
+* :class:`~mongoengine.GenericReferenceField`
+* :class:`~mongoengine.GeoPointField`
+* :class:`~mongoengine.ImageField`
+* :class:`~mongoengine.IntField`
+* :class:`~mongoengine.ListField`
+* :class:`~mongoengine.MapField`
+* :class:`~mongoengine.ObjectIdField`
+* :class:`~mongoengine.ReferenceField`
+* :class:`~mongoengine.SequenceField`
+* :class:`~mongoengine.SortedListField`
+* :class:`~mongoengine.StringField`
+* :class:`~mongoengine.URLField`
+* :class:`~mongoengine.UUIDField`

 Field arguments
 ---------------
@@ -256,6 +259,35 @@ as the constructor's argument::
     content = StringField()

+.. _one-to-many-with-listfields:
+
+One to Many with ListFields
+'''''''''''''''''''''''''''
+
+If you are implementing a one to many relationship via a list of references,
+then the references are stored as DBRefs and to query you need to pass an
+instance of the object to the query::
+
+    class User(Document):
+        name = StringField()
+
+    class Page(Document):
+        content = StringField()
+        authors = ListField(ReferenceField(User))
+
+    bob = User(name="Bob Jones").save()
+    john = User(name="John Smith").save()
+
+    Page(content="Test Page", authors=[bob, john]).save()
+    Page(content="Another Page", authors=[john]).save()
+
+    # Find all pages Bob authored
+    Page.objects(authors__in=[bob])
+
+    # Find all pages that both Bob and John have authored
+    Page.objects(authors__all=[bob, john])
+
 Dealing with deletion of referred documents
 '''''''''''''''''''''''''''''''''''''''''''
 By default, MongoDB doesn't check the integrity of your data, so deleting

View File

@@ -232,7 +232,7 @@ custom manager methods as you like::
     BlogPost(title='test1', published=False).save()
     BlogPost(title='test2', published=True).save()
     assert len(BlogPost.objects) == 2
-    assert len(BlogPost.live_posts) == 1
+    assert len(BlogPost.live_posts()) == 1

 Custom QuerySets
 ================
@@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a
 :class:`~mongoengine.Document`\ s ``meta`` dictionary::

     class AwesomerQuerySet(QuerySet):
-        pass
+
+        def get_awesome(self):
+            return self.filter(awesome=True)

     class Page(Document):
         meta = {'queryset_class': AwesomerQuerySet}

+    # To call:
+    Page.objects.get_awesome()
+
 .. versionadded:: 0.4

 Aggregation
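
For context, a minimal self-contained sketch of the custom-manager usage the hunk above documents, mirroring the BlogPost example from the guide:

    from mongoengine import Document, StringField, BooleanField, queryset_manager

    class BlogPost(Document):
        title = StringField()
        published = BooleanField()

        @queryset_manager
        def live_posts(doc_cls, queryset):
            # a manager method receives the document class and the base queryset
            return queryset.filter(published=True)

    BlogPost(title='test1', published=False).save()
    BlogPost(title='test2', published=True).save()
    assert len(BlogPost.live_posts()) == 1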

View File

@@ -12,7 +12,7 @@ from signals import *
 __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
            queryset.__all__ + signals.__all__)

-VERSION = (0, 6, 13)
+VERSION = (0, 6, 20)

 def get_version():

View File

@@ -1,4 +1,5 @@
import warnings import warnings
from collections import defaultdict
from queryset import QuerySet, QuerySetManager from queryset import QuerySet, QuerySetManager
from queryset import DoesNotExist, MultipleObjectsReturned from queryset import DoesNotExist, MultipleObjectsReturned
@@ -53,9 +54,9 @@ class ValidationError(AssertionError):
message = super(ValidationError, self).__getattribute__(name) message = super(ValidationError, self).__getattribute__(name)
if name == 'message': if name == 'message':
if self.field_name: if self.field_name:
message = '%s ("%s")' % (message, self.field_name) message = '%s' % message
if self.errors: if self.errors:
message = '%s:\n%s' % (message, self._format_errors()) message = '%s(%s)' % (message, self._format_errors())
return message return message
def _get_message(self): def _get_message(self):
@@ -93,17 +94,20 @@ class ValidationError(AssertionError):
def _format_errors(self): def _format_errors(self):
"""Returns a string listing all errors within a document""" """Returns a string listing all errors within a document"""
def format_error(field, value, prefix=''): def generate_key(value, prefix=''):
prefix = "%s.%s" % (prefix, field) if prefix else "%s" % field if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value])
if isinstance(value, dict): if isinstance(value, dict):
value = ' '.join(
[generate_key(v, k) for k, v in value.iteritems()])
return '\n'.join( results = "%s.%s" % (prefix, value) if prefix else value
[format_error(k, value[k], prefix) for k in value]) return results
else:
return "%s: %s" % (prefix, value)
return '\n'.join( error_dict = defaultdict(list)
[format_error(k, v) for k, v in self.to_dict().items()]) for k, v in self.to_dict().iteritems():
error_dict[generate_key(v)].append(k)
return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
_document_registry = {} _document_registry = {}
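
The hunk above reworks how nested validation errors are rendered. Per the updated test expectations later in this diff, a document with missing required fields now produces a message of roughly this shape (the User document here is assumed purely for illustration):

    class User(Document):
        username = StringField(required=True)
        name = StringField(required=True)

    try:
        User().validate()
    except ValidationError, e:
        # e.message now reads e.g. "ValidationError(Field is required: ['username', 'name'])"
        # and e.to_dict() maps each field name to a plain 'Field is required' string
        print e.message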
@@ -267,8 +271,10 @@ class ComplexBaseField(BaseField):
if instance is None: if instance is None:
# Document class being used rather than a document object # Document class being used rather than a document object
return self return self
from fields import GenericReferenceField, ReferenceField
if not self._dereference and instance._initialised: dereference = self.field is None or isinstance(self.field,
(GenericReferenceField, ReferenceField))
if not self._dereference and instance._initialised and dereference:
from dereference import DeReference from dereference import DeReference
self._dereference = DeReference() # Cached self._dereference = DeReference() # Cached
instance._data[self.name] = self._dereference( instance._data[self.name] = self._dereference(
@@ -403,11 +409,11 @@ class ComplexBaseField(BaseField):
for k, v in sequence: for k, v in sequence:
try: try:
self.field._validate(v) self.field._validate(v)
except (ValidationError, AssertionError), error: except ValidationError, error:
if hasattr(error, 'errors'): errors[k] = error.errors or error
errors[k] = error.errors except (ValueError, AssertionError), error:
else: errors[k] = error
errors[k] = error
if errors: if errors:
field_class = self.field.__class__.__name__ field_class = self.field.__class__.__name__
self.error('Invalid %s item (%s)' % (field_class, value), self.error('Invalid %s item (%s)' % (field_class, value),
@@ -643,8 +649,13 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
del(attrs['meta']['collection']) del(attrs['meta']['collection'])
if base._get_collection_name(): if base._get_collection_name():
collection = base._get_collection_name() collection = base._get_collection_name()
# Propagate index options.
for key in ('index_background', 'index_drop_dups', 'index_opts'): # Propagate inherited values
keys_to_propogate = (
'index_background', 'index_drop_dups', 'index_opts',
'allow_inheritance', 'queryset_class', 'db_alias',
)
for key in keys_to_propogate:
if key in base._meta: if key in base._meta:
base_meta[key] = base._meta[key] base_meta[key] = base._meta[key]
@@ -653,11 +664,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
abstract_base_indexes += base._meta.get('indexes', []) abstract_base_indexes += base._meta.get('indexes', [])
else: else:
base_indexes += base._meta.get('indexes', []) base_indexes += base._meta.get('indexes', [])
# Propagate 'allow_inheritance'
if 'allow_inheritance' in base._meta:
base_meta['allow_inheritance'] = base._meta['allow_inheritance']
if 'queryset_class' in base._meta:
base_meta['queryset_class'] = base._meta['queryset_class']
try: try:
base_meta['objects'] = base.__getattribute__(base, 'objects') base_meta['objects'] = base.__getattribute__(base, 'objects')
except TypeError: except TypeError:
@@ -665,6 +671,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
except AttributeError: except AttributeError:
pass pass
# defaults
meta = { meta = {
'abstract': False, 'abstract': False,
'collection': collection, 'collection': collection,
@@ -704,7 +711,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
meta['queryset_class'] = manager.queryset_class meta['queryset_class'] = manager.queryset_class
new_class.objects = manager new_class.objects = manager
indicies = meta['indexes'] + abstract_base_indexes indicies = list(meta['indexes']) + abstract_base_indexes
user_indexes = [QuerySet._build_index_spec(new_class, spec) user_indexes = [QuerySet._build_index_spec(new_class, spec)
for spec in indicies] + base_indexes for spec in indicies] + base_indexes
new_class._meta['indexes'] = user_indexes new_class._meta['indexes'] = user_indexes
@@ -897,8 +904,7 @@ class BaseDocument(object):
errors[field.name] = ValidationError('Field is required', errors[field.name] = ValidationError('Field is required',
field_name=field.name) field_name=field.name)
if errors: if errors:
raise ValidationError('Errors encountered validating document', raise ValidationError('ValidationError', errors=errors)
errors=errors)
def to_mongo(self): def to_mongo(self):
"""Return data dictionary ready for use with MongoDB. """Return data dictionary ready for use with MongoDB.
@@ -1006,9 +1012,10 @@ Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, error
field_list.update(self._dynamic_fields) field_list.update(self._dynamic_fields)
for field_name in field_list: for field_name in field_list:
db_field_name = self._db_field_map.get(field_name, field_name) db_field_name = self._db_field_map.get(field_name, field_name)
key = '%s.' % db_field_name key = '%s.' % db_field_name
field = getattr(self, field_name, None) field = self._data.get(field_name, None)
if hasattr(field, 'id'): if hasattr(field, 'id'):
if field.id in inspected: if field.id in inspected:
continue continue
@@ -1111,7 +1118,11 @@ Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, error
inspected = inspected or [] inspected = inspected or []
geo_indices = [] geo_indices = []
inspected.append(cls) inspected.append(cls)
from fields import EmbeddedDocumentField, GeoPointField
for field in cls._fields.values(): for field in cls._fields.values():
if not isinstance(field, (EmbeddedDocumentField, GeoPointField)):
continue
if hasattr(field, 'document_type'): if hasattr(field, 'document_type'):
field_cls = field.document_type field_cls = field.document_type
if field_cls in inspected: if field_cls in inspected:

View File

@@ -34,7 +34,9 @@ class DeReference(object):
         doc_type = None
         if instance and instance._fields:
-            doc_type = instance._fields[name].field
+            doc_type = instance._fields[name]
+            if hasattr(doc_type, 'field'):
+                doc_type = doc_type.field
+
             if isinstance(doc_type, ReferenceField):
                 doc_type = doc_type.document_type
@@ -166,7 +168,7 @@
             else:
                 data[k] = v
-            if k in self.object_map:
+            if k in self.object_map and not is_list:
                 data[k] = self.object_map[k]
             elif hasattr(v, '_fields'):
                 for field_name, field in v._fields.iteritems():

View File

@@ -248,11 +248,16 @@ class Document(BaseDocument):
         _refs = kwargs.get('_refs', []) or []

         for name, cls in self._fields.items():
             if not isinstance(cls, (ReferenceField, GenericReferenceField)):
                 continue

             ref = getattr(self, name)
             if not ref:
                 continue

+            if isinstance(ref, DBRef):
+                continue
+
             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
             if ref and ref_id not in _refs:
                 _refs.append(ref_id)
@@ -370,7 +375,7 @@ class DynamicDocument(Document):
     :class:`~mongoengine.DynamicField` and data can be attributed to that
     field.

-    ..note::
+    .. note::
         There is one caveat on Dynamic Documents: fields cannot start with `_`
     """

View File

@@ -4,9 +4,9 @@ import decimal
import gridfs import gridfs
import re import re
import uuid import uuid
import warnings
from bson import Binary, DBRef, SON, ObjectId from bson import Binary, DBRef, SON, ObjectId
from base import (BaseField, ComplexBaseField, ObjectIdField, from base import (BaseField, ComplexBaseField, ObjectIdField,
ValidationError, get_document, BaseDocument) ValidationError, get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet from queryset import DO_NOTHING, QuerySet
@@ -167,6 +167,9 @@ class IntField(BaseField):
self.error('Integer value is too large') self.error('Integer value is too large')
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
if value is None:
return value
return int(value) return int(value)
@@ -194,6 +197,9 @@ class FloatField(BaseField):
self.error('Float value is too large') self.error('Float value is too large')
def prepare_query_value(self, op, value): def prepare_query_value(self, op, value):
if value is None:
return value
return float(value) return float(value)
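
These two prepare_query_value guards are what make `__ne=None` queries work on numeric fields (see test_int_and_float_ne_operator further down in this diff); a brief usage sketch, with the document assumed for the example:

    class Stat(Document):
        int_fld = IntField()
        float_fld = FloatField()

    # previously these raised because None was passed straight to int()/float()
    Stat.objects(int_fld__ne=None)
    Stat.objects(float_fld__ne=None)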
@@ -445,7 +451,7 @@ class GenericEmbeddedDocumentField(BaseField):
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
..note :: You can use the choices param to limit the acceptable .. note:: You can use the choices param to limit the acceptable
EmbeddedDocument types EmbeddedDocument types
""" """
@@ -477,7 +483,10 @@ class GenericEmbeddedDocumentField(BaseField):
class DynamicField(BaseField): class DynamicField(BaseField):
"""Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" """A truly dynamic field type capable of handling different and varying
types of data.
Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def to_mongo(self, value): def to_mongo(self, value):
"""Convert a Python type to a MongoDBcompatible type. """Convert a Python type to a MongoDBcompatible type.
@@ -521,6 +530,8 @@ class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances """A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database. of the field to be used as a list in the database.
If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
.. note:: .. note::
Required means it cannot be empty - as the default for ListFields is [] Required means it cannot be empty - as the default for ListFields is []
""" """
@@ -757,10 +768,10 @@ class GenericReferenceField(BaseField):
"""A reference to *any* :class:`~mongoengine.document.Document` subclass """A reference to *any* :class:`~mongoengine.document.Document` subclass
that will be automatically dereferenced on access (lazily). that will be automatically dereferenced on access (lazily).
..note :: Any documents used as a generic reference must be registered in the .. note:: Any documents used as a generic reference must be registered in the
document registry. Importing the model will automatically register it. document registry. Importing the model will automatically register it.
..note :: You can use the choices param to limit the acceptable Document types .. note:: You can use the choices param to limit the acceptable Document types
.. versionadded:: 0.3 .. versionadded:: 0.3
""" """
@@ -834,13 +845,9 @@ class BinaryField(BaseField):
def to_mongo(self, value): def to_mongo(self, value):
return Binary(value) return Binary(value)
def to_python(self, value):
# Returns str not unicode as this is binary data
return str(value)
def validate(self, value): def validate(self, value):
if not isinstance(value, str): if not isinstance(value, (basestring, Binary)):
self.error('BinaryField only accepts string values') self.error('BinaryField only accepts string or bson Binary values')
if self.max_bytes is not None and len(value) > self.max_bytes: if self.max_bytes is not None and len(value) > self.max_bytes:
self.error('Binary value is too long') self.error('Binary value is too long')
@@ -897,6 +904,8 @@ class GridFSProxy(object):
return '<%s: %s>' % (self.__class__.__name__, self.grid_id) return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
def __cmp__(self, other): def __cmp__(self, other):
if not isinstance(other, GridFSProxy):
return -1
return cmp((self.grid_id, self.collection_name, self.db_alias), return cmp((self.grid_id, self.collection_name, self.db_alias),
(other.grid_id, other.collection_name, other.db_alias)) (other.grid_id, other.collection_name, other.db_alias))
@@ -1011,7 +1020,7 @@ class FileField(BaseField):
def __set__(self, instance, value): def __set__(self, instance, value):
key = self.name key = self.name
if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str): if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, basestring):
# using "FileField() = file/string" notation # using "FileField() = file/string" notation
grid_file = instance._data.get(self.name) grid_file = instance._data.get(self.name)
# If a file already exists, delete it # If a file already exists, delete it
@@ -1279,7 +1288,7 @@ class SequenceField(IntField):
instance._data[self.name] = value instance._data[self.name] = value
instance._mark_as_changed(self.name) instance._mark_as_changed(self.name)
return value return int(value) if value else None
def __set__(self, instance, value): def __set__(self, instance, value):
@@ -1299,17 +1308,40 @@ class UUIDField(BaseField):
.. versionadded:: 0.6 .. versionadded:: 0.6
""" """
_binary = None
def __init__(self, **kwargs): def __init__(self, binary=None, **kwargs):
"""
Store UUID data in the database
:param binary: (optional) boolean store as binary.
.. versionchanged:: 0.6.19
"""
if binary is None:
binary = False
msg = ("UUIDFields will soon default to store as binary, please "
"configure binary=False if you wish to store as a string")
warnings.warn(msg, FutureWarning)
self._binary = binary
super(UUIDField, self).__init__(**kwargs) super(UUIDField, self).__init__(**kwargs)
def to_python(self, value): def to_python(self, value):
if not isinstance(value, basestring): if not self._binary:
value = unicode(value) if not isinstance(value, basestring):
return uuid.UUID(value) value = unicode(value)
return uuid.UUID(value)
return value
def to_mongo(self, value): def to_mongo(self, value):
return unicode(value) if not self._binary:
return unicode(value)
return value
def prepare_query_value(self, op, value):
if value is None:
return None
return self.to_mongo(value)
def validate(self, value): def validate(self, value):
if not isinstance(value, uuid.UUID): if not isinstance(value, uuid.UUID):
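
A short sketch of the new UUIDField binary option added above; by default it still stores strings and emits the FutureWarning shown in the hunk, and the class/field names below mirror the tests later in this diff:

    import uuid
    from mongoengine import Document, UUIDField

    class Person(Document):
        api_key = UUIDField(binary=True)   # store as a BSON Binary instead of a string

    uu = uuid.uuid4()
    Person(api_key=uu).save()
    assert Person.objects(api_key=uu).count() == 1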

View File

@@ -4,6 +4,8 @@ import copy
import itertools import itertools
import operator import operator
from functools import partial
import pymongo import pymongo
from bson.code import Code from bson.code import Code
@@ -481,7 +483,6 @@ class QuerySet(object):
self._collection.ensure_index(index_spec, self._collection.ensure_index(index_spec,
background=background, **index_opts) background=background, **index_opts)
@classmethod @classmethod
def _build_index_spec(cls, doc_cls, spec): def _build_index_spec(cls, doc_cls, spec):
"""Build a PyMongo index spec from a MongoEngine index spec. """Build a PyMongo index spec from a MongoEngine index spec.
@@ -492,6 +493,7 @@ class QuerySet(object):
spec = {'fields': spec} spec = {'fields': spec}
index_list = [] index_list = []
direction = None
use_types = doc_cls._meta.get('allow_inheritance', True) use_types = doc_cls._meta.get('allow_inheritance', True)
for key in spec['fields']: for key in spec['fields']:
# Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
@@ -639,7 +641,7 @@ class QuerySet(object):
from mongoengine.fields import ReferenceField, GenericReferenceField from mongoengine.fields import ReferenceField, GenericReferenceField
if isinstance(field, (ReferenceField, GenericReferenceField)): if isinstance(field, (ReferenceField, GenericReferenceField)):
raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
if getattr(field, 'field', None): if hasattr(getattr(field, 'field', None), 'lookup_member'):
new_field = field.field.lookup_member(field_name) new_field = field.field.lookup_member(field_name)
else: else:
# Look up subfield on the previous field # Look up subfield on the previous field
@@ -702,7 +704,7 @@ class QuerySet(object):
cleaned_fields = [] cleaned_fields = []
for field in fields: for field in fields:
append_field = True append_field = True
if isinstance(field, str): if isinstance(field, basestring):
parts.append(field) parts.append(field)
append_field = False append_field = False
else: else:
@@ -763,8 +765,22 @@ class QuerySet(object):
key = '.'.join(parts) key = '.'.join(parts)
if op is None or key not in mongo_query: if op is None or key not in mongo_query:
mongo_query[key] = value mongo_query[key] = value
elif key in mongo_query and isinstance(mongo_query[key], dict): elif key in mongo_query:
mongo_query[key].update(value) if isinstance(mongo_query[key], dict) and isinstance(value, dict):
mongo_query[key].update(value)
elif isinstance(mongo_query[key], list):
mongo_query[key].append(value)
else:
mongo_query[key] = [mongo_query[key], value]
for k, v in mongo_query.items():
if isinstance(v, list):
value = [{k:val} for val in v]
if '$and' in mongo_query.keys():
mongo_query['$and'].append(value)
else:
mongo_query['$and'] = value
del mongo_query[k]
return mongo_query return mongo_query
@@ -804,19 +820,18 @@ class QuerySet(object):
keyword argument called :attr:`defaults`. keyword argument called :attr:`defaults`.
.. note:: This requires two separate operations and therefore a .. note:: This requires two separate operations and therefore a
race condition exists. Because there are no transactions in mongoDB race condition exists. Because there are no transactions in mongoDB
other approaches should be investigated, to ensure you don't other approaches should be investigated, to ensure you don't
accidently duplicate data when using this method. accidently duplicate data when using this method.
:param write_options: optional extra keyword arguments used if we :param write_options: optional extra keyword arguments used if we
have to create a new document. have to create a new document.
Passes any write_options onto :meth:`~mongoengine.Document.save` Passes any write_options onto :meth:`~mongoengine.Document.save`
.. versionadded:: 0.3
:param auto_save: if the object is to be saved automatically if not found. :param auto_save: if the object is to be saved automatically if not found.
.. versionadded:: 0.6 .. versionchanged:: 0.6 - added `auto_save`
.. versionadded:: 0.3
""" """
defaults = query.get('defaults', {}) defaults = query.get('defaults', {})
if 'defaults' in query: if 'defaults' in query:
@@ -1153,7 +1168,8 @@ class QuerySet(object):
.. versionchanged:: 0.5 - Fixed handling references .. versionchanged:: 0.5 - Fixed handling references
""" """
from dereference import DeReference from dereference import DeReference
return DeReference()(self._cursor.distinct(field), 1) return DeReference()(self._cursor.distinct(field), 1,
name=field, instance=self._document)
def only(self, *fields): def only(self, *fields):
"""Load only a subset of this document's fields. :: """Load only a subset of this document's fields. ::
@@ -1373,7 +1389,7 @@ class QuerySet(object):
cleaned_fields = [] cleaned_fields = []
for field in fields: for field in fields:
append_field = True append_field = True
if isinstance(field, str): if isinstance(field, basestring):
# Convert the S operator to $ # Convert the S operator to $
if field == 'S': if field == 'S':
field = '$' field = '$'
@@ -1387,11 +1403,16 @@ class QuerySet(object):
# Convert value to proper value # Convert value to proper value
field = cleaned_fields[-1] field = cleaned_fields[-1]
if op in (None, 'set', 'push', 'pull', 'addToSet'): if op in (None, 'set', 'push', 'pull'):
if field.required or value is not None: if field.required or value is not None:
value = field.prepare_query_value(op, value) value = field.prepare_query_value(op, value)
elif op in ('pushAll', 'pullAll'): elif op in ('pushAll', 'pullAll'):
value = [field.prepare_query_value(op, v) for v in value] value = [field.prepare_query_value(op, v) for v in value]
elif op == 'addToSet':
if isinstance(value, (list, tuple, set)):
value = [field.prepare_query_value(op, v) for v in value]
elif field.required or value is not None:
value = field.prepare_query_value(op, value)
key = '.'.join(parts) key = '.'.join(parts)
@@ -1407,6 +1428,8 @@ class QuerySet(object):
parts.reverse() parts.reverse()
for key in parts: for key in parts:
value = {key: value} value = {key: value}
elif op == 'addToSet' and isinstance(value, list):
value = {key: {"$each": value}}
else: else:
value = {key: value} value = {key: value}
key = '$' + op key = '$' + op
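
The two hunks above wrap list values passed to addToSet in an $each clause; a hedged usage sketch (the Page document, tags field and page_id variable are assumptions for illustration only):

    class Page(Document):
        tags = ListField(StringField())

    # a list value is now issued as {'$addToSet': {'tags': {'$each': [...]}}}
    Page.objects(id=page_id).update_one(add_to_set__tags=['mongodb', 'python'])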
@@ -1710,10 +1733,11 @@ class QuerySet(object):
def _item_frequencies_map_reduce(self, field, normalize=False): def _item_frequencies_map_reduce(self, field, normalize=False):
map_func = """ map_func = """
function() { function() {
path = '{{~%(field)s}}'.split('.'); var path = '{{~%(field)s}}'.split('.');
field = this; var field = this;
for (p in path) { for (p in path) {
if (field) if (typeof field != 'undefined')
field = field[path[p]]; field = field[path[p]];
else else
break; break;
@@ -1722,7 +1746,7 @@ class QuerySet(object):
field.forEach(function(item) { field.forEach(function(item) {
emit(item, 1); emit(item, 1);
}); });
} else if (field) { } else if (typeof field != 'undefined') {
emit(field, 1); emit(field, 1);
} else { } else {
emit(null, 1); emit(null, 1);
@@ -1746,12 +1770,12 @@ class QuerySet(object):
if isinstance(key, float): if isinstance(key, float):
if int(key) == key: if int(key) == key:
key = int(key) key = int(key)
key = str(key) frequencies[key] = int(f.value)
frequencies[key] = f.value
if normalize: if normalize:
count = sum(frequencies.values()) count = sum(frequencies.values())
frequencies = dict([(k, v / count) for k, v in frequencies.items()]) frequencies = dict([(k, float(v) / count)
for k, v in frequencies.items()])
return frequencies return frequencies
@@ -1759,31 +1783,28 @@ class QuerySet(object):
"""Uses exec_js to execute""" """Uses exec_js to execute"""
freq_func = """ freq_func = """
function(path) { function(path) {
path = path.split('.'); var path = path.split('.');
if (options.normalize) { var total = 0.0;
var total = 0.0; db[collection].find(query).forEach(function(doc) {
db[collection].find(query).forEach(function(doc) { var field = doc;
field = doc; for (p in path) {
for (p in path) { if (field)
if (field) field = field[path[p]];
field = field[path[p]]; else
else break;
break; }
} if (field && field.constructor == Array) {
if (field && field.constructor == Array) { total += field.length;
total += field.length; } else {
} else { total++;
total++; }
} });
});
}
var frequencies = {}; var frequencies = {};
var types = {};
var inc = 1.0; var inc = 1.0;
if (options.normalize) {
inc /= total;
}
db[collection].find(query).forEach(function(doc) { db[collection].find(query).forEach(function(doc) {
field = doc; field = doc;
for (p in path) { for (p in path) {
@@ -1798,17 +1819,28 @@ class QuerySet(object):
}); });
} else { } else {
var item = field; var item = field;
types[item] = item;
frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
} }
}); });
return frequencies; return [total, frequencies, types];
} }
""" """
data = self.exec_js(freq_func, field, normalize=normalize) total, data, types = self.exec_js(freq_func, field)
if 'undefined' in data: values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
data[None] = data['undefined']
del(data['undefined']) if normalize:
return data values = dict([(k, float(v) / total) for k, v in values.items()])
frequencies = {}
for k, v in values.iteritems():
if isinstance(k, float):
if int(k) == k:
k = int(k)
frequencies[k] = v
return frequencies
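
The reworked exec_js version of item_frequencies above now counts null/zero/false values and performs normalisation client-side; a brief usage sketch with an assumed document:

    class Post(Document):
        tag = StringField()

    # maps each tag value (including None) to its count
    freqs = Post.objects.item_frequencies('tag')
    # with normalize=True each count is divided by the total number of documents
    ratios = Post.objects.item_frequencies('tag', normalize=True)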
def __repr__(self): def __repr__(self):
"""Provides the string representation of the QuerySet """Provides the string representation of the QuerySet
@@ -1844,6 +1876,17 @@ class QuerySet(object):
class QuerySetManager(object): class QuerySetManager(object):
"""
The default QuerySet Manager.
Custom QuerySet Manager functions can extend this class and users can
add extra queryset functionality. Any custom manager methods must accept a
:class:`~mongoengine.Document` class as its first argument, and a
:class:`~mongoengine.queryset.QuerySet` as its second argument.
The method function should return a :class:`~mongoengine.queryset.QuerySet`
, probably the same one that was passed in, but modified in some way.
"""
get_queryset = None get_queryset = None
@@ -1864,10 +1907,13 @@ class QuerySetManager(object):
queryset_class = owner._meta['queryset_class'] or QuerySet queryset_class = owner._meta['queryset_class'] or QuerySet
queryset = queryset_class(owner, owner._get_collection()) queryset = queryset_class(owner, owner._get_collection())
if self.get_queryset: if self.get_queryset:
if self.get_queryset.func_code.co_argcount == 1: arg_count = self.get_queryset.func_code.co_argcount
if arg_count == 1:
queryset = self.get_queryset(queryset) queryset = self.get_queryset(queryset)
else: elif arg_count == 2:
queryset = self.get_queryset(owner, queryset) queryset = self.get_queryset(owner, queryset)
else:
queryset = partial(self.get_queryset, owner, queryset)
return queryset return queryset

View File

@@ -5,7 +5,7 @@
 %define srcname mongoengine
 Name: python-%{srcname}
-Version: 0.6.13
+Version: 0.6.20
 Release: 1%{?dist}
 Summary: A Python Document-Object Mapper for working with MongoDB

View File

@@ -5,9 +5,9 @@ test = nosetests
 verbosity = 2
 detailed-errors = 1
 #with-coverage = 1
-cover-html = 1
-cover-html-dir = ../htmlcov
-cover-package = mongoengine
-cover-erase = 1
+#cover-erase = 1
+#cover-html = 1
+#cover-html-dir = ../htmlcov
+#cover-package = mongoengine
 where = tests
 #tests = test_bugfix.py

View File

@@ -35,7 +35,7 @@ CLASSIFIERS = [
 setup(name='mongoengine',
       version=VERSION,
-      packages=find_packages(),
+      packages=find_packages(exclude=('tests',)),
       author='Harry Marr',
       author_email='harry.marr@{nospam}gmail.com',
       maintainer="Ross Lawley",

View File

@@ -810,7 +810,7 @@ class FieldTest(unittest.TestCase):
room = Room.objects.first().select_related() room = Room.objects.first().select_related()
self.assertEquals(room.staffs_with_position[0]['staff'], sarah) self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
self.assertEquals(room.staffs_with_position[1]['staff'], bob) self.assertEquals(room.staffs_with_position[1]['staff'], bob)
def test_document_reload_no_inheritance(self): def test_document_reload_no_inheritance(self):
class Foo(Document): class Foo(Document):
meta = {'allow_inheritance': False} meta = {'allow_inheritance': False}
@@ -841,3 +841,25 @@ class FieldTest(unittest.TestCase):
self.assertEquals(type(foo.bar), Bar) self.assertEquals(type(foo.bar), Bar)
self.assertEquals(type(foo.baz), Baz) self.assertEquals(type(foo.baz), Baz)
def test_list_lookup_not_checked_in_map(self):
"""Ensure we dereference list data correctly
"""
class Comment(Document):
id = IntField(primary_key=True)
text = StringField()
class Message(Document):
id = IntField(primary_key=True)
comments = ListField(ReferenceField(Comment))
Comment.drop_collection()
Message.drop_collection()
c1 = Comment(id=0, text='zero').save()
c2 = Comment(id=1, text='one').save()
Message(id=1, comments=[c1, c2]).save()
msg = Message.objects.get(id=1)
self.assertEqual(0, msg.comments[0].id)
self.assertEqual(1, msg.comments[1].id)

View File

@@ -684,6 +684,29 @@ class DocumentTest(unittest.TestCase):
self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal") self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal")
self.assertEquals(Person.objects.get(name="Fred").rank, "Private") self.assertEquals(Person.objects.get(name="Fred").rank, "Private")
def test_db_embedded_doc_field_load(self):
"""Ensure we load embedded document data correctly
"""
class Rank(EmbeddedDocument):
title = StringField(required=True)
class Person(Document):
name = StringField(required=True)
rank_ = EmbeddedDocumentField(Rank, required=False, db_field='rank')
@property
def rank(self):
return self.rank_.title if self.rank_ is not None else "Private"
Person.drop_collection()
Person(name="Jack", rank_=Rank(title="Corporal")).save()
Person(name="Fred").save()
self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal")
self.assertEquals(Person.objects.get(name="Fred").rank, "Private")
def test_explicit_geo2d_index(self): def test_explicit_geo2d_index(self):
"""Ensure that geo2d indexes work when created via meta[indexes] """Ensure that geo2d indexes work when created via meta[indexes]
""" """
@@ -849,15 +872,26 @@ class DocumentTest(unittest.TestCase):
def test_geo_indexes_recursion(self): def test_geo_indexes_recursion(self):
class User(Document): class Location(Document):
channel = ReferenceField('Channel') name = StringField()
location = GeoPointField() location = GeoPointField()
class Channel(Document): class Parent(Document):
user = ReferenceField('User') name = StringField()
location = GeoPointField() location = ReferenceField(Location)
self.assertEquals(len(User._geo_indices()), 2) Location.drop_collection()
Parent.drop_collection()
list(Parent.objects)
collection = Parent._get_collection()
info = collection.index_information()
self.assertFalse('location_2d' in info)
self.assertEquals(len(Parent._geo_indices()), 0)
self.assertEquals(len(Location._geo_indices()), 1)
def test_covered_index(self): def test_covered_index(self):
"""Ensure that covered indexes can be used """Ensure that covered indexes can be used
@@ -2965,7 +2999,7 @@ class DocumentTest(unittest.TestCase):
self.assertEqual(User.objects.first(), bob) self.assertEqual(User.objects.first(), bob)
self.assertEqual(Book.objects.first(), hp) self.assertEqual(Book.objects.first(), hp)
# DeRefecence # DeReference
class AuthorBooks(Document): class AuthorBooks(Document):
author = ReferenceField(User) author = ReferenceField(User)
book = ReferenceField(Book) book = ReferenceField(Book)
@@ -2993,6 +3027,18 @@ class DocumentTest(unittest.TestCase):
self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()]) self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()])
self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()]) self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()])
def test_db_alias_propagates(self):
"""db_alias propagates?
"""
class A(Document):
name = StringField()
meta = {"db_alias": "testdb-1", "allow_inheritance": True}
class B(A):
pass
self.assertEquals('testdb-1', B._meta.get('db_alias'))
def test_db_ref_usage(self): def test_db_ref_usage(self):
""" DB Ref usage in __raw__ queries """ """ DB Ref usage in __raw__ queries """
@@ -3103,7 +3149,7 @@ class ValidatorErrorTest(unittest.TestCase):
self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'], self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
'Inception') 'Inception')
self.assertEquals(error.message, "root:\n1st.2nd.3rd.4th: Inception") self.assertEquals(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
def test_model_validation(self): def test_model_validation(self):
@@ -3114,13 +3160,11 @@ class ValidatorErrorTest(unittest.TestCase):
try: try:
User().validate() User().validate()
except ValidationError, e: except ValidationError, e:
expected_error_message = """Errors encountered validating document: expected_error_message = """ValidationError(Field is required: ['username', 'name'])"""
username: Field is required ("username")
name: Field is required ("name")"""
self.assertEquals(e.message, expected_error_message) self.assertEquals(e.message, expected_error_message)
self.assertEquals(e.to_dict(), { self.assertEquals(e.to_dict(), {
'username': 'Field is required ("username")', 'username': 'Field is required',
'name': u'Field is required ("name")'}) 'name': 'Field is required'})
def test_spaces_in_keys(self): def test_spaces_in_keys(self):
@@ -3138,5 +3182,43 @@ name: Field is required ("name")"""
one = Doc.objects.filter(**{'hello world': 1}).count() one = Doc.objects.filter(**{'hello world': 1}).count()
self.assertEqual(1, one) self.assertEqual(1, one)
def test_fields_rewrite(self):
class BasePerson(Document):
name = StringField()
age = IntField()
meta = {'abstract': True}
class Person(BasePerson):
name = StringField(required=True)
p = Person(age=15)
self.assertRaises(ValidationError, p.validate)
def test_cascaded_save_wrong_reference(self):
class ADocument(Document):
val = IntField()
class BDocument(Document):
a = ReferenceField(ADocument)
ADocument.drop_collection()
BDocument.drop_collection()
a = ADocument()
a.val = 15
a.save()
b = BDocument()
b.a = a
b.save()
a.delete()
b = BDocument.objects.first()
b.save(cascade=True)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@@ -6,6 +6,7 @@ import StringIO
import tempfile import tempfile
import gridfs import gridfs
from bson import Binary
from decimal import Decimal from decimal import Decimal
from mongoengine import * from mongoengine import *
@@ -127,6 +128,19 @@ class FieldTest(unittest.TestCase):
self.assertRaises(ValidationError, ret.validate) self.assertRaises(ValidationError, ret.validate)
def test_int_and_float_ne_operator(self):
class TestDocument(Document):
int_fld = IntField()
float_fld = FloatField()
TestDocument.drop_collection()
TestDocument(int_fld=None, float_fld=None).save()
TestDocument(int_fld=1, float_fld=1).save()
self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
def test_object_id_validation(self): def test_object_id_validation(self):
"""Ensure that invalid values cannot be assigned to string fields. """Ensure that invalid values cannot be assigned to string fields.
""" """
@@ -258,25 +272,56 @@ class FieldTest(unittest.TestCase):
person.admin = 'Yes' person.admin = 'Yes'
self.assertRaises(ValidationError, person.validate) self.assertRaises(ValidationError, person.validate)
def test_uuid_validation(self): def test_uuid_field_string(self):
"""Ensure that invalid values cannot be assigned to UUID fields. """Test UUID fields storing as String
""" """
class Person(Document): class Person(Document):
api_key = UUIDField() api_key = UUIDField(binary=False)
Person.drop_collection()
uu = uuid.uuid4()
Person(api_key=uu).save()
self.assertEqual(1, Person.objects(api_key=uu).count())
self.assertEqual(uu, Person.objects.first().api_key)
person = Person() person = Person()
# any uuid type is valid valid = (uuid.uuid4(), uuid.uuid1())
person.api_key = uuid.uuid4() for api_key in valid:
person.validate() person.api_key = api_key
person.api_key = uuid.uuid1() person.validate()
person.validate()
invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
'9d159858-549b-4975-9f98-dd2f987c113')
for api_key in invalid:
person.api_key = api_key
self.assertRaises(ValidationError, person.validate)
def test_uuid_field_binary(self):
"""Test UUID fields storing as Binary object
"""
class Person(Document):
api_key = UUIDField(binary=True)
Person.drop_collection()
uu = uuid.uuid4()
Person(api_key=uu).save()
self.assertEqual(1, Person.objects(api_key=uu).count())
self.assertEqual(uu, Person.objects.first().api_key)
person = Person()
valid = (uuid.uuid4(), uuid.uuid1())
for api_key in valid:
person.api_key = api_key
person.validate()
invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
'9d159858-549b-4975-9f98-dd2f987c113')
for api_key in invalid:
person.api_key = api_key
self.assertRaises(ValidationError, person.validate)
# last g cannot belong to an hex number
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g'
self.assertRaises(ValidationError, person.validate)
# short strings don't validate
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113'
self.assertRaises(ValidationError, person.validate)
def test_datetime_validation(self): def test_datetime_validation(self):
"""Ensure that invalid values cannot be assigned to datetime fields. """Ensure that invalid values cannot be assigned to datetime fields.
@@ -345,24 +390,6 @@ class FieldTest(unittest.TestCase):
self.assertNotEquals(log.date, d1) self.assertNotEquals(log.date, d1)
self.assertEquals(log.date, d2) self.assertEquals(log.date, d2)
# Pre UTC microseconds above 1000 is wonky.
# log.date has an invalid microsecond value so I can't construct
# a date to compare.
#
# However, the timedelta is predicable with pre UTC timestamps
# It always adds 16 seconds and [777216-776217] microseconds
for i in xrange(1001, 3113, 33):
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
log.date = d1
log.save()
log.reload()
self.assertNotEquals(log.date, d1)
delta = log.date - d1
self.assertEquals(delta.seconds, 16)
microseconds = 777216 - (i % 1000)
self.assertEquals(delta.microseconds, microseconds)
LogEntry.drop_collection() LogEntry.drop_collection()
def test_complexdatetime_storage(self): def test_complexdatetime_storage(self):
@@ -933,6 +960,19 @@ class FieldTest(unittest.TestCase):
doc = self.db.test.find_one() doc = self.db.test.find_one()
self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
def test_map_field_lookup(self):
"""Ensure MapField lookups succeed on Fields without a lookup method"""
class Log(Document):
name = StringField()
visited = MapField(DateTimeField())
Log.drop_collection()
Log(name="wilson", visited={'friends': datetime.datetime.now()}).save()
self.assertEqual(1, Log.objects(
visited__friends__exists=True).count())
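A minimal sketch of the lookup behaviour this test covers: dotted queries into a MapField now resolve even when the value field (here DateTimeField) declares no lookup members of its own. Names below are hypothetical:

import datetime
from mongoengine import Document, StringField, MapField, DateTimeField, connect

connect('mapfield_demo')  # hypothetical database name

class Log(Document):
    name = StringField()
    visited = MapField(DateTimeField())

Log.drop_collection()
Log(name="wilson", visited={'friends': datetime.datetime.now()}).save()

# Querying on a key of the map works...
assert Log.objects(visited__friends__exists=True).count() == 1
# ...and a key that was never written matches nothing.
assert Log.objects(visited__family__exists=True).count() == 0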
def test_embedded_db_field(self): def test_embedded_db_field(self):
class Embedded(EmbeddedDocument): class Embedded(EmbeddedDocument):
@@ -1433,7 +1473,7 @@ class FieldTest(unittest.TestCase):
attachment_1 = Attachment.objects().first() attachment_1 = Attachment.objects().first()
self.assertEqual(MIME_TYPE, attachment_1.content_type) self.assertEqual(MIME_TYPE, attachment_1.content_type)
self.assertEqual(BLOB, attachment_1.blob) self.assertEqual(BLOB, str(attachment_1.blob))
Attachment.drop_collection() Attachment.drop_collection()
@@ -1460,7 +1500,7 @@ class FieldTest(unittest.TestCase):
attachment_required = AttachmentRequired() attachment_required = AttachmentRequired()
self.assertRaises(ValidationError, attachment_required.validate) self.assertRaises(ValidationError, attachment_required.validate)
attachment_required.blob = '\xe6\x00\xc4\xff\x07' attachment_required.blob = Binary('\xe6\x00\xc4\xff\x07')
attachment_required.validate() attachment_required.validate()
attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07') attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07')
@@ -1472,6 +1512,18 @@ class FieldTest(unittest.TestCase):
AttachmentRequired.drop_collection() AttachmentRequired.drop_collection()
AttachmentSizeLimit.drop_collection() AttachmentSizeLimit.drop_collection()
def test_binary_field_primary(self):
class Attachment(Document):
id = BinaryField(primary_key=True)
Attachment.drop_collection()
att = Attachment(id=uuid.uuid4().bytes).save()
att.delete()
self.assertEqual(0, Attachment.objects.count())
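A small sketch of the pattern the new test introduces: raw bytes used as a document's primary key. The names are hypothetical, and it is assumed the bytes round-trip through the field's Binary conversion when querying:

import uuid
from mongoengine import Document, BinaryField, StringField, connect

connect('binary_pk_demo')  # hypothetical database name

class Attachment(Document):
    id = BinaryField(primary_key=True)  # the raw bytes become the _id
    label = StringField()

Attachment.drop_collection()

raw_id = uuid.uuid4().bytes
Attachment(id=raw_id, label="scan").save()

# The same bytes can be used to fetch and delete the document.
fetched = Attachment.objects.get(id=raw_id)
fetched.delete()
assert Attachment.objects.count() == 0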
def test_choices_validation(self): def test_choices_validation(self):
"""Ensure that value is in a container of allowed values. """Ensure that value is in a container of allowed values.
""" """
@@ -1572,13 +1624,13 @@ class FieldTest(unittest.TestCase):
"""Ensure that file fields can be written to and their data retrieved """Ensure that file fields can be written to and their data retrieved
""" """
class PutFile(Document): class PutFile(Document):
file = FileField() the_file = FileField()
class StreamFile(Document): class StreamFile(Document):
file = FileField() the_file = FileField()
class SetFile(Document): class SetFile(Document):
file = FileField() the_file = FileField()
text = 'Hello, World!' text = 'Hello, World!'
more_text = 'Foo Bar' more_text = 'Foo Bar'
@@ -1589,14 +1641,14 @@ class FieldTest(unittest.TestCase):
SetFile.drop_collection() SetFile.drop_collection()
putfile = PutFile() putfile = PutFile()
putfile.file.put(text, content_type=content_type) putfile.the_file.put(text, content_type=content_type)
putfile.save() putfile.save()
putfile.validate() putfile.validate()
result = PutFile.objects.first() result = PutFile.objects.first()
self.assertTrue(putfile == result) self.assertTrue(putfile == result)
self.assertEquals(result.file.read(), text) self.assertEquals(result.the_file.read(), text)
self.assertEquals(result.file.content_type, content_type) self.assertEquals(result.the_file.content_type, content_type)
result.file.delete() # Remove file from GridFS result.the_file.delete() # Remove file from GridFS
PutFile.objects.delete() PutFile.objects.delete()
# Ensure file-like objects are stored # Ensure file-like objects are stored
@@ -1604,53 +1656,53 @@ class FieldTest(unittest.TestCase):
putstring = StringIO.StringIO() putstring = StringIO.StringIO()
putstring.write(text) putstring.write(text)
putstring.seek(0) putstring.seek(0)
putfile.file.put(putstring, content_type=content_type) putfile.the_file.put(putstring, content_type=content_type)
putfile.save() putfile.save()
putfile.validate() putfile.validate()
result = PutFile.objects.first() result = PutFile.objects.first()
self.assertTrue(putfile == result) self.assertTrue(putfile == result)
self.assertEquals(result.file.read(), text) self.assertEquals(result.the_file.read(), text)
self.assertEquals(result.file.content_type, content_type) self.assertEquals(result.the_file.content_type, content_type)
result.file.delete() result.the_file.delete()
streamfile = StreamFile() streamfile = StreamFile()
streamfile.file.new_file(content_type=content_type) streamfile.the_file.new_file(content_type=content_type)
streamfile.file.write(text) streamfile.the_file.write(text)
streamfile.file.write(more_text) streamfile.the_file.write(more_text)
streamfile.file.close() streamfile.the_file.close()
streamfile.save() streamfile.save()
streamfile.validate() streamfile.validate()
result = StreamFile.objects.first() result = StreamFile.objects.first()
self.assertTrue(streamfile == result) self.assertTrue(streamfile == result)
self.assertEquals(result.file.read(), text + more_text) self.assertEquals(result.the_file.read(), text + more_text)
self.assertEquals(result.file.content_type, content_type) self.assertEquals(result.the_file.content_type, content_type)
result.file.seek(0) result.the_file.seek(0)
self.assertEquals(result.file.tell(), 0) self.assertEquals(result.the_file.tell(), 0)
self.assertEquals(result.file.read(len(text)), text) self.assertEquals(result.the_file.read(len(text)), text)
self.assertEquals(result.file.tell(), len(text)) self.assertEquals(result.the_file.tell(), len(text))
self.assertEquals(result.file.read(len(more_text)), more_text) self.assertEquals(result.the_file.read(len(more_text)), more_text)
self.assertEquals(result.file.tell(), len(text + more_text)) self.assertEquals(result.the_file.tell(), len(text + more_text))
result.file.delete() result.the_file.delete()
# Ensure deleted file returns None # Ensure deleted file returns None
self.assertTrue(result.file.read() == None) self.assertTrue(result.the_file.read() == None)
setfile = SetFile() setfile = SetFile()
setfile.file = text setfile.the_file = text
setfile.save() setfile.save()
setfile.validate() setfile.validate()
result = SetFile.objects.first() result = SetFile.objects.first()
self.assertTrue(setfile == result) self.assertTrue(setfile == result)
self.assertEquals(result.file.read(), text) self.assertEquals(result.the_file.read(), text)
# Try replacing file with new one # Try replacing file with new one
result.file.replace(more_text) result.the_file.replace(more_text)
result.save() result.save()
result.validate() result.validate()
result = SetFile.objects.first() result = SetFile.objects.first()
self.assertTrue(setfile == result) self.assertTrue(setfile == result)
self.assertEquals(result.file.read(), more_text) self.assertEquals(result.the_file.read(), more_text)
result.file.delete() result.the_file.delete()
PutFile.drop_collection() PutFile.drop_collection()
StreamFile.drop_collection() StreamFile.drop_collection()
@@ -1658,7 +1710,7 @@ class FieldTest(unittest.TestCase):
# Make sure FileField is optional and not required # Make sure FileField is optional and not required
class DemoFile(Document): class DemoFile(Document):
file = FileField() the_file = FileField()
DemoFile.objects.create() DemoFile.objects.create()
@@ -1709,20 +1761,20 @@ class FieldTest(unittest.TestCase):
""" """
class TestFile(Document): class TestFile(Document):
name = StringField() name = StringField()
file = FileField() the_file = FileField()
# First instance # First instance
testfile = TestFile() test_file = TestFile()
testfile.name = "Hello, World!" test_file.name = "Hello, World!"
testfile.file.put('Hello, World!') test_file.the_file.put('Hello, World!')
testfile.save() test_file.save()
# Second instance # Second instance
testfiledupe = TestFile() test_file_dupe = TestFile()
data = testfiledupe.file.read() # Should be None data = test_file_dupe.the_file.read() # Should be None
self.assertTrue(testfile.name != testfiledupe.name) self.assertTrue(test_file.name != test_file_dupe.name)
self.assertTrue(testfile.file.read() != data) self.assertTrue(test_file.the_file.read() != data)
TestFile.drop_collection() TestFile.drop_collection()
@@ -1730,17 +1782,25 @@ class FieldTest(unittest.TestCase):
"""Ensure that a boolean test of a FileField indicates its presence """Ensure that a boolean test of a FileField indicates its presence
""" """
class TestFile(Document): class TestFile(Document):
file = FileField() the_file = FileField()
testfile = TestFile() test_file = TestFile()
self.assertFalse(bool(testfile.file)) self.assertFalse(bool(test_file.the_file))
testfile.file = 'Hello, World!' test_file.the_file = 'Hello, World!'
testfile.file.content_type = 'text/plain' test_file.the_file.content_type = 'text/plain'
testfile.save() test_file.save()
self.assertTrue(bool(testfile.file)) self.assertTrue(bool(test_file.the_file))
TestFile.drop_collection() TestFile.drop_collection()
def test_file_cmp(self):
"""Test comparing against other types"""
class TestFile(Document):
the_file = FileField()
test_file = TestFile()
self.assertFalse(test_file.the_file in [{"test": 1}])
def test_image_field(self): def test_image_field(self):
class TestImage(Document): class TestImage(Document):
@@ -1804,30 +1864,30 @@ class FieldTest(unittest.TestCase):
def test_file_multidb(self): def test_file_multidb(self):
register_connection('testfiles', 'testfiles') register_connection('test_files', 'test_files')
class TestFile(Document): class TestFile(Document):
name = StringField() name = StringField()
file = FileField(db_alias="testfiles", the_file = FileField(db_alias="test_files",
collection_name="macumba") collection_name="macumba")
TestFile.drop_collection() TestFile.drop_collection()
# delete old filesystem # delete old filesystem
get_db("testfiles").macumba.files.drop() get_db("test_files").macumba.files.drop()
get_db("testfiles").macumba.chunks.drop() get_db("test_files").macumba.chunks.drop()
# First instance # First instance
testfile = TestFile() test_file = TestFile()
testfile.name = "Hello, World!" test_file.name = "Hello, World!"
testfile.file.put('Hello, World!', test_file.the_file.put('Hello, World!',
name="hello.txt") name="hello.txt")
testfile.save() test_file.save()
data = get_db("testfiles").macumba.files.find_one() data = get_db("test_files").macumba.files.find_one()
self.assertEquals(data.get('name'), 'hello.txt') self.assertEquals(data.get('name'), 'hello.txt')
testfile = TestFile.objects.first() test_file = TestFile.objects.first()
self.assertEquals(testfile.file.read(), self.assertEquals(test_file.the_file.read(),
'Hello, World!') 'Hello, World!')
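For reference, a sketch of what the renamed test exercises: a FileField whose GridFS data is routed to a separate connection alias and collection, while the owning document stays on the default connection. Alias, database, and collection names here are hypothetical:

from mongoengine import Document, StringField, FileField, connect
from mongoengine.connection import register_connection, get_db

connect('main_db')                               # default connection
register_connection('file_store', 'file_store')  # hypothetical alias for file data

class Report(Document):
    name = StringField()
    the_file = FileField(db_alias="file_store",      # GridFS lives on this alias...
                         collection_name="reports")  # ...in reports.files / reports.chunks

Report.drop_collection()
get_db("file_store").reports.files.drop()
get_db("file_store").reports.chunks.drop()

report = Report(name="August report")
report.the_file.put('Hello, World!', name="report.txt")
report.save()

assert Report.objects.first().the_file.read() == 'Hello, World!'
assert get_db("file_store").reports.files.find_one().get('name') == 'report.txt'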
def test_geo_indexes(self): def test_geo_indexes(self):
@@ -2109,7 +2169,7 @@ class FieldTest(unittest.TestCase):
self.assertTrue(1 in error_dict['comments']) self.assertTrue(1 in error_dict['comments'])
self.assertTrue('content' in error_dict['comments'][1]) self.assertTrue('content' in error_dict['comments'][1])
self.assertEquals(error_dict['comments'][1]['content'], self.assertEquals(error_dict['comments'][1]['content'],
u'Field is required ("content")') 'Field is required')
post.comments[1].content = 'here we go' post.comments[1].content = 'here we go'
post.validate() post.validate()


@@ -579,6 +579,64 @@ class QuerySetTest(unittest.TestCase):
Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True}) Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
self.assertEqual(Blog.objects.count(), 3) self.assertEqual(Blog.objects.count(), 3)
def test_get_changed_fields_query_count(self):
class Person(Document):
name = StringField()
owns = ListField(ReferenceField('Organization'))
projects = ListField(ReferenceField('Project'))
class Organization(Document):
name = StringField()
owner = ReferenceField('Person')
employees = ListField(ReferenceField('Person'))
class Project(Document):
name = StringField()
Person.drop_collection()
Organization.drop_collection()
Project.drop_collection()
r1 = Project(name="r1").save()
r2 = Project(name="r2").save()
r3 = Project(name="r3").save()
p1 = Person(name="p1", projects=[r1, r2]).save()
p2 = Person(name="p2", projects=[r2]).save()
o1 = Organization(name="o1", employees=[p1]).save()
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
self.assertEqual(1, q)
fresh_o1._get_changed_fields()
self.assertEqual(1, q)
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
fresh_o1.save()
self.assertEquals(q, 2)
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
fresh_o1.save(cascade=False)
self.assertEquals(q, 2)
with query_counter() as q:
self.assertEqual(q, 0)
fresh_o1 = Organization.objects.get(id=o1.id)
fresh_o1.employees.append(p2)
fresh_o1.save(cascade=False)
self.assertEquals(q, 3)
def test_slave_okay(self): def test_slave_okay(self):
"""Ensures that a query can take slave_okay syntax """Ensures that a query can take slave_okay syntax
@@ -769,7 +827,11 @@ class QuerySetTest(unittest.TestCase):
def test_filter_chaining(self): def test_filter_chaining(self):
"""Ensure filters can be chained together. """Ensure filters can be chained together.
""" """
class Blog(Document):
id = StringField(unique=True, primary_key=True)
class BlogPost(Document): class BlogPost(Document):
blog = ReferenceField(Blog)
title = StringField() title = StringField()
is_published = BooleanField() is_published = BooleanField()
published_date = DateTimeField() published_date = DateTimeField()
@@ -778,13 +840,24 @@ class QuerySetTest(unittest.TestCase):
def published(doc_cls, queryset): def published(doc_cls, queryset):
return queryset(is_published=True) return queryset(is_published=True)
blog_post_1 = BlogPost(title="Blog Post #1", Blog.drop_collection()
BlogPost.drop_collection()
blog_1 = Blog(id="1")
blog_2 = Blog(id="2")
blog_3 = Blog(id="3")
blog_1.save()
blog_2.save()
blog_3.save()
blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1",
is_published = True, is_published = True,
published_date=datetime(2010, 1, 5, 0, 0 ,0)) published_date=datetime(2010, 1, 5, 0, 0 ,0))
blog_post_2 = BlogPost(title="Blog Post #2", blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2",
is_published = True, is_published = True,
published_date=datetime(2010, 1, 6, 0, 0 ,0)) published_date=datetime(2010, 1, 6, 0, 0 ,0))
blog_post_3 = BlogPost(title="Blog Post #3", blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3",
is_published = True, is_published = True,
published_date=datetime(2010, 1, 7, 0, 0 ,0)) published_date=datetime(2010, 1, 7, 0, 0 ,0))
@@ -798,7 +871,14 @@ class QuerySetTest(unittest.TestCase):
published_date__lt=datetime(2010, 1, 7, 0, 0 ,0)) published_date__lt=datetime(2010, 1, 7, 0, 0 ,0))
self.assertEqual(published_posts.count(), 2) self.assertEqual(published_posts.count(), 2)
blog_posts = BlogPost.objects
blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2])
blog_posts = blog_posts.filter(blog=blog_3)
self.assertEqual(blog_posts.count(), 0)
BlogPost.drop_collection() BlogPost.drop_collection()
Blog.drop_collection()
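The new Blog/blog assertions above pin down the chaining fix: successive filter() calls that constrain the same field are now combined instead of the later call silently replacing the earlier one. A minimal sketch with hypothetical names:

from mongoengine import Document, StringField, ReferenceField, connect

connect('chaining_demo')  # hypothetical database name

class Blog(Document):
    id = StringField(primary_key=True)

class BlogPost(Document):
    blog = ReferenceField(Blog)
    title = StringField()

Blog.drop_collection()
BlogPost.drop_collection()

blog_1, blog_2, blog_3 = [Blog(id=i).save() for i in ("1", "2", "3")]
BlogPost(blog=blog_1, title="first").save()
BlogPost(blog=blog_2, title="second").save()

# blog must be in {blog_1, blog_2} AND equal blog_3, so nothing matches.
posts = BlogPost.objects.filter(blog__in=[blog_1, blog_2]).filter(blog=blog_3)
assert posts.count() == 0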
def test_ordering(self): def test_ordering(self):
"""Ensure default ordering is applied and can be overridden. """Ensure default ordering is applied and can be overridden.
@@ -1520,7 +1600,7 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
def test_update_push_and_pull(self): def test_update_push_and_pull_add_to_set(self):
"""Ensure that the 'pull' update operation works correctly. """Ensure that the 'pull' update operation works correctly.
""" """
class BlogPost(Document): class BlogPost(Document):
@@ -1553,6 +1633,23 @@ class QuerySetTest(unittest.TestCase):
post.reload() post.reload()
self.assertEqual(post.tags, ["code", "mongodb"]) self.assertEqual(post.tags, ["code", "mongodb"])
def test_add_to_set_each(self):
class Item(Document):
name = StringField(required=True)
description = StringField(max_length=50)
parents = ListField(ReferenceField('self'))
Item.drop_collection()
item = Item(name='test item').save()
parent_1 = Item(name='parent 1').save()
parent_2 = Item(name='parent 2').save()
item.update(add_to_set__parents=[parent_1, parent_2, parent_1])
item.reload()
self.assertEqual([parent_1, parent_2], item.parents)
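What the test above relies on, spelled out: passing a list to an add_to_set__ modifier appears to issue a single $addToSet ... $each update, so duplicates in the input (and values already stored) end up in the list only once. A sketch with hypothetical names:

from mongoengine import Document, StringField, ListField, ReferenceField, connect

connect('addtoset_demo')  # hypothetical database name

class Item(Document):
    name = StringField(required=True)
    parents = ListField(ReferenceField('self'))

Item.drop_collection()

item = Item(name='child').save()
parent_1 = Item(name='parent 1').save()
parent_2 = Item(name='parent 2').save()

# parent_1 appears twice in the input but is stored once.
item.update(add_to_set__parents=[parent_1, parent_2, parent_1])
item.reload()
assert item.parents == [parent_1, parent_2]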
def test_pull_nested(self): def test_pull_nested(self):
class User(Document): class User(Document):
@@ -1977,9 +2074,9 @@ class QuerySetTest(unittest.TestCase):
# Check item_frequencies works for non-list fields # Check item_frequencies works for non-list fields
def test_assertions(f): def test_assertions(f):
self.assertEqual(set(['1', '2']), set(f.keys())) self.assertEqual(set([1, 2]), set(f.keys()))
self.assertEqual(f['1'], 1) self.assertEqual(f[1], 1)
self.assertEqual(f['2'], 2) self.assertEqual(f[2], 2)
exec_js = BlogPost.objects.item_frequencies('hits') exec_js = BlogPost.objects.item_frequencies('hits')
map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True)
@@ -2079,7 +2176,6 @@ class QuerySetTest(unittest.TestCase):
data = EmbeddedDocumentField(Data, required=True) data = EmbeddedDocumentField(Data, required=True)
extra = EmbeddedDocumentField(Extra) extra = EmbeddedDocumentField(Extra)
Person.drop_collection() Person.drop_collection()
p = Person() p = Person()
@@ -2097,6 +2193,52 @@ class QuerySetTest(unittest.TestCase):
ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
def test_item_frequencies_with_0_values(self):
class Test(Document):
val = IntField()
Test.drop_collection()
t = Test()
t.val = 0
t.save()
ot = Test.objects.item_frequencies('val', map_reduce=True)
self.assertEquals(ot, {0: 1})
ot = Test.objects.item_frequencies('val', map_reduce=False)
self.assertEquals(ot, {0: 1})
def test_item_frequencies_with_False_values(self):
class Test(Document):
val = BooleanField()
Test.drop_collection()
t = Test()
t.val = False
t.save()
ot = Test.objects.item_frequencies('val', map_reduce=True)
self.assertEquals(ot, {False: 1})
ot = Test.objects.item_frequencies('val', map_reduce=False)
self.assertEquals(ot, {False: 1})
def test_item_frequencies_normalize(self):
class Test(Document):
val = IntField()
Test.drop_collection()
for i in xrange(50):
Test(val=1).save()
for i in xrange(20):
Test(val=2).save()
freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True)
self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})
freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True)
self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})
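For clarity on normalize=True: the raw counts are divided by the total number of values, so the returned frequencies sum to 1.0. A short sketch with hypothetical names:

from mongoengine import Document, IntField, connect

connect('freq_demo')  # hypothetical database name

class Score(Document):
    val = IntField()

Score.drop_collection()
for _ in xrange(3):
    Score(val=1).save()
Score(val=2).save()

freqs = Score.objects.item_frequencies('val', normalize=True)
assert freqs == {1: 0.75, 2: 0.25}
assert sum(freqs.values()) == 1.0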
def test_average(self): def test_average(self):
"""Ensure that field can be averaged correctly. """Ensure that field can be averaged correctly.
""" """
@@ -2157,6 +2299,28 @@ class QuerySetTest(unittest.TestCase):
self.assertEquals(Foo.objects.distinct("bar"), [bar]) self.assertEquals(Foo.objects.distinct("bar"), [bar])
def test_distinct_handles_references_to_alias(self):
register_connection('testdb', 'mongoenginetest2')
class Foo(Document):
bar = ReferenceField("Bar")
meta = {'db_alias': 'testdb'}
class Bar(Document):
text = StringField()
meta = {'db_alias': 'testdb'}
Bar.drop_collection()
Foo.drop_collection()
bar = Bar(text="hi")
bar.save()
foo = Foo(bar=bar)
foo.save()
self.assertEquals(Foo.objects.distinct("bar"), [bar])
def test_custom_manager(self): def test_custom_manager(self):
"""Ensure that custom QuerySetManager instances work as expected. """Ensure that custom QuerySetManager instances work as expected.
""" """
@@ -2166,28 +2330,29 @@ class QuerySetTest(unittest.TestCase):
date = DateTimeField(default=datetime.now) date = DateTimeField(default=datetime.now)
@queryset_manager @queryset_manager
def objects(doc_cls, queryset): def objects(cls, qryset):
return queryset(deleted=False) opts = {"deleted": False}
return qryset(**opts)
@queryset_manager @queryset_manager
def music_posts(doc_cls, queryset): def music_posts(doc_cls, queryset, deleted=False):
return queryset(tags='music', deleted=False).order_by('-date') return queryset(tags='music',
deleted=deleted).order_by('date')
BlogPost.drop_collection() BlogPost.drop_collection()
post1 = BlogPost(tags=['music', 'film']) post1 = BlogPost(tags=['music', 'film']).save()
post1.save() post2 = BlogPost(tags=['music']).save()
post2 = BlogPost(tags=['music']) post3 = BlogPost(tags=['film', 'actors']).save()
post2.save() post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save()
post3 = BlogPost(tags=['film', 'actors'])
post3.save()
post4 = BlogPost(tags=['film', 'actors'], deleted=True)
post4.save()
self.assertEqual([p.id for p in BlogPost.objects], self.assertEqual([p.id for p in BlogPost.objects()],
[post1.id, post2.id, post3.id]) [post1.id, post2.id, post3.id])
self.assertEqual([p.id for p in BlogPost.music_posts], self.assertEqual([p.id for p in BlogPost.music_posts()],
[post2.id, post1.id]) [post1.id, post2.id])
self.assertEqual([p.id for p in BlogPost.music_posts(True)],
[post4.id])
BlogPost.drop_collection() BlogPost.drop_collection()
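One behaviour change worth calling out from the rewritten custom-manager test: a queryset_manager whose function takes extra parameters has to be invoked explicitly with parentheses (and arguments), rather than read as a bare attribute. A minimal sketch with hypothetical names:

from datetime import datetime
from mongoengine import Document, StringField, ListField, BooleanField, DateTimeField, connect
from mongoengine.queryset import queryset_manager

connect('manager_demo')  # hypothetical database name

class BlogPost(Document):
    tags = ListField(StringField())
    deleted = BooleanField(default=False)
    date = DateTimeField(default=datetime.now)

    @queryset_manager
    def music_posts(doc_cls, queryset, deleted=False):
        return queryset(tags='music', deleted=deleted).order_by('date')

BlogPost.drop_collection()
BlogPost(tags=['music']).save()
BlogPost(tags=['music'], deleted=True).save()

# Call the manager explicitly; the positional argument binds to `deleted`.
assert BlogPost.music_posts().count() == 1      # live music posts
assert BlogPost.music_posts(True).count() == 1  # deleted music posts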