Compare commits

61 Commits

2801b38c75
dc3fea875e
aab8c2b687
3577773af3
8ac9e6dc19
4b3cea9e78
2420b5e937
f23a976bea
4226cd08f1
7a230f1693
a43d0d4612
78a40a0c70
2c69d8f0b0
0018c38b83
8df81571fc
48f988acd7
6526923345
24fd1acce6
cbb9235dc5
19ec2c9bc9
6459d4c0b6
1304f2721f
8bde0c0e53
598ffd3e5c
601f0eb168
3070e0bf5d
83c11a9834
5c912b930e
1b17fb0ae7
d83e67c121
ae39ed94c9
1e51180d42
87ba69d02e
8879d5560b
c1621ee39c
b0aa98edb4
a7a2fe0216
8e50f5fa3c
31793520bf
0b6b0368c5
d1d30a9280
420c6f2d1e
34f06c4971
9cc4bbd49d
f66b312869
2405ba8708
a91b6bff8b
450dc11a68
1ce2f84ce5
f55b241cfa
34d08ce8ef
4f5aa8c43b
27b375060d
cbfdc401f7
b58bf3e0ce
1fff7e9aca
494b981b13
dd93995bd0
b3bb4add9c
d305e71c27
0d92baa670
AUTHORS

@@ -109,4 +109,9 @@ that much better:
* Meir Kriheli
* Andrey Fedoseev
* aparajita
* Tristan Escalada
* Alexander Koshelev
* Jaime Irurzun
* Alexandre González
* Thomas Steinacher
* Tommi Komulainen
LICENSE

@@ -1,5 +1,5 @@
Copyright (c) 2009-2010 Harry Marr
Copyright (c) 2009-2012 See AUTHORS

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without

@@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
@@ -2,6 +2,7 @@
MongoEngine
===========
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Repository: https://github.com/MongoEngine/mongoengine
:Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza)
@@ -47,25 +47,28 @@ Querying
Fields
======

.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.ComplexDateTimeField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.DynamicField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.ImageField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.MapField
.. autoclass:: mongoengine.ObjectIdField
.. autoclass:: mongoengine.ReferenceField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.SequenceField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.UUIDField
@@ -2,8 +2,49 @@
Changelog
=========

Changes in 0.6.19
=================

- Added Binary support to UUID (MongoEngine/mongoengine#47)
- Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46)
- Fixed BinaryField python value issue (MongoEngine/mongoengine#48)
- Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41)
- Fixed queryset manager issue (MongoEngine/mongoengine#52)
- Fixed FileField comparision (hmarr/mongoengine#547)

Changes in 0.6.18
=================
- Fixed recursion loading bug in _get_changed_fields

Changes in 0.6.17
=================
- Fixed issue with custom queryset manager expecting explict variable names

Changes in 0.6.16
=================
- Fixed issue where db_alias wasn't inherited

Changes in 0.6.15
=================
- Updated validation error messages
- Added support for null / zero / false values in item_frequencies
- Fixed cascade save edge case
- Fixed geo index creation through reference fields
- Added support for args / kwargs when using @queryset_manager
- Deref list custom id fix

Changes in 0.6.14
=================
- Fixed error dict with nested validation
- Fixed Int/Float fields and not equals None
- Exclude tests from installation
- Allow tuples for index meta
- Fixed use of str in instance checks
- Fixed unicode support in transform update
- Added support for add_to_set and each

Changes in 0.6.13
================
=================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixes __repr__ modifying the cursor
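The headline 0.6.19 entry is binary storage for UUIDs. Judging by the ``UUIDField`` changes later in this compare, the field gains a ``binary`` keyword and warns that binary storage will become the default. A minimal sketch; the document class and database name are illustrative::

    import uuid

    from mongoengine import Document, UUIDField, connect

    connect('uuid_demo')  # hypothetical database name

    class Person(Document):
        # binary=True stores the value as a BSON Binary; binary=False keeps
        # the legacy unicode string form. Omitting the argument still stores
        # strings but emits a FutureWarning that binary will become the default.
        api_key = UUIDField(binary=True)

    Person.drop_collection()
    key = uuid.uuid4()
    Person(api_key=key).save()
    assert Person.objects(api_key=key).count() == 1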
@@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:

* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.ComplexDateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.DynamicField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.ImageField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.MapField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.SequenceField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.UUIDField`
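The guide text above notes that defaults may be plain values or callables. A minimal sketch of a document definition using a few of the field types listed above (the class and field names are illustrative, not taken from the guide)::

    import datetime

    from mongoengine import (Document, StringField, IntField, DateTimeField,
                             ListField)

    class Article(Document):
        # required fields must be set before validate()/save() succeeds
        title = StringField(required=True, max_length=120)
        # a plain default value
        views = IntField(default=0)
        # a callable default is invoked per document, so each new Article
        # gets its own timestamp
        created = DateTimeField(default=datetime.datetime.now)
        tags = ListField(StringField(max_length=30))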

Field arguments
---------------
@@ -256,6 +259,35 @@ as the constructor's argument::
        content = StringField()


.. _one-to-many-with-listfields:

One to Many with ListFields
'''''''''''''''''''''''''''

If you are implementing a one to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User))

    bob = User(name="Bob Jones").save()
    john = User(name="John Smith").save()

    Page(content="Test Page", authors=[bob, john]).save()
    Page(content="Another Page", authors=[john]).save()

    # Find all pages Bob authored
    Page.objects(authors__in=[bob])

    # Find all pages that both Bob and John have authored
    Page.objects(authors__all=[bob, john])
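A possible follow-on to the example above, using the ``add_to_set`` / ``$each`` support noted in the 0.6.14 changelog entry (a sketch, not part of the guide)::

    # jane is a hypothetical extra user, introduced only for illustration
    jane = User(name="Jane Doe").save()

    page = Page.objects(content="Test Page").first()
    # a list value is expanded into $addToSet with $each, and values that
    # are already present are not added again
    page.update(add_to_set__authors=[john, jane, john])
    page.reload()
    # page.authors now contains bob, john and jane exactly once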

Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
@@ -232,7 +232,7 @@ custom manager methods as you like::
    BlogPost(title='test1', published=False).save()
    BlogPost(title='test2', published=True).save()
    assert len(BlogPost.objects) == 2
    assert len(BlogPost.live_posts) == 1
    assert len(BlogPost.live_posts()) == 1

Custom QuerySets
================

@@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`\ s ``meta`` dictionary::

    class AwesomerQuerySet(QuerySet):
        pass

        def get_awesome(self):
            return self.filter(awesome=True)

    class Page(Document):
        meta = {'queryset_class': AwesomerQuerySet}

    # To call:
    Page.objects.get_awesome()

.. versionadded:: 0.4

Aggregation
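Both hunks above concern queryset managers: a manager is now invoked like a method (``live_posts()``), and the 0.6.15 changelog entry adds args / kwargs support to ``@queryset_manager`` functions. A minimal sketch under those assumptions; the model, manager and database names are illustrative::

    from mongoengine import (Document, StringField, BooleanField, connect,
                             queryset_manager)

    connect('queryset_manager_demo')  # hypothetical local database name

    class BlogPost(Document):
        title = StringField()
        published = BooleanField(default=False)

        @queryset_manager
        def live_posts(doc_cls, queryset):
            # a two-argument manager simply returns a filtered QuerySet
            return queryset.filter(published=True)

        @queryset_manager
        def by_status(doc_cls, queryset, published=False):
            # extra positional / keyword arguments are forwarded when the
            # manager is called (the 0.6.15 args / kwargs support)
            return queryset.filter(published=published)

    BlogPost.drop_collection()
    BlogPost(title='draft').save()
    BlogPost(title='live', published=True).save()

    assert len(BlogPost.live_posts()) == 1   # managers are called like methods
    assert len(BlogPost.by_status(True)) == 1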
@@ -12,7 +12,7 @@ from signals import *
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
           queryset.__all__ + signals.__all__)

VERSION = (0, 6, 13)
VERSION = (0, 6, 19)


def get_version():
@@ -1,4 +1,5 @@
import warnings
from collections import defaultdict

from queryset import QuerySet, QuerySetManager
from queryset import DoesNotExist, MultipleObjectsReturned

@@ -53,9 +54,9 @@ class ValidationError(AssertionError):
        message = super(ValidationError, self).__getattribute__(name)
        if name == 'message':
            if self.field_name:
                message = '%s ("%s")' % (message, self.field_name)
                message = '%s' % message
            if self.errors:
                message = '%s:\n%s' % (message, self._format_errors())
                message = '%s(%s)' % (message, self._format_errors())
        return message

    def _get_message(self):

@@ -93,17 +94,20 @@ class ValidationError(AssertionError):
    def _format_errors(self):
        """Returns a string listing all errors within a document"""

        def format_error(field, value, prefix=''):
            prefix = "%s.%s" % (prefix, field) if prefix else "%s" % field
        def generate_key(value, prefix=''):
            if isinstance(value, list):
                value = ' '.join([generate_key(k) for k in value])
            if isinstance(value, dict):
                value = ' '.join(
                    [generate_key(v, k) for k, v in value.iteritems()])

                return '\n'.join(
                    [format_error(k, value[k], prefix) for k in value])
            else:
                return "%s: %s" % (prefix, value)
            results = "%s.%s" % (prefix, value) if prefix else value
            return results

        return '\n'.join(
            [format_error(k, v) for k, v in self.to_dict().items()])
        error_dict = defaultdict(list)
        for k, v in self.to_dict().iteritems():
            error_dict[generate_key(v)].append(k)
        return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])


_document_registry = {}
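The reworked ``_format_errors`` above groups field names under their shared message rather than printing one line per field. A small sketch of the resulting behaviour, consistent with the updated tests later in this compare (the ``User`` class is illustrative)::

    from mongoengine import Document, StringField, ValidationError

    class User(Document):
        username = StringField(required=True)
        name = StringField(required=True)

    try:
        User().validate()
    except ValidationError, e:
        # fields sharing a message are grouped together, e.g.
        #   ValidationError(Field is required: ['username', 'name'])
        print e.message
        # per-field messages no longer repeat the field name:
        #   {'username': 'Field is required', 'name': 'Field is required'}
        print e.to_dict()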
@@ -267,8 +271,10 @@ class ComplexBaseField(BaseField):
|
||||
if instance is None:
|
||||
# Document class being used rather than a document object
|
||||
return self
|
||||
|
||||
if not self._dereference and instance._initialised:
|
||||
from fields import GenericReferenceField, ReferenceField
|
||||
dereference = self.field is None or isinstance(self.field,
|
||||
(GenericReferenceField, ReferenceField))
|
||||
if not self._dereference and instance._initialised and dereference:
|
||||
from dereference import DeReference
|
||||
self._dereference = DeReference() # Cached
|
||||
instance._data[self.name] = self._dereference(
|
||||
@@ -403,11 +409,11 @@ class ComplexBaseField(BaseField):
|
||||
for k, v in sequence:
|
||||
try:
|
||||
self.field._validate(v)
|
||||
except (ValidationError, AssertionError), error:
|
||||
if hasattr(error, 'errors'):
|
||||
errors[k] = error.errors
|
||||
else:
|
||||
errors[k] = error
|
||||
except ValidationError, error:
|
||||
errors[k] = error.errors or error
|
||||
except (ValueError, AssertionError), error:
|
||||
errors[k] = error
|
||||
|
||||
if errors:
|
||||
field_class = self.field.__class__.__name__
|
||||
self.error('Invalid %s item (%s)' % (field_class, value),
|
||||
@@ -643,8 +649,13 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
del(attrs['meta']['collection'])
|
||||
if base._get_collection_name():
|
||||
collection = base._get_collection_name()
|
||||
# Propagate index options.
|
||||
for key in ('index_background', 'index_drop_dups', 'index_opts'):
|
||||
|
||||
# Propagate inherited values
|
||||
keys_to_propogate = (
|
||||
'index_background', 'index_drop_dups', 'index_opts',
|
||||
'allow_inheritance', 'queryset_class', 'db_alias',
|
||||
)
|
||||
for key in keys_to_propogate:
|
||||
if key in base._meta:
|
||||
base_meta[key] = base._meta[key]
|
||||
|
||||
@@ -653,11 +664,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
abstract_base_indexes += base._meta.get('indexes', [])
|
||||
else:
|
||||
base_indexes += base._meta.get('indexes', [])
|
||||
# Propagate 'allow_inheritance'
|
||||
if 'allow_inheritance' in base._meta:
|
||||
base_meta['allow_inheritance'] = base._meta['allow_inheritance']
|
||||
if 'queryset_class' in base._meta:
|
||||
base_meta['queryset_class'] = base._meta['queryset_class']
|
||||
try:
|
||||
base_meta['objects'] = base.__getattribute__(base, 'objects')
|
||||
except TypeError:
|
||||
@@ -665,6 +671,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# defaults
|
||||
meta = {
|
||||
'abstract': False,
|
||||
'collection': collection,
|
||||
@@ -704,7 +711,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
meta['queryset_class'] = manager.queryset_class
|
||||
new_class.objects = manager
|
||||
|
||||
indicies = meta['indexes'] + abstract_base_indexes
|
||||
indicies = list(meta['indexes']) + abstract_base_indexes
|
||||
user_indexes = [QuerySet._build_index_spec(new_class, spec)
|
||||
for spec in indicies] + base_indexes
|
||||
new_class._meta['indexes'] = user_indexes
|
||||
@@ -897,8 +904,7 @@ class BaseDocument(object):
|
||||
errors[field.name] = ValidationError('Field is required',
|
||||
field_name=field.name)
|
||||
if errors:
|
||||
raise ValidationError('Errors encountered validating document',
|
||||
errors=errors)
|
||||
raise ValidationError('ValidationError', errors=errors)
|
||||
|
||||
def to_mongo(self):
|
||||
"""Return data dictionary ready for use with MongoDB.
|
||||
@@ -1006,9 +1012,10 @@ Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, error
|
||||
field_list.update(self._dynamic_fields)
|
||||
|
||||
for field_name in field_list:
|
||||
|
||||
db_field_name = self._db_field_map.get(field_name, field_name)
|
||||
key = '%s.' % db_field_name
|
||||
field = getattr(self, field_name, None)
|
||||
field = self._data.get(field_name, None)
|
||||
if hasattr(field, 'id'):
|
||||
if field.id in inspected:
|
||||
continue
|
||||
@@ -1111,7 +1118,11 @@ Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, error
|
||||
inspected = inspected or []
|
||||
geo_indices = []
|
||||
inspected.append(cls)
|
||||
|
||||
from fields import EmbeddedDocumentField, GeoPointField
|
||||
for field in cls._fields.values():
|
||||
if not isinstance(field, (EmbeddedDocumentField, GeoPointField)):
|
||||
continue
|
||||
if hasattr(field, 'document_type'):
|
||||
field_cls = field.document_type
|
||||
if field_cls in inspected:
|
||||
|
@@ -166,7 +166,7 @@ class DeReference(object):
|
||||
else:
|
||||
data[k] = v
|
||||
|
||||
if k in self.object_map:
|
||||
if k in self.object_map and not is_list:
|
||||
data[k] = self.object_map[k]
|
||||
elif hasattr(v, '_fields'):
|
||||
for field_name, field in v._fields.iteritems():
|
||||
|
@@ -248,11 +248,16 @@ class Document(BaseDocument):
|
||||
_refs = kwargs.get('_refs', []) or []
|
||||
|
||||
for name, cls in self._fields.items():
|
||||
|
||||
if not isinstance(cls, (ReferenceField, GenericReferenceField)):
|
||||
continue
|
||||
|
||||
ref = getattr(self, name)
|
||||
if not ref:
|
||||
continue
|
||||
if isinstance(ref, DBRef):
|
||||
continue
|
||||
|
||||
ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
|
||||
if ref and ref_id not in _refs:
|
||||
_refs.append(ref_id)
|
||||
@@ -370,7 +375,7 @@ class DynamicDocument(Document):
|
||||
:class:`~mongoengine.DynamicField` and data can be attributed to that
|
||||
field.
|
||||
|
||||
..note::
|
||||
.. note::
|
||||
|
||||
There is one caveat on Dynamic Documents: fields cannot start with `_`
|
||||
"""
|
||||
|
@@ -4,9 +4,9 @@ import decimal
|
||||
import gridfs
|
||||
import re
|
||||
import uuid
|
||||
import warnings
|
||||
|
||||
from bson import Binary, DBRef, SON, ObjectId
|
||||
|
||||
from base import (BaseField, ComplexBaseField, ObjectIdField,
|
||||
ValidationError, get_document, BaseDocument)
|
||||
from queryset import DO_NOTHING, QuerySet
|
||||
@@ -167,6 +167,9 @@ class IntField(BaseField):
|
||||
self.error('Integer value is too large')
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
return int(value)
|
||||
|
||||
|
||||
@@ -194,6 +197,9 @@ class FloatField(BaseField):
|
||||
self.error('Float value is too large')
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
return float(value)
|
||||
|
||||
|
||||
@@ -445,7 +451,7 @@ class GenericEmbeddedDocumentField(BaseField):
|
||||
|
||||
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
|
||||
|
||||
..note :: You can use the choices param to limit the acceptable
|
||||
.. note:: You can use the choices param to limit the acceptable
|
||||
EmbeddedDocument types
|
||||
"""
|
||||
|
||||
@@ -477,7 +483,10 @@ class GenericEmbeddedDocumentField(BaseField):
|
||||
|
||||
|
||||
class DynamicField(BaseField):
|
||||
"""Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
|
||||
"""A truly dynamic field type capable of handling different and varying
|
||||
types of data.
|
||||
|
||||
Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
|
||||
|
||||
def to_mongo(self, value):
|
||||
"""Convert a Python type to a MongoDBcompatible type.
|
||||
@@ -521,6 +530,8 @@ class ListField(ComplexBaseField):
|
||||
"""A list field that wraps a standard field, allowing multiple instances
|
||||
of the field to be used as a list in the database.
|
||||
|
||||
If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
|
||||
|
||||
.. note::
|
||||
Required means it cannot be empty - as the default for ListFields is []
|
||||
"""
|
||||
@@ -757,10 +768,10 @@ class GenericReferenceField(BaseField):
|
||||
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
|
||||
that will be automatically dereferenced on access (lazily).
|
||||
|
||||
..note :: Any documents used as a generic reference must be registered in the
|
||||
.. note:: Any documents used as a generic reference must be registered in the
|
||||
document registry. Importing the model will automatically register it.
|
||||
|
||||
..note :: You can use the choices param to limit the acceptable Document types
|
||||
.. note:: You can use the choices param to limit the acceptable Document types
|
||||
|
||||
.. versionadded:: 0.3
|
||||
"""
|
||||
@@ -834,13 +845,9 @@ class BinaryField(BaseField):
|
||||
def to_mongo(self, value):
|
||||
return Binary(value)
|
||||
|
||||
def to_python(self, value):
|
||||
# Returns str not unicode as this is binary data
|
||||
return str(value)
|
||||
|
||||
def validate(self, value):
|
||||
if not isinstance(value, str):
|
||||
self.error('BinaryField only accepts string values')
|
||||
if not isinstance(value, (basestring, Binary)):
|
||||
self.error('BinaryField only accepts string or bson Binary values')
|
||||
|
||||
if self.max_bytes is not None and len(value) > self.max_bytes:
|
||||
self.error('Binary value is too long')
|
||||
@@ -897,6 +904,8 @@ class GridFSProxy(object):
|
||||
return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
|
||||
|
||||
def __cmp__(self, other):
|
||||
if not isinstance(other, GridFSProxy):
|
||||
return -1
|
||||
return cmp((self.grid_id, self.collection_name, self.db_alias),
|
||||
(other.grid_id, other.collection_name, other.db_alias))
|
||||
|
||||
@@ -1011,7 +1020,7 @@ class FileField(BaseField):
|
||||
|
||||
def __set__(self, instance, value):
|
||||
key = self.name
|
||||
if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str):
|
||||
if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, basestring):
|
||||
# using "FileField() = file/string" notation
|
||||
grid_file = instance._data.get(self.name)
|
||||
# If a file already exists, delete it
|
||||
@@ -1279,7 +1288,7 @@ class SequenceField(IntField):
|
||||
instance._data[self.name] = value
|
||||
instance._mark_as_changed(self.name)
|
||||
|
||||
return value
|
||||
return int(value) if value else None
|
||||
|
||||
def __set__(self, instance, value):
|
||||
|
||||
@@ -1299,17 +1308,40 @@ class UUIDField(BaseField):
|
||||
|
||||
.. versionadded:: 0.6
|
||||
"""
|
||||
_binary = None
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, binary=None, **kwargs):
|
||||
"""
|
||||
Store UUID data in the database
|
||||
|
||||
:param binary: (optional) boolean store as binary.
|
||||
|
||||
.. versionchanged:: 0.6.19
|
||||
"""
|
||||
if binary is None:
|
||||
binary = False
|
||||
msg = ("UUIDFields will soon default to store as binary, please "
|
||||
"configure binary=False if you wish to store as a string")
|
||||
warnings.warn(msg, FutureWarning)
|
||||
self._binary = binary
|
||||
super(UUIDField, self).__init__(**kwargs)
|
||||
|
||||
def to_python(self, value):
|
||||
if not isinstance(value, basestring):
|
||||
value = unicode(value)
|
||||
return uuid.UUID(value)
|
||||
if not self.binary:
|
||||
if not isinstance(value, basestring):
|
||||
value = unicode(value)
|
||||
return uuid.UUID(value)
|
||||
return value
|
||||
|
||||
def to_mongo(self, value):
|
||||
return unicode(value)
|
||||
if not self._binary:
|
||||
return unicode(value)
|
||||
return value
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if value is None:
|
||||
return None
|
||||
return self.to_mongo(value)
|
||||
|
||||
def validate(self, value):
|
||||
if not isinstance(value, uuid.UUID):
|
||||
|
@@ -4,6 +4,8 @@ import copy
|
||||
import itertools
|
||||
import operator
|
||||
|
||||
from functools import partial
|
||||
|
||||
import pymongo
|
||||
from bson.code import Code
|
||||
|
||||
@@ -481,7 +483,6 @@ class QuerySet(object):
|
||||
self._collection.ensure_index(index_spec,
|
||||
background=background, **index_opts)
|
||||
|
||||
|
||||
@classmethod
|
||||
def _build_index_spec(cls, doc_cls, spec):
|
||||
"""Build a PyMongo index spec from a MongoEngine index spec.
|
||||
@@ -492,6 +493,7 @@ class QuerySet(object):
|
||||
spec = {'fields': spec}
|
||||
|
||||
index_list = []
|
||||
direction = None
|
||||
use_types = doc_cls._meta.get('allow_inheritance', True)
|
||||
for key in spec['fields']:
|
||||
# Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
|
||||
@@ -639,7 +641,7 @@ class QuerySet(object):
|
||||
from mongoengine.fields import ReferenceField, GenericReferenceField
|
||||
if isinstance(field, (ReferenceField, GenericReferenceField)):
|
||||
raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
|
||||
if getattr(field, 'field', None):
|
||||
if hasattr(getattr(field, 'field', None), 'lookup_member'):
|
||||
new_field = field.field.lookup_member(field_name)
|
||||
else:
|
||||
# Look up subfield on the previous field
|
||||
@@ -702,7 +704,7 @@ class QuerySet(object):
|
||||
cleaned_fields = []
|
||||
for field in fields:
|
||||
append_field = True
|
||||
if isinstance(field, str):
|
||||
if isinstance(field, basestring):
|
||||
parts.append(field)
|
||||
append_field = False
|
||||
else:
|
||||
@@ -804,19 +806,18 @@ class QuerySet(object):
|
||||
keyword argument called :attr:`defaults`.
|
||||
|
||||
.. note:: This requires two separate operations and therefore a
|
||||
race condition exists. Because there are no transactions in mongoDB
|
||||
other approaches should be investigated, to ensure you don't
|
||||
accidently duplicate data when using this method.
|
||||
race condition exists. Because there are no transactions in mongoDB
|
||||
other approaches should be investigated, to ensure you don't
|
||||
accidently duplicate data when using this method.
|
||||
|
||||
:param write_options: optional extra keyword arguments used if we
|
||||
have to create a new document.
|
||||
Passes any write_options onto :meth:`~mongoengine.Document.save`
|
||||
|
||||
.. versionadded:: 0.3
|
||||
|
||||
:param auto_save: if the object is to be saved automatically if not found.
|
||||
|
||||
.. versionadded:: 0.6
|
||||
.. versionchanged:: 0.6 - added `auto_save`
|
||||
.. versionadded:: 0.3
|
||||
"""
|
||||
defaults = query.get('defaults', {})
|
||||
if 'defaults' in query:
|
||||
@@ -1373,7 +1374,7 @@ class QuerySet(object):
|
||||
cleaned_fields = []
|
||||
for field in fields:
|
||||
append_field = True
|
||||
if isinstance(field, str):
|
||||
if isinstance(field, basestring):
|
||||
# Convert the S operator to $
|
||||
if field == 'S':
|
||||
field = '$'
|
||||
@@ -1387,11 +1388,16 @@ class QuerySet(object):
|
||||
# Convert value to proper value
|
||||
field = cleaned_fields[-1]
|
||||
|
||||
if op in (None, 'set', 'push', 'pull', 'addToSet'):
|
||||
if op in (None, 'set', 'push', 'pull'):
|
||||
if field.required or value is not None:
|
||||
value = field.prepare_query_value(op, value)
|
||||
elif op in ('pushAll', 'pullAll'):
|
||||
value = [field.prepare_query_value(op, v) for v in value]
|
||||
elif op == 'addToSet':
|
||||
if isinstance(value, (list, tuple, set)):
|
||||
value = [field.prepare_query_value(op, v) for v in value]
|
||||
elif field.required or value is not None:
|
||||
value = field.prepare_query_value(op, value)
|
||||
|
||||
key = '.'.join(parts)
|
||||
|
||||
@@ -1407,6 +1413,8 @@ class QuerySet(object):
|
||||
parts.reverse()
|
||||
for key in parts:
|
||||
value = {key: value}
|
||||
elif op == 'addToSet' and isinstance(value, list):
|
||||
value = {key: {"$each": value}}
|
||||
else:
|
||||
value = {key: value}
|
||||
key = '$' + op
|
||||
@@ -1710,10 +1718,11 @@ class QuerySet(object):
|
||||
def _item_frequencies_map_reduce(self, field, normalize=False):
|
||||
map_func = """
|
||||
function() {
|
||||
path = '{{~%(field)s}}'.split('.');
|
||||
field = this;
|
||||
var path = '{{~%(field)s}}'.split('.');
|
||||
var field = this;
|
||||
|
||||
for (p in path) {
|
||||
if (field)
|
||||
if (typeof field != 'undefined')
|
||||
field = field[path[p]];
|
||||
else
|
||||
break;
|
||||
@@ -1722,7 +1731,7 @@ class QuerySet(object):
|
||||
field.forEach(function(item) {
|
||||
emit(item, 1);
|
||||
});
|
||||
} else if (field) {
|
||||
} else if (typeof field != 'undefined') {
|
||||
emit(field, 1);
|
||||
} else {
|
||||
emit(null, 1);
|
||||
@@ -1746,12 +1755,12 @@ class QuerySet(object):
|
||||
if isinstance(key, float):
|
||||
if int(key) == key:
|
||||
key = int(key)
|
||||
key = str(key)
|
||||
frequencies[key] = f.value
|
||||
frequencies[key] = int(f.value)
|
||||
|
||||
if normalize:
|
||||
count = sum(frequencies.values())
|
||||
frequencies = dict([(k, v / count) for k, v in frequencies.items()])
|
||||
frequencies = dict([(k, float(v) / count)
|
||||
for k, v in frequencies.items()])
|
||||
|
||||
return frequencies
|
||||
|
||||
@@ -1759,31 +1768,28 @@ class QuerySet(object):
|
||||
"""Uses exec_js to execute"""
|
||||
freq_func = """
|
||||
function(path) {
|
||||
path = path.split('.');
|
||||
var path = path.split('.');
|
||||
|
||||
if (options.normalize) {
|
||||
var total = 0.0;
|
||||
db[collection].find(query).forEach(function(doc) {
|
||||
field = doc;
|
||||
for (p in path) {
|
||||
if (field)
|
||||
field = field[path[p]];
|
||||
else
|
||||
break;
|
||||
}
|
||||
if (field && field.constructor == Array) {
|
||||
total += field.length;
|
||||
} else {
|
||||
total++;
|
||||
}
|
||||
});
|
||||
}
|
||||
var total = 0.0;
|
||||
db[collection].find(query).forEach(function(doc) {
|
||||
var field = doc;
|
||||
for (p in path) {
|
||||
if (field)
|
||||
field = field[path[p]];
|
||||
else
|
||||
break;
|
||||
}
|
||||
if (field && field.constructor == Array) {
|
||||
total += field.length;
|
||||
} else {
|
||||
total++;
|
||||
}
|
||||
});
|
||||
|
||||
var frequencies = {};
|
||||
var types = {};
|
||||
var inc = 1.0;
|
||||
if (options.normalize) {
|
||||
inc /= total;
|
||||
}
|
||||
|
||||
db[collection].find(query).forEach(function(doc) {
|
||||
field = doc;
|
||||
for (p in path) {
|
||||
@@ -1798,17 +1804,28 @@ class QuerySet(object):
|
||||
});
|
||||
} else {
|
||||
var item = field;
|
||||
types[item] = item;
|
||||
frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
|
||||
}
|
||||
});
|
||||
return frequencies;
|
||||
return [total, frequencies, types];
|
||||
}
|
||||
"""
|
||||
data = self.exec_js(freq_func, field, normalize=normalize)
|
||||
if 'undefined' in data:
|
||||
data[None] = data['undefined']
|
||||
del(data['undefined'])
|
||||
return data
|
||||
total, data, types = self.exec_js(freq_func, field)
|
||||
values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
|
||||
|
||||
if normalize:
|
||||
values = dict([(k, float(v) / total) for k, v in values.items()])
|
||||
|
||||
frequencies = {}
|
||||
for k, v in values.iteritems():
|
||||
if isinstance(k, float):
|
||||
if int(k) == k:
|
||||
k = int(k)
|
||||
|
||||
frequencies[k] = v
|
||||
|
||||
return frequencies
|
||||
|
||||
def __repr__(self):
|
||||
"""Provides the string representation of the QuerySet
|
||||
@@ -1844,6 +1861,17 @@ class QuerySet(object):
|
||||
|
||||
|
||||
class QuerySetManager(object):
|
||||
"""
|
||||
The default QuerySet Manager.
|
||||
|
||||
Custom QuerySet Manager functions can extend this class and users can
|
||||
add extra queryset functionality. Any custom manager methods must accept a
|
||||
:class:`~mongoengine.Document` class as its first argument, and a
|
||||
:class:`~mongoengine.queryset.QuerySet` as its second argument.
|
||||
|
||||
The method function should return a :class:`~mongoengine.queryset.QuerySet`
|
||||
, probably the same one that was passed in, but modified in some way.
|
||||
"""
|
||||
|
||||
get_queryset = None
|
||||
|
||||
@@ -1864,10 +1892,13 @@ class QuerySetManager(object):
|
||||
queryset_class = owner._meta['queryset_class'] or QuerySet
|
||||
queryset = queryset_class(owner, owner._get_collection())
|
||||
if self.get_queryset:
|
||||
if self.get_queryset.func_code.co_argcount == 1:
|
||||
arg_count = self.get_queryset.func_code.co_argcount
|
||||
if arg_count == 1:
|
||||
queryset = self.get_queryset(queryset)
|
||||
else:
|
||||
elif arg_count == 2:
|
||||
queryset = self.get_queryset(owner, queryset)
|
||||
else:
|
||||
queryset = partial(self.get_queryset, owner, queryset)
|
||||
return queryset
|
||||
|
||||
|
||||
|
@@ -5,7 +5,7 @@
|
||||
%define srcname mongoengine
|
||||
|
||||
Name: python-%{srcname}
|
||||
Version: 0.6.13
|
||||
Version: 0.6.19
|
||||
Release: 1%{?dist}
|
||||
Summary: A Python Document-Object Mapper for working with MongoDB
|
||||
|
||||
|
@@ -5,9 +5,9 @@ test = nosetests
|
||||
verbosity = 2
|
||||
detailed-errors = 1
|
||||
#with-coverage = 1
|
||||
cover-html = 1
|
||||
cover-html-dir = ../htmlcov
|
||||
cover-package = mongoengine
|
||||
cover-erase = 1
|
||||
#cover-erase = 1
|
||||
#cover-html = 1
|
||||
#cover-html-dir = ../htmlcov
|
||||
#cover-package = mongoengine
|
||||
where = tests
|
||||
#tests = test_bugfix.py
|
||||
|
2
setup.py
2
setup.py
@@ -35,7 +35,7 @@ CLASSIFIERS = [
|
||||
|
||||
setup(name='mongoengine',
|
||||
version=VERSION,
|
||||
packages=find_packages(),
|
||||
packages=find_packages(exclude=('tests',)),
|
||||
author='Harry Marr',
|
||||
author_email='harry.marr@{nospam}gmail.com',
|
||||
maintainer="Ross Lawley",
|
||||
|
@@ -810,7 +810,7 @@ class FieldTest(unittest.TestCase):
|
||||
room = Room.objects.first().select_related()
|
||||
self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
|
||||
self.assertEquals(room.staffs_with_position[1]['staff'], bob)
|
||||
|
||||
|
||||
def test_document_reload_no_inheritance(self):
|
||||
class Foo(Document):
|
||||
meta = {'allow_inheritance': False}
|
||||
@@ -841,3 +841,25 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
self.assertEquals(type(foo.bar), Bar)
|
||||
self.assertEquals(type(foo.baz), Baz)
|
||||
|
||||
def test_list_lookup_not_checked_in_map(self):
|
||||
"""Ensure we dereference list data correctly
|
||||
"""
|
||||
class Comment(Document):
|
||||
id = IntField(primary_key=True)
|
||||
text = StringField()
|
||||
|
||||
class Message(Document):
|
||||
id = IntField(primary_key=True)
|
||||
comments = ListField(ReferenceField(Comment))
|
||||
|
||||
Comment.drop_collection()
|
||||
Message.drop_collection()
|
||||
|
||||
c1 = Comment(id=0, text='zero').save()
|
||||
c2 = Comment(id=1, text='one').save()
|
||||
Message(id=1, comments=[c1, c2]).save()
|
||||
|
||||
msg = Message.objects.get(id=1)
|
||||
self.assertEqual(0, msg.comments[0].id)
|
||||
self.assertEqual(1, msg.comments[1].id)
|
@@ -684,6 +684,29 @@ class DocumentTest(unittest.TestCase):
|
||||
self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal")
|
||||
self.assertEquals(Person.objects.get(name="Fred").rank, "Private")
|
||||
|
||||
def test_db_embedded_doc_field_load(self):
|
||||
"""Ensure we load embedded document data correctly
|
||||
"""
|
||||
class Rank(EmbeddedDocument):
|
||||
title = StringField(required=True)
|
||||
|
||||
class Person(Document):
|
||||
name = StringField(required=True)
|
||||
rank_ = EmbeddedDocumentField(Rank, required=False, db_field='rank')
|
||||
|
||||
@property
|
||||
def rank(self):
|
||||
return self.rank_.title if self.rank_ is not None else "Private"
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
Person(name="Jack", rank_=Rank(title="Corporal")).save()
|
||||
|
||||
Person(name="Fred").save()
|
||||
|
||||
self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal")
|
||||
self.assertEquals(Person.objects.get(name="Fred").rank, "Private")
|
||||
|
||||
def test_explicit_geo2d_index(self):
|
||||
"""Ensure that geo2d indexes work when created via meta[indexes]
|
||||
"""
|
||||
@@ -849,15 +872,26 @@ class DocumentTest(unittest.TestCase):
|
||||
|
||||
def test_geo_indexes_recursion(self):
|
||||
|
||||
class User(Document):
|
||||
channel = ReferenceField('Channel')
|
||||
class Location(Document):
|
||||
name = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
class Channel(Document):
|
||||
user = ReferenceField('User')
|
||||
location = GeoPointField()
|
||||
class Parent(Document):
|
||||
name = StringField()
|
||||
location = ReferenceField(Location)
|
||||
|
||||
self.assertEquals(len(User._geo_indices()), 2)
|
||||
Location.drop_collection()
|
||||
Parent.drop_collection()
|
||||
|
||||
list(Parent.objects)
|
||||
|
||||
collection = Parent._get_collection()
|
||||
info = collection.index_information()
|
||||
|
||||
self.assertFalse('location_2d' in info)
|
||||
|
||||
self.assertEquals(len(Parent._geo_indices()), 0)
|
||||
self.assertEquals(len(Location._geo_indices()), 1)
|
||||
|
||||
def test_covered_index(self):
|
||||
"""Ensure that covered indexes can be used
|
||||
@@ -2965,7 +2999,7 @@ class DocumentTest(unittest.TestCase):
|
||||
self.assertEqual(User.objects.first(), bob)
|
||||
self.assertEqual(Book.objects.first(), hp)
|
||||
|
||||
# DeRefecence
|
||||
# DeReference
|
||||
class AuthorBooks(Document):
|
||||
author = ReferenceField(User)
|
||||
book = ReferenceField(Book)
|
||||
@@ -2993,6 +3027,18 @@ class DocumentTest(unittest.TestCase):
|
||||
self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()])
|
||||
self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()])
|
||||
|
||||
def test_db_alias_propagates(self):
|
||||
"""db_alias propagates?
|
||||
"""
|
||||
class A(Document):
|
||||
name = StringField()
|
||||
meta = {"db_alias": "testdb-1", "allow_inheritance": True}
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
self.assertEquals('testdb-1', B._meta.get('db_alias'))
|
||||
|
||||
def test_db_ref_usage(self):
|
||||
""" DB Ref usage in __raw__ queries """
|
||||
|
||||
@@ -3103,7 +3149,7 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
|
||||
'Inception')
|
||||
|
||||
self.assertEquals(error.message, "root:\n1st.2nd.3rd.4th: Inception")
|
||||
self.assertEquals(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
|
||||
|
||||
def test_model_validation(self):
|
||||
|
||||
@@ -3114,13 +3160,11 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
try:
|
||||
User().validate()
|
||||
except ValidationError, e:
|
||||
expected_error_message = """Errors encountered validating document:
|
||||
username: Field is required ("username")
|
||||
name: Field is required ("name")"""
|
||||
expected_error_message = """ValidationError(Field is required: ['username', 'name'])"""
|
||||
self.assertEquals(e.message, expected_error_message)
|
||||
self.assertEquals(e.to_dict(), {
|
||||
'username': 'Field is required ("username")',
|
||||
'name': u'Field is required ("name")'})
|
||||
'username': 'Field is required',
|
||||
'name': 'Field is required'})
|
||||
|
||||
def test_spaces_in_keys(self):
|
||||
|
||||
@@ -3138,5 +3182,43 @@ name: Field is required ("name")"""
|
||||
one = Doc.objects.filter(**{'hello world': 1}).count()
|
||||
self.assertEqual(1, one)
|
||||
|
||||
|
||||
def test_fields_rewrite(self):
|
||||
class BasePerson(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {'abstract': True}
|
||||
|
||||
class Person(BasePerson):
|
||||
name = StringField(required=True)
|
||||
|
||||
|
||||
p = Person(age=15)
|
||||
self.assertRaises(ValidationError, p.validate)
|
||||
|
||||
def test_cascaded_save_wrong_reference(self):
|
||||
|
||||
class ADocument(Document):
|
||||
val = IntField()
|
||||
|
||||
class BDocument(Document):
|
||||
a = ReferenceField(ADocument)
|
||||
|
||||
ADocument.drop_collection()
|
||||
BDocument.drop_collection()
|
||||
|
||||
a = ADocument()
|
||||
a.val = 15
|
||||
a.save()
|
||||
|
||||
b = BDocument()
|
||||
b.a = a
|
||||
b.save()
|
||||
|
||||
a.delete()
|
||||
|
||||
b = BDocument.objects.first()
|
||||
b.save(cascade=True)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -6,6 +6,7 @@ import StringIO
|
||||
import tempfile
|
||||
import gridfs
|
||||
|
||||
from bson import Binary
|
||||
from decimal import Decimal
|
||||
|
||||
from mongoengine import *
|
||||
@@ -127,6 +128,19 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
self.assertRaises(ValidationError, ret.validate)
|
||||
|
||||
def test_int_and_float_ne_operator(self):
|
||||
class TestDocument(Document):
|
||||
int_fld = IntField()
|
||||
float_fld = FloatField()
|
||||
|
||||
TestDocument.drop_collection()
|
||||
|
||||
TestDocument(int_fld=None, float_fld=None).save()
|
||||
TestDocument(int_fld=1, float_fld=1).save()
|
||||
|
||||
self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
|
||||
self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
|
||||
|
||||
def test_object_id_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to string fields.
|
||||
"""
|
||||
@@ -258,25 +272,54 @@ class FieldTest(unittest.TestCase):
|
||||
person.admin = 'Yes'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
|
||||
def test_uuid_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to UUID fields.
|
||||
def test_uuid_field_string(self):
|
||||
"""Test UUID fields storing as String
|
||||
"""
|
||||
class Person(Document):
|
||||
api_key = UUIDField()
|
||||
api_key = UUIDField(binary=False)
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
uu = uuid.uuid4()
|
||||
Person(api_key=uu).save()
|
||||
self.assertEqual(1, Person.objects(api_key=uu).count())
|
||||
|
||||
person = Person()
|
||||
# any uuid type is valid
|
||||
person.api_key = uuid.uuid4()
|
||||
person.validate()
|
||||
person.api_key = uuid.uuid1()
|
||||
person.validate()
|
||||
valid = (uuid.uuid4(), uuid.uuid1())
|
||||
for api_key in valid:
|
||||
person.api_key = api_key
|
||||
person.validate()
|
||||
|
||||
invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
|
||||
'9d159858-549b-4975-9f98-dd2f987c113')
|
||||
for api_key in invalid:
|
||||
person.api_key = api_key
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
|
||||
def test_uuid_field_binary(self):
|
||||
"""Test UUID fields storing as Binary object
|
||||
"""
|
||||
class Person(Document):
|
||||
api_key = UUIDField(binary=True)
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
uu = uuid.uuid4()
|
||||
Person(api_key=uu).save()
|
||||
self.assertEqual(1, Person.objects(api_key=uu).count())
|
||||
|
||||
person = Person()
|
||||
valid = (uuid.uuid4(), uuid.uuid1())
|
||||
for api_key in valid:
|
||||
person.api_key = api_key
|
||||
person.validate()
|
||||
|
||||
invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
|
||||
'9d159858-549b-4975-9f98-dd2f987c113')
|
||||
for api_key in invalid:
|
||||
person.api_key = api_key
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
|
||||
# last g cannot belong to an hex number
|
||||
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
# short strings don't validate
|
||||
person.api_key = '9d159858-549b-4975-9f98-dd2f987c113'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
|
||||
def test_datetime_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to datetime fields.
|
||||
@@ -345,24 +388,6 @@ class FieldTest(unittest.TestCase):
|
||||
self.assertNotEquals(log.date, d1)
|
||||
self.assertEquals(log.date, d2)
|
||||
|
||||
# Pre UTC microseconds above 1000 is wonky.
|
||||
# log.date has an invalid microsecond value so I can't construct
|
||||
# a date to compare.
|
||||
#
|
||||
# However, the timedelta is predicable with pre UTC timestamps
|
||||
# It always adds 16 seconds and [777216-776217] microseconds
|
||||
for i in xrange(1001, 3113, 33):
|
||||
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertNotEquals(log.date, d1)
|
||||
|
||||
delta = log.date - d1
|
||||
self.assertEquals(delta.seconds, 16)
|
||||
microseconds = 777216 - (i % 1000)
|
||||
self.assertEquals(delta.microseconds, microseconds)
|
||||
|
||||
LogEntry.drop_collection()
|
||||
|
||||
def test_complexdatetime_storage(self):
|
||||
@@ -933,6 +958,19 @@ class FieldTest(unittest.TestCase):
|
||||
doc = self.db.test.find_one()
|
||||
self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
|
||||
|
||||
def test_map_field_lookup(self):
|
||||
"""Ensure MapField lookups succeed on Fields without a lookup method"""
|
||||
|
||||
class Log(Document):
|
||||
name = StringField()
|
||||
visited = MapField(DateTimeField())
|
||||
|
||||
Log.drop_collection()
|
||||
Log(name="wilson", visited={'friends': datetime.datetime.now()}).save()
|
||||
|
||||
self.assertEqual(1, Log.objects(
|
||||
visited__friends__exists=True).count())
|
||||
|
||||
def test_embedded_db_field(self):
|
||||
|
||||
class Embedded(EmbeddedDocument):
|
||||
@@ -1433,7 +1471,7 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
attachment_1 = Attachment.objects().first()
|
||||
self.assertEqual(MIME_TYPE, attachment_1.content_type)
|
||||
self.assertEqual(BLOB, attachment_1.blob)
|
||||
self.assertEqual(BLOB, str(attachment_1.blob))
|
||||
|
||||
Attachment.drop_collection()
|
||||
|
||||
@@ -1460,7 +1498,7 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
attachment_required = AttachmentRequired()
|
||||
self.assertRaises(ValidationError, attachment_required.validate)
|
||||
attachment_required.blob = '\xe6\x00\xc4\xff\x07'
|
||||
attachment_required.blob = Binary('\xe6\x00\xc4\xff\x07')
|
||||
attachment_required.validate()
|
||||
|
||||
attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07')
|
||||
@@ -1472,6 +1510,18 @@ class FieldTest(unittest.TestCase):
|
||||
AttachmentRequired.drop_collection()
|
||||
AttachmentSizeLimit.drop_collection()
|
||||
|
||||
def test_binary_field_primary(self):
|
||||
|
||||
class Attachment(Document):
|
||||
id = BinaryField(primary_key=True)
|
||||
|
||||
Attachment.drop_collection()
|
||||
|
||||
att = Attachment(id=uuid.uuid4().bytes).save()
|
||||
att.delete()
|
||||
|
||||
self.assertEqual(0, Attachment.objects.count())
|
||||
|
||||
def test_choices_validation(self):
|
||||
"""Ensure that value is in a container of allowed values.
|
||||
"""
|
||||
@@ -1572,13 +1622,13 @@ class FieldTest(unittest.TestCase):
|
||||
"""Ensure that file fields can be written to and their data retrieved
|
||||
"""
|
||||
class PutFile(Document):
|
||||
file = FileField()
|
||||
the_file = FileField()
|
||||
|
||||
class StreamFile(Document):
|
||||
file = FileField()
|
||||
the_file = FileField()
|
||||
|
||||
class SetFile(Document):
|
||||
file = FileField()
|
||||
the_file = FileField()
|
||||
|
||||
text = 'Hello, World!'
|
||||
more_text = 'Foo Bar'
|
||||
@@ -1589,14 +1639,14 @@ class FieldTest(unittest.TestCase):
|
||||
SetFile.drop_collection()
|
||||
|
||||
putfile = PutFile()
|
||||
putfile.file.put(text, content_type=content_type)
|
||||
putfile.the_file.put(text, content_type=content_type)
|
||||
putfile.save()
|
||||
putfile.validate()
|
||||
result = PutFile.objects.first()
|
||||
self.assertTrue(putfile == result)
|
||||
self.assertEquals(result.file.read(), text)
|
||||
self.assertEquals(result.file.content_type, content_type)
|
||||
result.file.delete() # Remove file from GridFS
|
||||
self.assertEquals(result.the_file.read(), text)
|
||||
self.assertEquals(result.the_file.content_type, content_type)
|
||||
result.the_file.delete() # Remove file from GridFS
|
||||
PutFile.objects.delete()
|
||||
|
||||
# Ensure file-like objects are stored
|
||||
@@ -1604,53 +1654,53 @@ class FieldTest(unittest.TestCase):
|
||||
putstring = StringIO.StringIO()
|
||||
putstring.write(text)
|
||||
putstring.seek(0)
|
||||
putfile.file.put(putstring, content_type=content_type)
|
||||
putfile.the_file.put(putstring, content_type=content_type)
|
||||
putfile.save()
|
||||
putfile.validate()
|
||||
result = PutFile.objects.first()
|
||||
self.assertTrue(putfile == result)
|
||||
self.assertEquals(result.file.read(), text)
|
||||
self.assertEquals(result.file.content_type, content_type)
|
||||
result.file.delete()
|
||||
self.assertEquals(result.the_file.read(), text)
|
||||
self.assertEquals(result.the_file.content_type, content_type)
|
||||
result.the_file.delete()
|
||||
|
||||
streamfile = StreamFile()
|
||||
streamfile.file.new_file(content_type=content_type)
|
||||
streamfile.file.write(text)
|
||||
streamfile.file.write(more_text)
|
||||
streamfile.file.close()
|
||||
streamfile.the_file.new_file(content_type=content_type)
|
||||
streamfile.the_file.write(text)
|
||||
streamfile.the_file.write(more_text)
|
||||
streamfile.the_file.close()
|
||||
streamfile.save()
|
||||
streamfile.validate()
|
||||
result = StreamFile.objects.first()
|
||||
self.assertTrue(streamfile == result)
|
||||
self.assertEquals(result.file.read(), text + more_text)
|
||||
self.assertEquals(result.file.content_type, content_type)
|
||||
result.file.seek(0)
|
||||
self.assertEquals(result.file.tell(), 0)
|
||||
self.assertEquals(result.file.read(len(text)), text)
|
||||
self.assertEquals(result.file.tell(), len(text))
|
||||
self.assertEquals(result.file.read(len(more_text)), more_text)
|
||||
self.assertEquals(result.file.tell(), len(text + more_text))
|
||||
result.file.delete()
|
||||
self.assertEquals(result.the_file.read(), text + more_text)
|
||||
self.assertEquals(result.the_file.content_type, content_type)
|
||||
result.the_file.seek(0)
|
||||
self.assertEquals(result.the_file.tell(), 0)
|
||||
self.assertEquals(result.the_file.read(len(text)), text)
|
||||
self.assertEquals(result.the_file.tell(), len(text))
|
||||
self.assertEquals(result.the_file.read(len(more_text)), more_text)
|
||||
self.assertEquals(result.the_file.tell(), len(text + more_text))
|
||||
result.the_file.delete()
|
||||
|
||||
# Ensure deleted file returns None
|
||||
self.assertTrue(result.file.read() == None)
|
||||
self.assertTrue(result.the_file.read() == None)
|
||||
|
||||
setfile = SetFile()
|
||||
setfile.file = text
|
||||
setfile.the_file = text
|
||||
setfile.save()
|
||||
setfile.validate()
|
||||
result = SetFile.objects.first()
|
||||
self.assertTrue(setfile == result)
|
||||
self.assertEquals(result.file.read(), text)
|
||||
self.assertEquals(result.the_file.read(), text)
|
||||
|
||||
# Try replacing file with new one
|
||||
result.file.replace(more_text)
|
||||
result.the_file.replace(more_text)
|
||||
result.save()
|
||||
result.validate()
|
||||
result = SetFile.objects.first()
|
||||
self.assertTrue(setfile == result)
|
||||
self.assertEquals(result.file.read(), more_text)
|
||||
result.file.delete()
|
||||
self.assertEquals(result.the_file.read(), more_text)
|
||||
result.the_file.delete()
|
||||
|
||||
PutFile.drop_collection()
|
||||
StreamFile.drop_collection()
|
||||
@@ -1658,7 +1708,7 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
# Make sure FileField is optional and not required
|
||||
class DemoFile(Document):
|
||||
file = FileField()
|
||||
the_file = FileField()
|
||||
DemoFile.objects.create()
|
||||
|
||||
|
||||
@@ -1709,20 +1759,20 @@ class FieldTest(unittest.TestCase):
|
||||
"""
|
||||
class TestFile(Document):
|
||||
name = StringField()
|
||||
file = FileField()
|
||||
the_file = FileField()
|
||||
|
||||
# First instance
|
||||
testfile = TestFile()
|
||||
testfile.name = "Hello, World!"
|
||||
testfile.file.put('Hello, World!')
|
||||
testfile.save()
|
||||
test_file = TestFile()
|
||||
test_file.name = "Hello, World!"
|
||||
test_file.the_file.put('Hello, World!')
|
||||
test_file.save()
|
||||
|
||||
# Second instance
|
||||
testfiledupe = TestFile()
|
||||
data = testfiledupe.file.read() # Should be None
|
||||
test_file_dupe = TestFile()
|
||||
data = test_file_dupe.the_file.read() # Should be None
|
||||
|
||||
self.assertTrue(testfile.name != testfiledupe.name)
|
||||
self.assertTrue(testfile.file.read() != data)
|
||||
self.assertTrue(test_file.name != test_file_dupe.name)
|
||||
self.assertTrue(test_file.the_file.read() != data)
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
@@ -1730,17 +1780,25 @@ class FieldTest(unittest.TestCase):
|
||||
"""Ensure that a boolean test of a FileField indicates its presence
|
||||
"""
|
||||
class TestFile(Document):
|
||||
file = FileField()
|
||||
the_file = FileField()
|
||||
|
||||
testfile = TestFile()
|
||||
self.assertFalse(bool(testfile.file))
|
||||
testfile.file = 'Hello, World!'
|
||||
testfile.file.content_type = 'text/plain'
|
||||
testfile.save()
|
||||
self.assertTrue(bool(testfile.file))
|
||||
test_file = TestFile()
|
||||
self.assertFalse(bool(test_file.the_file))
|
||||
test_file.the_file = 'Hello, World!'
|
||||
test_file.the_file.content_type = 'text/plain'
|
||||
test_file.save()
|
||||
self.assertTrue(bool(test_file.the_file))
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
def test_file_cmp(self):
|
||||
"""Test comparing against other types"""
|
||||
class TestFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
test_file = TestFile()
|
||||
self.assertFalse(test_file.the_file in [{"test": 1}])
|
||||
|
||||
def test_image_field(self):
|
||||
|
||||
class TestImage(Document):
|
||||
@@ -1804,30 +1862,30 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
|
||||
def test_file_multidb(self):
|
||||
register_connection('testfiles', 'testfiles')
|
||||
register_connection('test_files', 'test_files')
|
||||
class TestFile(Document):
|
||||
name = StringField()
|
||||
file = FileField(db_alias="testfiles",
|
||||
collection_name="macumba")
|
||||
the_file = FileField(db_alias="test_files",
|
||||
collection_name="macumba")
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
# delete old filesystem
|
||||
get_db("testfiles").macumba.files.drop()
|
||||
get_db("testfiles").macumba.chunks.drop()
|
||||
get_db("test_files").macumba.files.drop()
|
||||
get_db("test_files").macumba.chunks.drop()
|
||||
|
||||
# First instance
|
||||
testfile = TestFile()
|
||||
testfile.name = "Hello, World!"
|
||||
testfile.file.put('Hello, World!',
|
||||
test_file = TestFile()
|
||||
test_file.name = "Hello, World!"
|
||||
test_file.the_file.put('Hello, World!',
|
||||
name="hello.txt")
|
||||
testfile.save()
|
||||
test_file.save()
|
||||
|
||||
data = get_db("testfiles").macumba.files.find_one()
|
||||
data = get_db("test_files").macumba.files.find_one()
|
||||
self.assertEquals(data.get('name'), 'hello.txt')
|
||||
|
||||
testfile = TestFile.objects.first()
|
||||
self.assertEquals(testfile.file.read(),
|
||||
test_file = TestFile.objects.first()
|
||||
self.assertEquals(test_file.the_file.read(),
|
||||
'Hello, World!')
|
||||
|
||||
def test_geo_indexes(self):
|
||||
@@ -2109,7 +2167,7 @@ class FieldTest(unittest.TestCase):
|
||||
self.assertTrue(1 in error_dict['comments'])
|
||||
self.assertTrue('content' in error_dict['comments'][1])
|
||||
self.assertEquals(error_dict['comments'][1]['content'],
|
||||
u'Field is required ("content")')
|
||||
'Field is required')
|
||||
|
||||
post.comments[1].content = 'here we go'
|
||||
post.validate()
|
||||
|
@@ -579,6 +579,64 @@ class QuerySetTest(unittest.TestCase):
|
||||
Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
|
||||
self.assertEqual(Blog.objects.count(), 3)
|
||||
|
||||
def test_get_changed_fields_query_count(self):
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
owns = ListField(ReferenceField('Organization'))
|
||||
projects = ListField(ReferenceField('Project'))
|
||||
|
||||
class Organization(Document):
|
||||
name = StringField()
|
||||
owner = ReferenceField('Person')
|
||||
employees = ListField(ReferenceField('Person'))
|
||||
|
||||
class Project(Document):
|
||||
name = StringField()
|
||||
|
||||
Person.drop_collection()
|
||||
Organization.drop_collection()
|
||||
Project.drop_collection()
|
||||
|
||||
r1 = Project(name="r1").save()
|
||||
r2 = Project(name="r2").save()
|
||||
r3 = Project(name="r3").save()
|
||||
p1 = Person(name="p1", projects=[r1, r2]).save()
|
||||
p2 = Person(name="p2", projects=[r2]).save()
|
||||
o1 = Organization(name="o1", employees=[p1]).save()
|
||||
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, 0)
|
||||
|
||||
fresh_o1 = Organization.objects.get(id=o1.id)
|
||||
self.assertEqual(1, q)
|
||||
fresh_o1._get_changed_fields()
|
||||
self.assertEqual(1, q)
|
||||
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, 0)
|
||||
|
||||
fresh_o1 = Organization.objects.get(id=o1.id)
|
||||
fresh_o1.save()
|
||||
|
||||
self.assertEquals(q, 2)
|
||||
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, 0)
|
||||
|
||||
fresh_o1 = Organization.objects.get(id=o1.id)
|
||||
fresh_o1.save(cascade=False)
|
||||
|
||||
self.assertEquals(q, 2)
|
||||
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, 0)
|
||||
|
||||
fresh_o1 = Organization.objects.get(id=o1.id)
|
||||
fresh_o1.employees.append(p2)
|
||||
fresh_o1.save(cascade=False)
|
||||
|
||||
self.assertEquals(q, 3)
|
||||
|
||||
def test_slave_okay(self):
|
||||
"""Ensures that a query can take slave_okay syntax
|
||||
@@ -1520,7 +1578,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_update_push_and_pull(self):
|
||||
def test_update_push_and_pull_add_to_set(self):
|
||||
"""Ensure that the 'pull' update operation works correctly.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
@@ -1553,6 +1611,23 @@ class QuerySetTest(unittest.TestCase):
|
||||
post.reload()
|
||||
self.assertEqual(post.tags, ["code", "mongodb"])
|
||||
|
||||
def test_add_to_set_each(self):
|
||||
class Item(Document):
|
||||
name = StringField(required=True)
|
||||
description = StringField(max_length=50)
|
||||
parents = ListField(ReferenceField('self'))
|
||||
|
||||
Item.drop_collection()
|
||||
|
||||
item = Item(name='test item').save()
|
||||
parent_1 = Item(name='parent 1').save()
|
||||
parent_2 = Item(name='parent 2').save()
|
||||
|
||||
item.update(add_to_set__parents=[parent_1, parent_2, parent_1])
|
||||
item.reload()
|
||||
|
||||
self.assertEqual([parent_1, parent_2], item.parents)
|
||||
|
||||
def test_pull_nested(self):
|
||||
|
||||
class User(Document):
|
||||
@@ -1977,9 +2052,9 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
# Check item_frequencies works for non-list fields
|
||||
def test_assertions(f):
|
||||
self.assertEqual(set(['1', '2']), set(f.keys()))
|
||||
self.assertEqual(f['1'], 1)
|
||||
self.assertEqual(f['2'], 2)
|
||||
self.assertEqual(set([1, 2]), set(f.keys()))
|
||||
self.assertEqual(f[1], 1)
|
||||
self.assertEqual(f[2], 2)
|
||||
|
||||
exec_js = BlogPost.objects.item_frequencies('hits')
|
||||
map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True)
|
||||
@@ -2079,7 +2154,6 @@ class QuerySetTest(unittest.TestCase):
|
||||
data = EmbeddedDocumentField(Data, required=True)
|
||||
extra = EmbeddedDocumentField(Extra)
|
||||
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person()
|
||||
@@ -2097,6 +2171,52 @@ class QuerySetTest(unittest.TestCase):
|
||||
ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
|
||||
self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
|
||||
|
||||
def test_item_frequencies_with_0_values(self):
|
||||
class Test(Document):
|
||||
val = IntField()
|
||||
|
||||
Test.drop_collection()
|
||||
t = Test()
|
||||
t.val = 0
|
||||
t.save()
|
||||
|
||||
ot = Test.objects.item_frequencies('val', map_reduce=True)
|
||||
self.assertEquals(ot, {0: 1})
|
||||
ot = Test.objects.item_frequencies('val', map_reduce=False)
|
||||
self.assertEquals(ot, {0: 1})
|
||||
|
||||
def test_item_frequencies_with_False_values(self):
|
||||
class Test(Document):
|
||||
val = BooleanField()
|
||||
|
||||
Test.drop_collection()
|
||||
t = Test()
|
||||
t.val = False
|
||||
t.save()
|
||||
|
||||
ot = Test.objects.item_frequencies('val', map_reduce=True)
|
||||
self.assertEquals(ot, {False: 1})
|
||||
ot = Test.objects.item_frequencies('val', map_reduce=False)
|
||||
self.assertEquals(ot, {False: 1})
|
||||
|
||||
def test_item_frequencies_normalize(self):
|
||||
class Test(Document):
|
||||
val = IntField()
|
||||
|
||||
Test.drop_collection()
|
||||
|
||||
for i in xrange(50):
|
||||
Test(val=1).save()
|
||||
|
||||
for i in xrange(20):
|
||||
Test(val=2).save()
|
||||
|
||||
freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True)
|
||||
self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})
|
||||
|
||||
freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True)
|
||||
self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})
|
||||
|
||||
def test_average(self):
|
||||
"""Ensure that field can be averaged correctly.
|
||||
"""
|
||||
@@ -2166,28 +2286,29 @@ class QuerySetTest(unittest.TestCase):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
|
||||
@queryset_manager
|
||||
def objects(doc_cls, queryset):
|
||||
return queryset(deleted=False)
|
||||
def objects(cls, qryset):
|
||||
opts = {"deleted": False}
|
||||
return qryset(**opts)
|
||||
|
||||
@queryset_manager
|
||||
def music_posts(doc_cls, queryset):
|
||||
return queryset(tags='music', deleted=False).order_by('-date')
|
||||
def music_posts(doc_cls, queryset, deleted=False):
|
||||
return queryset(tags='music',
|
||||
deleted=deleted).order_by('date')
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post1 = BlogPost(tags=['music', 'film'])
|
||||
post1.save()
|
||||
post2 = BlogPost(tags=['music'])
|
||||
post2.save()
|
||||
post3 = BlogPost(tags=['film', 'actors'])
|
||||
post3.save()
|
||||
post4 = BlogPost(tags=['film', 'actors'], deleted=True)
|
||||
post4.save()
|
||||
post1 = BlogPost(tags=['music', 'film']).save()
|
||||
post2 = BlogPost(tags=['music']).save()
|
||||
post3 = BlogPost(tags=['film', 'actors']).save()
|
||||
post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save()
|
||||
|
||||
self.assertEqual([p.id for p in BlogPost.objects],
|
||||
self.assertEqual([p.id for p in BlogPost.objects()],
|
||||
[post1.id, post2.id, post3.id])
|
||||
self.assertEqual([p.id for p in BlogPost.music_posts],
|
||||
[post2.id, post1.id])
|
||||
self.assertEqual([p.id for p in BlogPost.music_posts()],
|
||||
[post1.id, post2.id])
|
||||
|
||||
self.assertEqual([p.id for p in BlogPost.music_posts(True)],
|
||||
[post4.id])
|
||||
|
||||
BlogPost.drop_collection()