Compare commits
30 Commits
SHA1
----
9cc6164026
475488b9f2
95b1783834
12c8b5c0b9
f99b7a811b
2801b38c75
dc3fea875e
aab8c2b687
3577773af3
8ac9e6dc19
4b3cea9e78
2420b5e937
f23a976bea
4226cd08f1
7a230f1693
a43d0d4612
78a40a0c70
2c69d8f0b0
0018c38b83
8df81571fc
48f988acd7
6526923345
24fd1acce6
cbb9235dc5
19ec2c9bc9
6459d4c0b6
1304f2721f
8bde0c0e53
598ffd3e5c
601f0eb168
AUTHORS

@@ -113,4 +113,6 @@ that much better:
 * Alexander Koshelev
 * Jaime Irurzun
 * Alexandre González
 * Thomas Steinacher
+* Tommi Komulainen
+* Peter Landry
LICENSE

@@ -1,5 +1,5 @@
-Copyright (c) 2009-2010 Harry Marr
+Copyright (c) 2009-2012 See AUTHORS
 
 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation
 files (the "Software"), to deal in the Software without
@@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the
 Software is furnished to do so, subject to the following
 conditions:
 
 The above copyright notice and this permission notice shall be
 included in all copies or substantial portions of the Software.
 
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
@@ -2,6 +2,7 @@
 MongoEngine
 ===========
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
+:Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
 :Maintainer: Ross Lawley (http://github.com/rozza)
@@ -2,6 +2,29 @@
 Changelog
 =========
 
+Changes in 0.6.20
+=================
+- Added support for distinct and db_alias (MongoEngine/mongoengine#59)
+- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
+- Fixed BinaryField lookup re (MongoEngine/mongoengine#48)
+
+Changes in 0.6.19
+=================
+
+- Added Binary support to UUID (MongoEngine/mongoengine#47)
+- Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46)
+- Fixed BinaryField python value issue (MongoEngine/mongoengine#48)
+- Fixed SequenceField non-numeric value lookup (MongoEngine/mongoengine#41)
+- Fixed queryset manager issue (MongoEngine/mongoengine#52)
+- Fixed FileField comparison (hmarr/mongoengine#547)
+
+Changes in 0.6.18
+=================
+- Fixed recursion loading bug in _get_changed_fields
+
+Changes in 0.6.17
+=================
+- Fixed issue with custom queryset manager expecting explicit variable names
+
 Changes in 0.6.16
 =================
@@ -27,7 +50,7 @@ Changes in 0.6.14
 - Added support for add_to_set and each
 
 Changes in 0.6.13
-================
+=================
 - Fixed EmbeddedDocument db_field validation issue
 - Fixed StringField unicode issue
 - Fixes __repr__ modifying the cursor
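A sketch of the improved queryset chaining noted in the 0.6.20 entry above (a hedged example: it assumes a local MongoDB and a default connection; the ``Page`` document and database name are illustrative, not part of this changeset)::

    from mongoengine import *

    connect('chaining_sketch')

    class Page(Document):
        author = StringField()

    Page.drop_collection()
    Page(author="bob").save()

    # Before 0.6.20 the second filter on the same field replaced the
    # first; both constraints are now kept (merged into an $and query),
    # so this chain matches nothing.
    qs = Page.objects.filter(author__in=["bob"]).filter(author="ann")
    assert qs.count() == 0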
@@ -259,6 +259,35 @@ as the constructor's argument::
         content = StringField()
 
+.. _one-to-many-with-listfields:
+
+One to Many with ListFields
+'''''''''''''''''''''''''''
+
+If you are implementing a one to many relationship via a list of references,
+then the references are stored as DBRefs and to query you need to pass an
+instance of the object to the query::
+
+    class User(Document):
+        name = StringField()
+
+    class Page(Document):
+        content = StringField()
+        authors = ListField(ReferenceField(User))
+
+    bob = User(name="Bob Jones").save()
+    john = User(name="John Smith").save()
+
+    Page(content="Test Page", authors=[bob, john]).save()
+    Page(content="Another Page", authors=[john]).save()
+
+    # Find all pages Bob authored
+    Page.objects(authors__in=[bob])
+
+    # Find all pages that both Bob and John have authored
+    Page.objects(authors__all=[bob, john])
+
 Dealing with deletion of referred documents
 '''''''''''''''''''''''''''''''''''''''''''
 By default, MongoDB doesn't check the integrity of your data, so deleting
@@ -232,7 +232,7 @@ custom manager methods as you like::
     BlogPost(title='test1', published=False).save()
     BlogPost(title='test2', published=True).save()
     assert len(BlogPost.objects) == 2
-    assert len(BlogPost.live_posts) == 1
+    assert len(BlogPost.live_posts()) == 1
 
 Custom QuerySets
 ================
@@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a
 :class:`~mongoengine.Document`\ s ``meta`` dictionary::
 
     class AwesomerQuerySet(QuerySet):
-        pass
+
+        def get_awesome(self):
+            return self.filter(awesome=True)
 
     class Page(Document):
         meta = {'queryset_class': AwesomerQuerySet}
 
+    # To call:
+    Page.objects.get_awesome()
+
 .. versionadded:: 0.4
 
 Aggregation
@@ -12,7 +12,7 @@ from signals import *
 __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
            queryset.__all__ + signals.__all__)
 
-VERSION = (0, 6, 16)
+VERSION = (0, 6, 20)
 
 
 def get_version():
@@ -1012,9 +1012,10 @@ Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, error
         field_list.update(self._dynamic_fields)
 
         for field_name in field_list:
             db_field_name = self._db_field_map.get(field_name, field_name)
             key = '%s.' % db_field_name
-            field = getattr(self, field_name, None)
+            field = self._data.get(field_name, None)
             if hasattr(field, 'id'):
                 if field.id in inspected:
                     continue
@@ -34,7 +34,9 @@ class DeReference(object):
 
         doc_type = None
         if instance and instance._fields:
-            doc_type = instance._fields[name].field
+            doc_type = instance._fields[name]
+            if hasattr(doc_type, 'field'):
+                doc_type = doc_type.field
 
             if isinstance(doc_type, ReferenceField):
                 doc_type = doc_type.document_type
@@ -375,7 +375,7 @@ class DynamicDocument(Document):
     :class:`~mongoengine.DynamicField` and data can be attributed to that
     field.
 
-    ..note::
+    .. note::
 
         There is one caveat on Dynamic Documents: fields cannot start with `_`
     """
@@ -4,9 +4,9 @@ import decimal
 import gridfs
 import re
 import uuid
+import warnings
 
 from bson import Binary, DBRef, SON, ObjectId
 
 from base import (BaseField, ComplexBaseField, ObjectIdField,
                   ValidationError, get_document, BaseDocument)
 from queryset import DO_NOTHING, QuerySet
@@ -169,7 +169,7 @@ class IntField(BaseField):
 
     def prepare_query_value(self, op, value):
         if value is None:
             return value
 
         return int(value)
@@ -199,7 +199,7 @@ class FloatField(BaseField):
 
     def prepare_query_value(self, op, value):
         if value is None:
             return value
 
         return float(value)
@@ -451,7 +451,7 @@ class GenericEmbeddedDocumentField(BaseField):
 
     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
 
-    ..note :: You can use the choices param to limit the acceptable
+    .. note:: You can use the choices param to limit the acceptable
         EmbeddedDocument types
     """
@@ -483,7 +483,7 @@ class GenericEmbeddedDocumentField(BaseField):
 
 
 class DynamicField(BaseField):
-    """A tryly dynamic field type capable of handling different and varying
+    """A truly dynamic field type capable of handling different and varying
     types of data.
 
     Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
@@ -530,6 +530,8 @@ class ListField(ComplexBaseField):
     """A list field that wraps a standard field, allowing multiple instances
     of the field to be used as a list in the database.
 
+    If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
+
     .. note::
         Required means it cannot be empty - as the default for ListFields is []
     """
@@ -766,10 +768,10 @@ class GenericReferenceField(BaseField):
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).
 
-    ..note :: Any documents used as a generic reference must be registered in the
+    .. note:: Any documents used as a generic reference must be registered in the
        document registry.  Importing the model will automatically register it.
 
-    ..note :: You can use the choices param to limit the acceptable Document types
+    .. note:: You can use the choices param to limit the acceptable Document types
 
     .. versionadded:: 0.3
     """
@@ -843,12 +845,9 @@ class BinaryField(BaseField):
 
     def to_mongo(self, value):
         return Binary(value)
 
-    def to_python(self, value):
-        return "%s" % value
-
     def validate(self, value):
-        if not isinstance(value, basestring):
-            self.error('BinaryField only accepts string values')
+        if not isinstance(value, (basestring, Binary)):
+            self.error('BinaryField only accepts string or bson Binary values')
 
         if self.max_bytes is not None and len(value) > self.max_bytes:
             self.error('Binary value is too long')
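The widened ``validate()`` check above means raw bson ``Binary`` values are now accepted alongside strings. A minimal sketch (hedged: assumes a default connection; the ``Attachment`` document and database name are illustrative)::

    from bson import Binary
    from mongoengine import *

    connect('binaryfield_sketch')

    class Attachment(Document):
        blob = BinaryField()

    Attachment.drop_collection()

    # Both values validate under the widened isinstance check.
    Attachment(blob='\xe6\x00\xc4\xff\x07').save()
    Attachment(blob=Binary('\xe6\x00\xc4\xff\x07')).save()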
@@ -905,6 +904,8 @@ class GridFSProxy(object):
         return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
 
     def __cmp__(self, other):
+        if not isinstance(other, GridFSProxy):
+            return -1
         return cmp((self.grid_id, self.collection_name, self.db_alias),
                    (other.grid_id, other.collection_name, other.db_alias))
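With the ``isinstance`` guard in ``__cmp__``, comparing a GridFS proxy against arbitrary objects no longer breaks (a sketch; the ``Wrapper`` document and database name are illustrative)::

    from mongoengine import *

    connect('filefield_cmp_sketch')

    class Wrapper(Document):
        the_file = FileField()

    w = Wrapper()
    # Membership tests compare the proxy against a dict; __cmp__ now
    # returns -1 for non-GridFSProxy values, so this is simply False.
    assert w.the_file not in [{"test": 1}]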
@@ -1287,7 +1288,7 @@ class SequenceField(IntField):
             instance._data[self.name] = value
             instance._mark_as_changed(self.name)
 
-        return value
+        return int(value) if value else None
 
     def __set__(self, instance, value):
@@ -1307,17 +1308,40 @@ class UUIDField(BaseField):
 
     .. versionadded:: 0.6
     """
+    _binary = None
 
-    def __init__(self, **kwargs):
+    def __init__(self, binary=None, **kwargs):
+        """
+        Store UUID data in the database
+
+        :param binary: (optional) boolean store as binary.
+
+        .. versionchanged:: 0.6.19
+        """
+        if binary is None:
+            binary = False
+            msg = ("UUIDFields will soon default to store as binary, please "
+                   "configure binary=False if you wish to store as a string")
+            warnings.warn(msg, FutureWarning)
+        self._binary = binary
         super(UUIDField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        if not isinstance(value, basestring):
-            value = unicode(value)
-        return uuid.UUID(value)
+        if not self._binary:
+            if not isinstance(value, basestring):
+                value = unicode(value)
+            return uuid.UUID(value)
+        return value
 
     def to_mongo(self, value):
-        return unicode(value)
+        if not self._binary:
+            return unicode(value)
+        return value
+
+    def prepare_query_value(self, op, value):
+        if value is None:
+            return None
+        return self.to_mongo(value)
 
     def validate(self, value):
         if not isinstance(value, uuid.UUID):
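The new ``binary`` flag in use (a sketch: passing ``binary`` explicitly avoids the FutureWarning emitted when it is left unset; the document and database names are illustrative)::

    import uuid
    from mongoengine import *

    connect('uuidfield_sketch')

    class Person(Document):
        # stored as a unicode string (the pre-0.6.19 behaviour)
        api_key = UUIDField(binary=False)

    class Asset(Document):
        # stored as a bson Binary value (the future default)
        token = UUIDField(binary=True)

    Person(api_key=uuid.uuid4()).save()
    Asset(token=uuid.uuid4()).save()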
@@ -641,7 +641,7 @@ class QuerySet(object):
                 from mongoengine.fields import ReferenceField, GenericReferenceField
                 if isinstance(field, (ReferenceField, GenericReferenceField)):
                     raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
-                if getattr(field, 'field', None):
+                if hasattr(getattr(field, 'field', None), 'lookup_member'):
                     new_field = field.field.lookup_member(field_name)
                 else:
                     # Look up subfield on the previous field
@@ -765,8 +765,22 @@ class QuerySet(object):
                 key = '.'.join(parts)
                 if op is None or key not in mongo_query:
                     mongo_query[key] = value
-                elif key in mongo_query and isinstance(mongo_query[key], dict):
-                    mongo_query[key].update(value)
+                elif key in mongo_query:
+                    if isinstance(mongo_query[key], dict) and isinstance(value, dict):
+                        mongo_query[key].update(value)
+                    elif isinstance(mongo_query[key], list):
+                        mongo_query[key].append(value)
+                    else:
+                        mongo_query[key] = [mongo_query[key], value]
+
+        for k, v in mongo_query.items():
+            if isinstance(v, list):
+                value = [{k:val} for val in v]
+                if '$and' in mongo_query.keys():
+                    mongo_query['$and'].append(value)
+                else:
+                    mongo_query['$and'] = value
+                del mongo_query[k]
 
         return mongo_query
@@ -806,9 +820,9 @@ class QuerySet(object):
         keyword argument called :attr:`defaults`.
 
         .. note:: This requires two separate operations and therefore a
-        race condition exists. Because there are no transactions in mongoDB
-        other approaches should be investigated, to ensure you don't
-        accidently duplicate data when using this method.
+            race condition exists. Because there are no transactions in mongoDB
+            other approaches should be investigated, to ensure you don't
+            accidently duplicate data when using this method.
 
         :param write_options: optional extra keyword arguments used if we
             have to create a new document.
@@ -816,8 +830,8 @@ class QuerySet(object):
 
         :param auto_save: if the object is to be saved automatically if not found.
 
-        .. versionchanged:: 0.6 - added `auto_save`
         .. versionadded:: 0.3
+        .. versionupdated:: 0.6 - added `auto_save`
         """
         defaults = query.get('defaults', {})
         if 'defaults' in query:
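For context, the ``get_or_create`` method documented above is typically used like this (a sketch; the ``Person`` document and database name are illustrative)::

    from mongoengine import *

    connect('get_or_create_sketch')

    class Person(Document):
        name = StringField()
        age = IntField()

    # Returns a (document, created) tuple; `defaults` only applies when
    # a new document has to be created.
    person, created = Person.objects.get_or_create(
        name="Arthur", defaults={'age': 42})

    # Beware the race condition described in the note: two concurrent
    # callers can both miss the lookup and both insert.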
@@ -1154,7 +1168,8 @@ class QuerySet(object):
         .. versionchanged:: 0.5 - Fixed handling references
         """
         from dereference import DeReference
-        return DeReference()(self._cursor.distinct(field), 1)
+        return DeReference()(self._cursor.distinct(field), 1,
+                             name=field, instance=self._document)
 
     def only(self, *fields):
         """Load only a subset of this document's fields. ::
@@ -1861,6 +1876,17 @@ class QuerySet(object):
 
 
 class QuerySetManager(object):
+    """
+    The default QuerySet Manager.
+
+    Custom QuerySet Manager functions can extend this class and users can
+    add extra queryset functionality.  Any custom manager methods must accept a
+    :class:`~mongoengine.Document` class as its first argument, and a
+    :class:`~mongoengine.queryset.QuerySet` as its second argument.
+
+    The method function should return a :class:`~mongoengine.queryset.QuerySet`,
+    probably the same one that was passed in, but modified in some way.
+    """
 
     get_queryset = None
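Under the ``co_argcount`` dispatch shown in the next hunk, a manager method may take either just the queryset or the document class plus the queryset (a sketch; document and database names are illustrative)::

    from mongoengine import *

    connect('manager_sketch')

    class BlogPost(Document):
        deleted = BooleanField(default=False)

        @queryset_manager
        def live_posts(doc_cls, queryset):
            # two arguments, so it is invoked as get_queryset(owner, queryset)
            return queryset(deleted=False)

    BlogPost.drop_collection()
    BlogPost(deleted=True).save()
    BlogPost(deleted=False).save()
    assert BlogPost.live_posts().count() == 1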
@@ -1881,10 +1907,10 @@ class QuerySetManager(object):
             queryset_class = owner._meta['queryset_class'] or QuerySet
             queryset = queryset_class(owner, owner._get_collection())
             if self.get_queryset:
-                var_names = self.get_queryset.func_code.co_varnames
-                if var_names == ('queryset',):
+                arg_count = self.get_queryset.func_code.co_argcount
+                if arg_count == 1:
                     queryset = self.get_queryset(queryset)
-                elif var_names == ('doc_cls', 'queryset',):
+                elif arg_count == 2:
                     queryset = self.get_queryset(owner, queryset)
                 else:
                     queryset = partial(self.get_queryset, owner, queryset)
@@ -5,7 +5,7 @@
 %define srcname mongoengine
 
 Name:           python-%{srcname}
-Version:        0.6.16
+Version:        0.6.20
 Release:        1%{?dist}
 Summary:        A Python Document-Object Mapper for working with MongoDB
@@ -6,6 +6,7 @@ import StringIO
 import tempfile
 import gridfs
 
+from bson import Binary
 from decimal import Decimal
 
 from mongoengine import *
@@ -271,25 +272,56 @@ class FieldTest(unittest.TestCase):
         person.admin = 'Yes'
         self.assertRaises(ValidationError, person.validate)
 
-    def test_uuid_validation(self):
-        """Ensure that invalid values cannot be assigned to UUID fields.
+    def test_uuid_field_string(self):
+        """Test UUID fields storing as String
         """
         class Person(Document):
-            api_key = UUIDField()
+            api_key = UUIDField(binary=False)
+
+        Person.drop_collection()
+
+        uu = uuid.uuid4()
+        Person(api_key=uu).save()
+        self.assertEqual(1, Person.objects(api_key=uu).count())
+        self.assertEqual(uu, Person.objects.first().api_key)
 
         person = Person()
-        # any uuid type is valid
-        person.api_key = uuid.uuid4()
-        person.validate()
-        person.api_key = uuid.uuid1()
-        person.validate()
+        valid = (uuid.uuid4(), uuid.uuid1())
+        for api_key in valid:
+            person.api_key = api_key
+            person.validate()
 
-        # last g cannot belong to an hex number
-        person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g'
-        self.assertRaises(ValidationError, person.validate)
-        # short strings don't validate
-        person.api_key = '9d159858-549b-4975-9f98-dd2f987c113'
-        self.assertRaises(ValidationError, person.validate)
+        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
+                   '9d159858-549b-4975-9f98-dd2f987c113')
+        for api_key in invalid:
+            person.api_key = api_key
+            self.assertRaises(ValidationError, person.validate)
+
+    def test_uuid_field_binary(self):
+        """Test UUID fields storing as Binary object
+        """
+        class Person(Document):
+            api_key = UUIDField(binary=True)
+
+        Person.drop_collection()
+
+        uu = uuid.uuid4()
+        Person(api_key=uu).save()
+        self.assertEqual(1, Person.objects(api_key=uu).count())
+        self.assertEqual(uu, Person.objects.first().api_key)
+
+        person = Person()
+        valid = (uuid.uuid4(), uuid.uuid1())
+        for api_key in valid:
+            person.api_key = api_key
+            person.validate()
+
+        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
+                   '9d159858-549b-4975-9f98-dd2f987c113')
+        for api_key in invalid:
+            person.api_key = api_key
+            self.assertRaises(ValidationError, person.validate)
 
     def test_datetime_validation(self):
         """Ensure that invalid values cannot be assigned to datetime fields.
@@ -928,6 +960,19 @@ class FieldTest(unittest.TestCase):
         doc = self.db.test.find_one()
         self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
 
+    def test_map_field_lookup(self):
+        """Ensure MapField lookups succeed on Fields without a lookup method"""
+
+        class Log(Document):
+            name = StringField()
+            visited = MapField(DateTimeField())
+
+        Log.drop_collection()
+        Log(name="wilson", visited={'friends': datetime.datetime.now()}).save()
+
+        self.assertEqual(1, Log.objects(
+            visited__friends__exists=True).count())
+
     def test_embedded_db_field(self):
 
         class Embedded(EmbeddedDocument):
@@ -1428,7 +1473,7 @@ class FieldTest(unittest.TestCase):
 
         attachment_1 = Attachment.objects().first()
         self.assertEqual(MIME_TYPE, attachment_1.content_type)
-        self.assertEqual(BLOB, attachment_1.blob)
+        self.assertEqual(BLOB, str(attachment_1.blob))
 
         Attachment.drop_collection()
@@ -1455,7 +1500,7 @@ class FieldTest(unittest.TestCase):
 
         attachment_required = AttachmentRequired()
         self.assertRaises(ValidationError, attachment_required.validate)
-        attachment_required.blob = '\xe6\x00\xc4\xff\x07'
+        attachment_required.blob = Binary('\xe6\x00\xc4\xff\x07')
         attachment_required.validate()
 
         attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07')
@@ -1467,6 +1512,18 @@ class FieldTest(unittest.TestCase):
         AttachmentRequired.drop_collection()
         AttachmentSizeLimit.drop_collection()
 
+    def test_binary_field_primary(self):
+
+        class Attachment(Document):
+            id = BinaryField(primary_key=True)
+
+        Attachment.drop_collection()
+
+        att = Attachment(id=uuid.uuid4().bytes).save()
+        att.delete()
+
+        self.assertEqual(0, Attachment.objects.count())
+
     def test_choices_validation(self):
         """Ensure that value is in a container of allowed values.
         """
@@ -1567,13 +1624,13 @@ class FieldTest(unittest.TestCase):
         """Ensure that file fields can be written to and their data retrieved
         """
         class PutFile(Document):
-            file = FileField()
+            the_file = FileField()
 
         class StreamFile(Document):
-            file = FileField()
+            the_file = FileField()
 
         class SetFile(Document):
-            file = FileField()
+            the_file = FileField()
 
         text = 'Hello, World!'
         more_text = 'Foo Bar'
@@ -1584,14 +1641,14 @@ class FieldTest(unittest.TestCase):
         SetFile.drop_collection()
 
         putfile = PutFile()
-        putfile.file.put(text, content_type=content_type)
+        putfile.the_file.put(text, content_type=content_type)
         putfile.save()
         putfile.validate()
         result = PutFile.objects.first()
         self.assertTrue(putfile == result)
-        self.assertEquals(result.file.read(), text)
-        self.assertEquals(result.file.content_type, content_type)
-        result.file.delete()  # Remove file from GridFS
+        self.assertEquals(result.the_file.read(), text)
+        self.assertEquals(result.the_file.content_type, content_type)
+        result.the_file.delete()  # Remove file from GridFS
         PutFile.objects.delete()
 
         # Ensure file-like objects are stored
@@ -1599,53 +1656,53 @@ class FieldTest(unittest.TestCase):
         putstring = StringIO.StringIO()
         putstring.write(text)
         putstring.seek(0)
-        putfile.file.put(putstring, content_type=content_type)
+        putfile.the_file.put(putstring, content_type=content_type)
         putfile.save()
         putfile.validate()
         result = PutFile.objects.first()
         self.assertTrue(putfile == result)
-        self.assertEquals(result.file.read(), text)
-        self.assertEquals(result.file.content_type, content_type)
-        result.file.delete()
+        self.assertEquals(result.the_file.read(), text)
+        self.assertEquals(result.the_file.content_type, content_type)
+        result.the_file.delete()
 
         streamfile = StreamFile()
-        streamfile.file.new_file(content_type=content_type)
-        streamfile.file.write(text)
-        streamfile.file.write(more_text)
-        streamfile.file.close()
+        streamfile.the_file.new_file(content_type=content_type)
+        streamfile.the_file.write(text)
+        streamfile.the_file.write(more_text)
+        streamfile.the_file.close()
         streamfile.save()
         streamfile.validate()
         result = StreamFile.objects.first()
         self.assertTrue(streamfile == result)
-        self.assertEquals(result.file.read(), text + more_text)
-        self.assertEquals(result.file.content_type, content_type)
-        result.file.seek(0)
-        self.assertEquals(result.file.tell(), 0)
-        self.assertEquals(result.file.read(len(text)), text)
-        self.assertEquals(result.file.tell(), len(text))
-        self.assertEquals(result.file.read(len(more_text)), more_text)
-        self.assertEquals(result.file.tell(), len(text + more_text))
-        result.file.delete()
+        self.assertEquals(result.the_file.read(), text + more_text)
+        self.assertEquals(result.the_file.content_type, content_type)
+        result.the_file.seek(0)
+        self.assertEquals(result.the_file.tell(), 0)
+        self.assertEquals(result.the_file.read(len(text)), text)
+        self.assertEquals(result.the_file.tell(), len(text))
+        self.assertEquals(result.the_file.read(len(more_text)), more_text)
+        self.assertEquals(result.the_file.tell(), len(text + more_text))
+        result.the_file.delete()
 
         # Ensure deleted file returns None
-        self.assertTrue(result.file.read() == None)
+        self.assertTrue(result.the_file.read() == None)
 
         setfile = SetFile()
-        setfile.file = text
+        setfile.the_file = text
         setfile.save()
         setfile.validate()
         result = SetFile.objects.first()
         self.assertTrue(setfile == result)
-        self.assertEquals(result.file.read(), text)
+        self.assertEquals(result.the_file.read(), text)
 
         # Try replacing file with new one
-        result.file.replace(more_text)
+        result.the_file.replace(more_text)
         result.save()
         result.validate()
         result = SetFile.objects.first()
         self.assertTrue(setfile == result)
-        self.assertEquals(result.file.read(), more_text)
-        result.file.delete()
+        self.assertEquals(result.the_file.read(), more_text)
+        result.the_file.delete()
 
         PutFile.drop_collection()
         StreamFile.drop_collection()
@@ -1653,7 +1710,7 @@ class FieldTest(unittest.TestCase):
 
         # Make sure FileField is optional and not required
         class DemoFile(Document):
-            file = FileField()
+            the_file = FileField()
         DemoFile.objects.create()
@@ -1704,20 +1761,20 @@ class FieldTest(unittest.TestCase):
         """
         class TestFile(Document):
             name = StringField()
-            file = FileField()
+            the_file = FileField()
 
         # First instance
-        testfile = TestFile()
-        testfile.name = "Hello, World!"
-        testfile.file.put('Hello, World!')
-        testfile.save()
+        test_file = TestFile()
+        test_file.name = "Hello, World!"
+        test_file.the_file.put('Hello, World!')
+        test_file.save()
 
         # Second instance
-        testfiledupe = TestFile()
-        data = testfiledupe.file.read()  # Should be None
+        test_file_dupe = TestFile()
+        data = test_file_dupe.the_file.read()  # Should be None
 
-        self.assertTrue(testfile.name != testfiledupe.name)
-        self.assertTrue(testfile.file.read() != data)
+        self.assertTrue(test_file.name != test_file_dupe.name)
+        self.assertTrue(test_file.the_file.read() != data)
 
         TestFile.drop_collection()
@@ -1725,17 +1782,25 @@ class FieldTest(unittest.TestCase):
         """Ensure that a boolean test of a FileField indicates its presence
         """
         class TestFile(Document):
-            file = FileField()
+            the_file = FileField()
 
-        testfile = TestFile()
-        self.assertFalse(bool(testfile.file))
-        testfile.file = 'Hello, World!'
-        testfile.file.content_type = 'text/plain'
-        testfile.save()
-        self.assertTrue(bool(testfile.file))
+        test_file = TestFile()
+        self.assertFalse(bool(test_file.the_file))
+        test_file.the_file = 'Hello, World!'
+        test_file.the_file.content_type = 'text/plain'
+        test_file.save()
+        self.assertTrue(bool(test_file.the_file))
 
         TestFile.drop_collection()
 
+    def test_file_cmp(self):
+        """Test comparing against other types"""
+        class TestFile(Document):
+            the_file = FileField()
+
+        test_file = TestFile()
+        self.assertFalse(test_file.the_file in [{"test": 1}])
+
     def test_image_field(self):
 
         class TestImage(Document):
@@ -1799,30 +1864,30 @@ class FieldTest(unittest.TestCase):
 
     def test_file_multidb(self):
-        register_connection('testfiles', 'testfiles')
+        register_connection('test_files', 'test_files')
         class TestFile(Document):
             name = StringField()
-            file = FileField(db_alias="testfiles",
-                             collection_name="macumba")
+            the_file = FileField(db_alias="test_files",
+                                 collection_name="macumba")
 
         TestFile.drop_collection()
 
         # delete old filesystem
-        get_db("testfiles").macumba.files.drop()
-        get_db("testfiles").macumba.chunks.drop()
+        get_db("test_files").macumba.files.drop()
+        get_db("test_files").macumba.chunks.drop()
 
         # First instance
-        testfile = TestFile()
-        testfile.name = "Hello, World!"
-        testfile.file.put('Hello, World!',
-                          name="hello.txt")
-        testfile.save()
+        test_file = TestFile()
+        test_file.name = "Hello, World!"
+        test_file.the_file.put('Hello, World!',
+                               name="hello.txt")
+        test_file.save()
 
-        data = get_db("testfiles").macumba.files.find_one()
+        data = get_db("test_files").macumba.files.find_one()
         self.assertEquals(data.get('name'), 'hello.txt')
 
-        testfile = TestFile.objects.first()
-        self.assertEquals(testfile.file.read(),
-                          'Hello, World!')
+        test_file = TestFile.objects.first()
+        self.assertEquals(test_file.the_file.read(),
+                          'Hello, World!')
 
     def test_geo_indexes(self):
@@ -579,6 +579,64 @@ class QuerySetTest(unittest.TestCase):
         Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
         self.assertEqual(Blog.objects.count(), 3)
 
+    def test_get_changed_fields_query_count(self):
+
+        class Person(Document):
+            name = StringField()
+            owns = ListField(ReferenceField('Organization'))
+            projects = ListField(ReferenceField('Project'))
+
+        class Organization(Document):
+            name = StringField()
+            owner = ReferenceField('Person')
+            employees = ListField(ReferenceField('Person'))
+
+        class Project(Document):
+            name = StringField()
+
+        Person.drop_collection()
+        Organization.drop_collection()
+        Project.drop_collection()
+
+        r1 = Project(name="r1").save()
+        r2 = Project(name="r2").save()
+        r3 = Project(name="r3").save()
+        p1 = Person(name="p1", projects=[r1, r2]).save()
+        p2 = Person(name="p2", projects=[r2]).save()
+        o1 = Organization(name="o1", employees=[p1]).save()
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            self.assertEqual(1, q)
+            fresh_o1._get_changed_fields()
+            self.assertEqual(1, q)
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            fresh_o1.save()
+
+            self.assertEquals(q, 2)
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            fresh_o1.save(cascade=False)
+
+            self.assertEquals(q, 2)
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            fresh_o1.employees.append(p2)
+            fresh_o1.save(cascade=False)
+
+            self.assertEquals(q, 3)
+
     def test_slave_okay(self):
         """Ensures that a query can take slave_okay syntax
@@ -769,7 +827,11 @@ class QuerySetTest(unittest.TestCase):
     def test_filter_chaining(self):
         """Ensure filters can be chained together.
         """
+        class Blog(Document):
+            id = StringField(unique=True, primary_key=True)
+
         class BlogPost(Document):
+            blog = ReferenceField(Blog)
             title = StringField()
             is_published = BooleanField()
             published_date = DateTimeField()
@@ -778,13 +840,24 @@ class QuerySetTest(unittest.TestCase):
         def published(doc_cls, queryset):
             return queryset(is_published=True)
 
-        blog_post_1 = BlogPost(title="Blog Post #1",
+        Blog.drop_collection()
+        BlogPost.drop_collection()
+
+        blog_1 = Blog(id="1")
+        blog_2 = Blog(id="2")
+        blog_3 = Blog(id="3")
+
+        blog_1.save()
+        blog_2.save()
+        blog_3.save()
+
+        blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1",
                                is_published = True,
                                published_date=datetime(2010, 1, 5, 0, 0 ,0))
-        blog_post_2 = BlogPost(title="Blog Post #2",
+        blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2",
                                is_published = True,
                                published_date=datetime(2010, 1, 6, 0, 0 ,0))
-        blog_post_3 = BlogPost(title="Blog Post #3",
+        blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3",
                                is_published = True,
                                published_date=datetime(2010, 1, 7, 0, 0 ,0))
@@ -798,7 +871,14 @@ class QuerySetTest(unittest.TestCase):
             published_date__lt=datetime(2010, 1, 7, 0, 0 ,0))
         self.assertEqual(published_posts.count(), 2)
 
+        blog_posts = BlogPost.objects
+        blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2])
+        blog_posts = blog_posts.filter(blog=blog_3)
+        self.assertEqual(blog_posts.count(), 0)
+
         BlogPost.drop_collection()
+        Blog.drop_collection()
 
     def test_ordering(self):
         """Ensure default ordering is applied and can be overridden.
@@ -2219,6 +2299,28 @@ class QuerySetTest(unittest.TestCase):
 
         self.assertEquals(Foo.objects.distinct("bar"), [bar])
 
+    def test_distinct_handles_references_to_alias(self):
+        register_connection('testdb', 'mongoenginetest2')
+
+        class Foo(Document):
+            bar = ReferenceField("Bar")
+            meta = {'db_alias': 'testdb'}
+
+        class Bar(Document):
+            text = StringField()
+            meta = {'db_alias': 'testdb'}
+
+        Bar.drop_collection()
+        Foo.drop_collection()
+
+        bar = Bar(text="hi")
+        bar.save()
+
+        foo = Foo(bar=bar)
+        foo.save()
+
+        self.assertEquals(Foo.objects.distinct("bar"), [bar])
+
     def test_custom_manager(self):
         """Ensure that custom QuerySetManager instances work as expected.
         """
@@ -2228,28 +2330,29 @@ class QuerySetTest(unittest.TestCase):
         date = DateTimeField(default=datetime.now)
 
         @queryset_manager
-        def objects(doc_cls, queryset):
-            return queryset(deleted=False)
+        def objects(cls, qryset):
+            opts = {"deleted": False}
+            return qryset(**opts)
 
         @queryset_manager
-        def music_posts(doc_cls, queryset):
-            return queryset(tags='music', deleted=False).order_by('-date')
+        def music_posts(doc_cls, queryset, deleted=False):
+            return queryset(tags='music',
+                            deleted=deleted).order_by('date')
 
         BlogPost.drop_collection()
 
-        post1 = BlogPost(tags=['music', 'film'])
-        post1.save()
-        post2 = BlogPost(tags=['music'])
-        post2.save()
-        post3 = BlogPost(tags=['film', 'actors'])
-        post3.save()
-        post4 = BlogPost(tags=['film', 'actors'], deleted=True)
-        post4.save()
+        post1 = BlogPost(tags=['music', 'film']).save()
+        post2 = BlogPost(tags=['music']).save()
+        post3 = BlogPost(tags=['film', 'actors']).save()
+        post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save()
 
-        self.assertEqual([p.id for p in BlogPost.objects],
+        self.assertEqual([p.id for p in BlogPost.objects()],
                          [post1.id, post2.id, post3.id])
-        self.assertEqual([p.id for p in BlogPost.music_posts],
-                         [post2.id, post1.id])
+        self.assertEqual([p.id for p in BlogPost.music_posts()],
+                         [post1.id, post2.id])
+
+        self.assertEqual([p.id for p in BlogPost.music_posts(True)],
+                         [post4.id])
 
         BlogPost.drop_collection()