Compare commits

...

49 Commits

Author SHA1 Message Date
Ross Lawley
7a1b110f62 Added Tristan Escalada to authors
refs #hmarr/mongoengine#520
2012-06-23 22:24:09 +01:00
Ross Lawley
db8df057ce Merge pull request #520 from tescalada/patch-1
documentation typo: inheritence
2012-06-23 14:23:07 -07:00
Ross Lawley
5d8ffded40 Fixed issue with embedded_docs and db_fields
Bumped version also
refs: hmarr/mongoengine#523
2012-06-23 22:19:02 +01:00
Ross Lawley
07f3e5356d Updated changelog / AUTHORS
refs: hmarr/mongoengine#522
2012-06-23 21:46:31 +01:00
Ross Lawley
1ece62f960 Merge branch 'unicode-fix' of https://github.com/aparajita/mongoengine 2012-06-23 21:43:09 +01:00
Ross Lawley
056c604dc3 Fixes __repr__ modifying the cursor
Fixes MongoEngine/mongoengine#30
2012-06-22 16:22:27 +01:00
Aparajita Fishman
2d08eec093 Fix conversion of StringField value to unicode, replace outdated (str, unicode) check with unicode 2012-06-21 18:57:14 -07:00
Tristan Escalada
614b590551 documentation typo: inheritence
inheritence corrected to inheritance
only in the documentation, not in the code
2012-06-19 17:08:28 -03:00
Ross Lawley
6d90ce250a Version bump 2012-06-19 17:01:28 +01:00
Ross Lawley
ea31846a19 Fixes scalar lookups for primary_key
fixes hmarr/mongoengine#519
2012-06-19 16:59:18 +01:00
Ross Lawley
e6317776c1 Fixes DBRef handling in _delta
refs: hmarr/mongoengine#518
2012-06-19 16:45:23 +01:00
Ross Lawley
efeaba39a4 Version bump 2012-06-19 14:34:16 +01:00
Ross Lawley
1a97dfd479 Better fix for .save() _delta issue with DbRefs
refs: hmarr/mongoengine#518
2012-06-19 14:05:53 +01:00
Ross Lawley
9fecf2b303 Fixed inconsistency handling None values field attrs
fixes hmarr/mongoengine#505
2012-06-19 11:22:12 +01:00
Ross Lawley
3d0d2f48ad Fixed map_field embedded db_field bug
fixes hmarr/mongoengine#512
2012-06-19 10:57:43 +01:00
Ross Lawley
581605e0e2 Added test case for _delta
refs: hmarr/mongoengine#518
2012-06-19 10:08:56 +01:00
Ross Lawley
45d3a7f6ff Updated Changelog 2012-06-19 09:49:55 +01:00
Ross Lawley
7ca2ea0766 Fixes .save _delta issue with DBRefs
Fixes hmarr/mongoengine#518
2012-06-19 09:49:22 +01:00
Ross Lawley
89220c142b Fixed django test class
refs hmarr/mongoengine#506
2012-06-18 21:18:40 +01:00
Ross Lawley
c73ce3d220 Updated changelog / AUTHORS
refs hmarr/mongoengine#511
2012-06-18 21:13:55 +01:00
Ross Lawley
b0f127af4e Merge branch 'master' of https://github.com/andreyfedoseev/mongoengine 2012-06-18 21:12:52 +01:00
Ross Lawley
766d54795f Merge branch 'master' of https://github.com/MeirKriheli/mongoengine
Conflicts:
	docs/changelog.rst
2012-06-18 21:10:14 +01:00
Ross Lawley
bd41c6eea4 Updated changelog & AUTHORS
refs hmarr/mongoengine#517
2012-06-18 21:04:41 +01:00
Ross Lawley
2435786713 Merge branch 'master' of https://github.com/shaunduncan/mongoengine 2012-06-18 20:55:32 +01:00
Ross Lawley
9e7ea64bd2 Fixed db_field load error
Fixes mongoengine/MongoEngine#45
2012-06-18 20:49:33 +01:00
Ross Lawley
89a6eee6af Fixes cascading saves with filefields
fixes #24 #25
2012-06-18 16:45:14 +01:00
Shaun Duncan
2ec1476e50 Adding test case for self-referencing documents with cascade deletes 2012-06-16 11:05:23 -04:00
Shaun Duncan
2d9b581f34 Adding check if cascade delete is self-referencing. If so, prevent
recursing if there are no objects to evaluate
2012-06-15 15:42:19 -04:00
Harry Marr
5bb63f645b Fix minor typo w/ FloatField 2012-06-08 19:24:10 +02:00
Meir Kriheli
a856c7cc37 Fix formatting of the docstring 2012-06-07 12:36:14 +03:00
Meir Kriheli
26db9d8a9d Documentation for PULL reverse_delete_rule 2012-06-07 12:32:02 +03:00
Meir Kriheli
8060179f6d Implement PULL reverse_delete_rule 2012-06-07 12:16:00 +03:00
Meir Kriheli
77ebd87fed Test PULL reverse_delete_rule 2012-06-07 12:02:19 +03:00
Valentin Gorbunov
e4bc92235d test_save_max_recursion_not_hit_with_file_field added 2012-06-06 15:48:16 +04:00
Ross Lawley
27a4d83ce8 Remove comment - it was wrong 2012-05-29 17:32:41 +01:00
Ross Lawley
ece9b902f8 Setup.py cleanups 2012-05-29 17:32:14 +01:00
Ross Lawley
65a2f8a68b Updated configs 2012-05-29 17:06:03 +01:00
Ross Lawley
9c212306b8 Updated setup / added datetime test 2012-05-29 16:24:25 +01:00
Ross Lawley
1fdc7ce6bb Releasing Version 0.6.10 2012-05-23 08:58:43 +01:00
Andrey Fedoseev
0b22c140c5 Add sensible __eq__ method to EmbeddedDocument 2012-05-22 22:31:59 +06:00
Ross Lawley
944aa45459 Updated changelog 2012-05-21 15:21:45 +01:00
Ross Lawley
c9842ba13a Fix base classes to return
fixes hmarr/mongoengine#507
2012-05-21 15:20:46 +01:00
Ross Lawley
8840680303 Promoted BaseDynamicField to DynamicField
closes mongoengine/mongoengine#22
2012-05-17 21:54:17 +01:00
Ross Lawley
376b9b1316 updated the readme 2012-05-17 21:14:25 +01:00
Ross Lawley
54bb1cb3d9 Updated travis settings and Readme 2012-05-17 16:59:50 +01:00
Ross Lawley
43468b474e Adding travis support 2012-05-17 16:49:13 +01:00
Ross Lawley
28a957c684 Version bump 2012-05-14 12:43:00 +01:00
Ross Lawley
ec5ddbf391 Fixed sparse indexes with inheritance
fixes hmarr/mongoengine#497
2012-05-14 12:06:25 +01:00
Ross Lawley
bab186e195 Reverted document.delete auto gridfs delete 2012-05-14 12:02:07 +01:00
26 changed files with 515 additions and 166 deletions

3
.gitignore vendored
View File

@@ -13,4 +13,5 @@ env/
.settings .settings
.project .project
.pydevproject .pydevproject
tests/bugfix.py tests/test_bugfix.py
htmlcov/

12
.travis.yml Normal file
View File

@@ -0,0 +1,12 @@
# http://travis-ci.org/#!/MongoEngine/mongoengine
language: python
python:
- 2.6
- 2.7
install:
- sudo apt-get install zlib1g zlib1g-dev
- sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/
- pip install PIL --use-mirrors ; true
- python setup.py install
script:
- python setup.py test

View File

@@ -104,4 +104,9 @@ that much better:
* swashbuckler * swashbuckler
* Adam Reeve * Adam Reeve
* Anthony Nemitz * Anthony Nemitz
* deignacio * deignacio
* shaunduncan
* Meir Kriheli
* Andrey Fedoseev
* aparajita
* Tristan Escalada

View File

@@ -5,6 +5,9 @@ MongoEngine
:Author: Harry Marr (http://github.com/hmarr) :Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza) :Maintainer: Ross Lawley (http://github.com/rozza)
.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
:target: http://travis-ci.org/MongoEngine/mongoengine
About About
===== =====
MongoEngine is a Python Object-Document Mapper for working with MongoDB. MongoEngine is a Python Object-Document Mapper for working with MongoDB.
@@ -96,3 +99,4 @@ Contributing
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
contribute to the project, fork it on GitHub and send a pull request, all contribute to the project, fork it on GitHub and send a pull request, all
contributions and suggestions are welcome! contributions and suggestions are welcome!

View File

@@ -2,6 +2,39 @@
Changelog Changelog
========= =========
Changes in 0.6.13
================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixes __repr__ modifying the cursor
Changes in 0.6.12
=================
- Fixes scalar lookups for primary_key
- Fixes error with _delta handling DBRefs
Changes in 0.6.11
==================
- Fixed inconsistency handling None values field attrs
- Fixed map_field embedded db_field issue
- Fixed .save() _delta issue with DbRefs
- Fixed Django TestCase
- Added cmp to Embedded Document
- Added PULL reverse_delete_rule
- Fixed CASCADE delete bug
- Fixed db_field data load error
- Fixed recursive save with FileField
Changes in 0.6.10
=================
- Fixed basedict / baselist to return super(..)
- Promoted BaseDynamicField to DynamicField
Changes in 0.6.9
================
- Fixed sparse indexes on inherited docs
- Removed FileField auto deletion, needs more work maybe 0.7
Changes in 0.6.8 Changes in 0.6.8
================ ================
- Fixed FileField losing reference when no default set - Fixed FileField losing reference when no default set

View File

@@ -289,6 +289,10 @@ Its value can take any of the following constants:
:const:`mongoengine.CASCADE` :const:`mongoengine.CASCADE`
Any object containing fields that are refererring to the object being deleted Any object containing fields that are refererring to the object being deleted
are deleted first. are deleted first.
:const:`mongoengine.PULL`
Removes the reference to the object (using MongoDB's "pull" operation)
from any object's fields of
:class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).
.. warning:: .. warning::

View File

@@ -65,12 +65,13 @@ Deleting stored files is achieved with the :func:`delete` method::
marmot.photo.delete() marmot.photo.delete()
.. note:: .. warning::
The FileField in a Document actually only stores the ID of a file in a The FileField in a Document actually only stores the ID of a file in a
separate GridFS collection. This means that `Animal.drop_collection()` will separate GridFS collection. This means that deleting a document
not delete any files. Care should be taken to manually remove associated with a defined FileField does not actually delete the file. You must be
files before dropping a collection. careful to delete any files in a Document as above before deleting the
Document itself.
Replacing files Replacing files

View File

@@ -12,7 +12,7 @@ from signals import *
__all__ = (document.__all__ + fields.__all__ + connection.__all__ + __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
queryset.__all__ + signals.__all__) queryset.__all__ + signals.__all__)
VERSION = (0, 6, 8) VERSION = (0, 6, 13)
def get_version(): def get_version():

View File

@@ -435,47 +435,6 @@ class ComplexBaseField(BaseField):
owner_document = property(_get_owner_document, _set_owner_document) owner_document = property(_get_owner_document, _set_owner_document)
class BaseDynamicField(BaseField):
"""Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def to_mongo(self, value):
"""Convert a Python type to a MongoDB-compatible type.
"""
if isinstance(value, basestring):
return value
if hasattr(value, 'to_mongo'):
return value.to_mongo()
if not isinstance(value, (dict, list, tuple)):
return value
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
data = {}
for k, v in value.items():
data[k] = self.to_mongo(v)
if is_list: # Convert back to a list
value = [v for k, v in sorted(data.items(), key=operator.itemgetter(0))]
else:
value = data
return value
def lookup_member(self, member_name):
return member_name
def prepare_query_value(self, op, value):
if isinstance(value, basestring):
from mongoengine.fields import StringField
return StringField().prepare_query_value(op, value)
return self.to_mongo(value)
class ObjectIdField(BaseField): class ObjectIdField(BaseField):
"""An field wrapper around MongoDB's ObjectIds. """An field wrapper around MongoDB's ObjectIds.
""" """
@@ -618,10 +577,6 @@ class DocumentMetaclass(type):
raise InvalidDocumentError("Reverse delete rules are not supported for EmbeddedDocuments (field: %s)" % field.name) raise InvalidDocumentError("Reverse delete rules are not supported for EmbeddedDocuments (field: %s)" % field.name)
f.document_type.register_delete_rule(new_class, field.name, delete_rule) f.document_type.register_delete_rule(new_class, field.name, delete_rule)
proxy_class = getattr(field, 'proxy_class', None)
if proxy_class is not None:
new_class.register_proxy_field(field.name, proxy_class)
if field.name and hasattr(Document, field.name) and EmbeddedDocument not in new_class.mro(): if field.name and hasattr(Document, field.name) and EmbeddedDocument not in new_class.mro():
raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name) raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name)
@@ -723,7 +678,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
'index_opts': {}, 'index_opts': {},
'queryset_class': QuerySet, 'queryset_class': QuerySet,
'delete_rules': {}, 'delete_rules': {},
'proxy_fields': {},
'allow_inheritance': True 'allow_inheritance': True
} }
@@ -844,6 +798,7 @@ class BaseDocument(object):
dynamic_data[key] = value dynamic_data[key] = value
else: else:
for key, value in values.items(): for key, value in values.items():
key = self._reverse_db_field_map.get(key, key)
setattr(self, key, value) setattr(self, key, value)
# Set any get_fieldname_display methods # Set any get_fieldname_display methods
@@ -864,7 +819,8 @@ class BaseDocument(object):
field = None field = None
if not hasattr(self, name) and not name.startswith('_'): if not hasattr(self, name) and not name.startswith('_'):
field = BaseDynamicField(db_field=name) from fields import DynamicField
field = DynamicField(db_field=name)
field.name = name field.name = name
self._dynamic_fields[name] = field self._dynamic_fields[name] = field
@@ -877,13 +833,6 @@ class BaseDocument(object):
if hasattr(self, '_changed_fields'): if hasattr(self, '_changed_fields'):
self._mark_as_changed(name) self._mark_as_changed(name)
# Handle None values for required fields
if value is None and name in getattr(self, '_fields', {}):
self._data[name] = value
if hasattr(self, '_changed_fields'):
self._mark_as_changed(name)
return
if not self._created and name in self._meta.get('shard_key', tuple()): if not self._created and name in self._meta.get('shard_key', tuple()):
from queryset import OperationError from queryset import OperationError
raise OperationError("Shard Keys are immutable. Tried to update %s" % name) raise OperationError("Shard Keys are immutable. Tried to update %s" % name)
@@ -1008,6 +957,8 @@ class BaseDocument(object):
try: try:
data[field_name] = (value if value is None data[field_name] = (value if value is None
else field.to_python(value)) else field.to_python(value))
if field_name != field.db_field:
del data[field.db_field]
except (AttributeError, ValueError), e: except (AttributeError, ValueError), e:
errors_dict[field_name] = e errors_dict[field_name] = e
elif field.default: elif field.default:
@@ -1094,13 +1045,16 @@ Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, error
for path in set_fields: for path in set_fields:
parts = path.split('.') parts = path.split('.')
d = doc d = doc
new_path = []
for p in parts: for p in parts:
if hasattr(d, '__getattr__'): if isinstance(d, DBRef):
d = getattr(p, d) break
elif p.isdigit(): elif p.isdigit():
d = d[int(p)] d = d[int(p)]
else: elif hasattr(d, 'get'):
d = d.get(p) d = d.get(p)
new_path.append(p)
path = '.'.join(new_path)
set_data[path] = d set_data[path] = d
else: else:
set_data = doc set_data = doc
@@ -1262,15 +1216,15 @@ class BaseList(list):
def __init__(self, list_items, instance, name): def __init__(self, list_items, instance, name):
self._instance = instance self._instance = instance
self._name = name self._name = name
super(BaseList, self).__init__(list_items) return super(BaseList, self).__init__(list_items)
def __setitem__(self, *args, **kwargs): def __setitem__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseList, self).__setitem__(*args, **kwargs) return super(BaseList, self).__setitem__(*args, **kwargs)
def __delitem__(self, *args, **kwargs): def __delitem__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseList, self).__delitem__(*args, **kwargs) return super(BaseList, self).__delitem__(*args, **kwargs)
def __getstate__(self): def __getstate__(self):
self.observer = None self.observer = None
@@ -1324,23 +1278,23 @@ class BaseDict(dict):
def __init__(self, dict_items, instance, name): def __init__(self, dict_items, instance, name):
self._instance = instance self._instance = instance
self._name = name self._name = name
super(BaseDict, self).__init__(dict_items) return super(BaseDict, self).__init__(dict_items)
def __setitem__(self, *args, **kwargs): def __setitem__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).__setitem__(*args, **kwargs) return super(BaseDict, self).__setitem__(*args, **kwargs)
def __delete__(self, *args, **kwargs): def __delete__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).__delete__(*args, **kwargs) return super(BaseDict, self).__delete__(*args, **kwargs)
def __delitem__(self, *args, **kwargs): def __delitem__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).__delitem__(*args, **kwargs) return super(BaseDict, self).__delitem__(*args, **kwargs)
def __delattr__(self, *args, **kwargs): def __delattr__(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).__delattr__(*args, **kwargs) return super(BaseDict, self).__delattr__(*args, **kwargs)
def __getstate__(self): def __getstate__(self):
self.instance = None self.instance = None
@@ -1353,19 +1307,19 @@ class BaseDict(dict):
def clear(self, *args, **kwargs): def clear(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).clear(*args, **kwargs) return super(BaseDict, self).clear(*args, **kwargs)
def pop(self, *args, **kwargs): def pop(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).pop(*args, **kwargs) return super(BaseDict, self).pop(*args, **kwargs)
def popitem(self, *args, **kwargs): def popitem(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).popitem(*args, **kwargs) return super(BaseDict, self).popitem(*args, **kwargs)
def update(self, *args, **kwargs): def update(self, *args, **kwargs):
self._mark_as_changed() self._mark_as_changed()
super(BaseDict, self).update(*args, **kwargs) return super(BaseDict, self).update(*args, **kwargs)
def _mark_as_changed(self): def _mark_as_changed(self):
if hasattr(self._instance, '_mark_as_changed'): if hasattr(self._instance, '_mark_as_changed'):

View File

@@ -114,7 +114,7 @@ class DeReference(object):
doc = get_document(ref["_cls"])._from_son(ref) doc = get_document(ref["_cls"])._from_son(ref)
elif doc_type is None: elif doc_type is None:
doc = get_document( doc = get_document(
''.join(x.capitalize() ''.join(x.capitalize()
for x in col.split('_')))._from_son(ref) for x in col.split('_')))._from_son(ref)
else: else:
doc = doc_type._from_son(ref) doc = doc_type._from_son(ref)

View File

@@ -10,7 +10,7 @@ class MongoTestCase(TestCase):
""" """
db_name = 'test_%s' % settings.MONGO_DATABASE_NAME db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
def __init__(self, methodName='runtest'): def __init__(self, methodName='runtest'):
self.db = connect(self.db_name) self.db = connect(self.db_name).get_db()
super(MongoTestCase, self).__init__(methodName) super(MongoTestCase, self).__init__(methodName)
def _post_teardown(self): def _post_teardown(self):

View File

@@ -1,4 +1,5 @@
import pymongo import pymongo
from bson.dbref import DBRef from bson.dbref import DBRef
from mongoengine import signals from mongoengine import signals
@@ -39,6 +40,11 @@ class EmbeddedDocument(BaseDocument):
else: else:
super(EmbeddedDocument, self).__delattr__(*args, **kwargs) super(EmbeddedDocument, self).__delattr__(*args, **kwargs)
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._data == other._data
return False
class Document(BaseDocument): class Document(BaseDocument):
"""The base class used for defining the structure and properties of """The base class used for defining the structure and properties of
@@ -81,7 +87,7 @@ class Document(BaseDocument):
system. system.
By default, _types will be added to the start of every index (that By default, _types will be added to the start of every index (that
doesn't contain a list) if allow_inheritence is True. This can be doesn't contain a list) if allow_inheritance is True. This can be
disabled by either setting types to False on the specific index or disabled by either setting types to False on the specific index or
by setting index_types to False on the meta dictionary for the document. by setting index_types to False on the meta dictionary for the document.
""" """
@@ -220,6 +226,7 @@ class Document(BaseDocument):
if cascade_kwargs: # Allow granular control over cascades if cascade_kwargs: # Allow granular control over cascades
kwargs.update(cascade_kwargs) kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs kwargs['_refs'] = _refs
#self._changed_fields = []
self.cascade_save(**kwargs) self.cascade_save(**kwargs)
except pymongo.errors.OperationFailure, err: except pymongo.errors.OperationFailure, err:
@@ -239,6 +246,7 @@ class Document(BaseDocument):
"""Recursively saves any references / generic references on an object""" """Recursively saves any references / generic references on an object"""
from fields import ReferenceField, GenericReferenceField from fields import ReferenceField, GenericReferenceField
_refs = kwargs.get('_refs', []) or [] _refs = kwargs.get('_refs', []) or []
for name, cls in self._fields.items(): for name, cls in self._fields.items():
if not isinstance(cls, (ReferenceField, GenericReferenceField)): if not isinstance(cls, (ReferenceField, GenericReferenceField)):
continue continue
@@ -278,11 +286,6 @@ class Document(BaseDocument):
signals.pre_delete.send(self.__class__, document=self) signals.pre_delete.send(self.__class__, document=self)
try: try:
for field_name in self._meta['proxy_fields']:
proxy_class = self._meta['proxy_fields'][field_name]
if hasattr(proxy_class, 'delete'):
proxy = getattr(self, field_name)
proxy.delete()
self.__class__.objects(pk=self.pk).delete(safe=safe) self.__class__.objects(pk=self.pk).delete(safe=safe)
except pymongo.errors.OperationFailure, err: except pymongo.errors.OperationFailure, err:
message = u'Could not delete document (%s)' % err.message message = u'Could not delete document (%s)' % err.message
@@ -347,13 +350,6 @@ class Document(BaseDocument):
""" """
cls._meta['delete_rules'][(document_cls, field_name)] = rule cls._meta['delete_rules'][(document_cls, field_name)] = rule
@classmethod
def register_proxy_field(cls, field_name, proxy_class):
"""This method registers fields with proxy classes to delete them when
removing this object.
"""
cls._meta['proxy_fields'][field_name] = proxy_class
@classmethod @classmethod
def drop_collection(cls): def drop_collection(cls):
"""Drops the entire collection associated with this """Drops the entire collection associated with this
@@ -371,7 +367,7 @@ class DynamicDocument(Document):
way as an ordinary document but has expando style properties. Any data way as an ordinary document but has expando style properties. Any data
passed or set against the :class:`~mongoengine.DynamicDocument` that is passed or set against the :class:`~mongoengine.DynamicDocument` that is
not a field is automatically converted into a not a field is automatically converted into a
:class:`~mongoengine.BaseDynamicField` and data can be attributed to that :class:`~mongoengine.DynamicField` and data can be attributed to that
field. field.
..note:: ..note::

View File

@@ -30,7 +30,7 @@ except ImportError:
__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', 'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
'DecimalField', 'ComplexDateTimeField', 'URLField', 'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField',
'GenericReferenceField', 'FileField', 'BinaryField', 'GenericReferenceField', 'FileField', 'BinaryField',
'SortedListField', 'EmailField', 'GeoPointField', 'ImageField', 'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField'] 'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']
@@ -49,10 +49,13 @@ class StringField(BaseField):
super(StringField, self).__init__(**kwargs) super(StringField, self).__init__(**kwargs)
def to_python(self, value): def to_python(self, value):
return unicode(value) if isinstance(value, unicode):
return value
else:
return value.decode('utf-8')
def validate(self, value): def validate(self, value):
if not isinstance(value, (str, unicode)): if not isinstance(value, basestring):
self.error('StringField only accepts string values') self.error('StringField only accepts string values')
if self.max_length is not None and len(value) > self.max_length: if self.max_length is not None and len(value) > self.max_length:
@@ -182,7 +185,7 @@ class FloatField(BaseField):
if isinstance(value, int): if isinstance(value, int):
value = float(value) value = float(value)
if not isinstance(value, float): if not isinstance(value, float):
self.error('FoatField only accepts float values') self.error('FloatField only accepts float values')
if self.min_value is not None and value < self.min_value: if self.min_value is not None and value < self.min_value:
self.error('Float value is too small') self.error('Float value is too small')
@@ -369,7 +372,7 @@ class ComplexDateTimeField(StringField):
return self._convert_from_string(data) return self._convert_from_string(data)
def __set__(self, instance, value): def __set__(self, instance, value):
value = self._convert_from_datetime(value) value = self._convert_from_datetime(value) if value else value
return super(ComplexDateTimeField, self).__set__(instance, value) return super(ComplexDateTimeField, self).__set__(instance, value)
def validate(self, value): def validate(self, value):
@@ -473,6 +476,47 @@ class GenericEmbeddedDocumentField(BaseField):
return data return data
class DynamicField(BaseField):
"""Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def to_mongo(self, value):
"""Convert a Python type to a MongoDB-compatible type.
"""
if isinstance(value, basestring):
return value
if hasattr(value, 'to_mongo'):
return value.to_mongo()
if not isinstance(value, (dict, list, tuple)):
return value
is_list = False
if not hasattr(value, 'items'):
is_list = True
value = dict([(k, v) for k, v in enumerate(value)])
data = {}
for k, v in value.items():
data[k] = self.to_mongo(v)
if is_list: # Convert back to a list
value = [v for k, v in sorted(data.items(), key=itemgetter(0))]
else:
value = data
return value
def lookup_member(self, member_name):
return member_name
def prepare_query_value(self, op, value):
if isinstance(value, basestring):
from mongoengine.fields import StringField
return StringField().prepare_query_value(op, value)
return self.to_mongo(value)
class ListField(ComplexBaseField): class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances """A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database. of the field to be used as a list in the database.
@@ -615,6 +659,7 @@ class ReferenceField(BaseField):
* NULLIFY - Updates the reference to null. * NULLIFY - Updates the reference to null.
* CASCADE - Deletes the documents associated with the reference. * CASCADE - Deletes the documents associated with the reference.
* DENY - Prevent the deletion of the reference object. * DENY - Prevent the deletion of the reference object.
* PULL - Pull the reference from a :class:`~mongoengine.ListField` of references
Alternative syntax for registering delete rules (useful when implementing Alternative syntax for registering delete rules (useful when implementing
bi-directional delete rules) bi-directional delete rules)
@@ -848,6 +893,13 @@ class GridFSProxy(object):
self_dict['_fs'] = None self_dict['_fs'] = None
return self_dict return self_dict
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
def __cmp__(self, other):
return cmp((self.grid_id, self.collection_name, self.db_alias),
(other.grid_id, other.collection_name, other.db_alias))
@property @property
def fs(self): def fs(self):
if not self._fs: if not self._fs:

View File

@@ -10,7 +10,7 @@ from bson.code import Code
from mongoengine import signals from mongoengine import signals
__all__ = ['queryset_manager', 'Q', 'InvalidQueryError', __all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL']
# The maximum number of items to display in a QuerySet.__repr__ # The maximum number of items to display in a QuerySet.__repr__
@@ -21,6 +21,7 @@ DO_NOTHING = 0
NULLIFY = 1 NULLIFY = 1
CASCADE = 2 CASCADE = 2
DENY = 3 DENY = 3
PULL = 4
class DoesNotExist(Exception): class DoesNotExist(Exception):
@@ -340,6 +341,7 @@ class QuerySet(object):
self._timeout = True self._timeout = True
self._class_check = True self._class_check = True
self._slave_okay = False self._slave_okay = False
self._iter = False
self._scalar = [] self._scalar = []
# If inheritance is allowed, only return instances and instances of # If inheritance is allowed, only return instances and instances of
@@ -512,6 +514,10 @@ class QuerySet(object):
key = '.'.join(parts) key = '.'.join(parts)
index_list.append((key, direction)) index_list.append((key, direction))
# If sparse - dont include types
if spec.get('sparse', False):
use_types = False
# Check if a list field is being used, don't use _types if it is # Check if a list field is being used, don't use _types if it is
if use_types and not all(f._index_with_types for f in fields): if use_types and not all(f._index_with_types for f in fields):
use_types = False use_types = False
@@ -615,6 +621,7 @@ class QuerySet(object):
"Can't use index on unsubscriptable field (%s)" % err) "Can't use index on unsubscriptable field (%s)" % err)
fields.append(field_name) fields.append(field_name)
continue continue
if field is None: if field is None:
# Look up first field from the document # Look up first field from the document
if field_name == 'pk': if field_name == 'pk':
@@ -623,8 +630,8 @@ class QuerySet(object):
if field_name in document._fields: if field_name in document._fields:
field = document._fields[field_name] field = document._fields[field_name]
elif document._dynamic: elif document._dynamic:
from base import BaseDynamicField from fields import DynamicField
field = BaseDynamicField(db_field=field_name) field = DynamicField(db_field=field_name)
else: else:
raise InvalidQueryError('Cannot resolve field "%s"' raise InvalidQueryError('Cannot resolve field "%s"'
% field_name) % field_name)
@@ -632,8 +639,11 @@ class QuerySet(object):
from mongoengine.fields import ReferenceField, GenericReferenceField from mongoengine.fields import ReferenceField, GenericReferenceField
if isinstance(field, (ReferenceField, GenericReferenceField)): if isinstance(field, (ReferenceField, GenericReferenceField)):
raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
# Look up subfield on the previous field if getattr(field, 'field', None):
new_field = field.lookup_member(field_name) new_field = field.field.lookup_member(field_name)
else:
# Look up subfield on the previous field
new_field = field.lookup_member(field_name)
from base import ComplexBaseField from base import ComplexBaseField
if not new_field and isinstance(field, ComplexBaseField): if not new_field and isinstance(field, ComplexBaseField):
fields.append(field_name) fields.append(field_name)
@@ -944,6 +954,7 @@ class QuerySet(object):
def next(self): def next(self):
"""Wrap the result in a :class:`~mongoengine.Document` object. """Wrap the result in a :class:`~mongoengine.Document` object.
""" """
self._iter = True
try: try:
if self._limit == 0: if self._limit == 0:
raise StopIteration raise StopIteration
@@ -960,6 +971,7 @@ class QuerySet(object):
.. versionadded:: 0.3 .. versionadded:: 0.3
""" """
self._iter = False
self._cursor.rewind() self._cursor.rewind()
def count(self): def count(self):
@@ -1310,11 +1322,17 @@ class QuerySet(object):
document_cls, field_name = rule_entry document_cls, field_name = rule_entry
rule = doc._meta['delete_rules'][rule_entry] rule = doc._meta['delete_rules'][rule_entry]
if rule == CASCADE: if rule == CASCADE:
document_cls.objects(**{field_name + '__in': self}).delete(safe=safe) ref_q = document_cls.objects(**{field_name + '__in': self})
if doc != document_cls or (doc == document_cls and ref_q.count() > 0):
ref_q.delete(safe=safe)
elif rule == NULLIFY: elif rule == NULLIFY:
document_cls.objects(**{field_name + '__in': self}).update( document_cls.objects(**{field_name + '__in': self}).update(
safe_update=safe, safe_update=safe,
**{'unset__%s' % field_name: 1}) **{'unset__%s' % field_name: 1})
elif rule == PULL:
document_cls.objects(**{field_name + '__in': self}).update(
safe_update=safe,
**{'pull_all__%s' % field_name: self})
self._collection.remove(self._query, safe=safe) self._collection.remove(self._query, safe=safe)
@@ -1481,8 +1499,6 @@ class QuerySet(object):
def lookup(obj, name): def lookup(obj, name):
chunks = name.split('__') chunks = name.split('__')
for chunk in chunks: for chunk in chunks:
if hasattr(obj, '_db_field_map'):
chunk = obj._db_field_map.get(chunk, chunk)
obj = getattr(obj, chunk) obj = getattr(obj, chunk)
return obj return obj
@@ -1795,21 +1811,24 @@ class QuerySet(object):
return data return data
def __repr__(self): def __repr__(self):
limit = REPR_OUTPUT_SIZE + 1 """Provides the string representation of the QuerySet
start = (0 if self._skip is None else self._skip)
if self._limit is None: .. versionchanged:: 0.6.13 Now doesnt modify the cursor
stop = start + limit """
if self._limit is not None:
if self._limit - start > limit: if self._iter:
stop = start + limit return '.. queryset mid-iteration ..'
else:
stop = self._limit data = []
try: for i in xrange(REPR_OUTPUT_SIZE + 1):
data = list(self[start:stop]) try:
except pymongo.errors.InvalidOperation: data.append(self.next())
return ".. queryset mid-iteration .." except StopIteration:
break
if len(data) > REPR_OUTPUT_SIZE: if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..." data[-1] = "...(remaining elements truncated)..."
self.rewind()
return repr(data) return repr(data)
def select_related(self, max_depth=1): def select_related(self, max_depth=1):

View File

@@ -5,7 +5,7 @@
%define srcname mongoengine %define srcname mongoengine
Name: python-%{srcname} Name: python-%{srcname}
Version: 0.6.8 Version: 0.6.13
Release: 1%{?dist} Release: 1%{?dist}
Summary: A Python Document-Object Mapper for working with MongoDB Summary: A Python Document-Object Mapper for working with MongoDB

13
setup.cfg Normal file
View File

@@ -0,0 +1,13 @@
[aliases]
test = nosetests
[nosetests]
verbosity = 2
detailed-errors = 1
#with-coverage = 1
cover-html = 1
cover-html-dir = ../htmlcov
cover-package = mongoengine
cover-erase = 1
where = tests
#tests = test_bugfix.py

View File

@@ -48,6 +48,5 @@ setup(name='mongoengine',
platforms=['any'], platforms=['any'],
classifiers=CLASSIFIERS, classifiers=CLASSIFIERS,
install_requires=['pymongo'], install_requires=['pymongo'],
test_suite='tests', tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
tests_require=['blinker', 'django>=1.3', 'PIL']
) )

View File

@@ -1,8 +1,11 @@
import unittest import datetime
import pymongo import pymongo
import unittest
import mongoengine.connection import mongoengine.connection
from bson.tz_util import utc
from mongoengine import * from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError from mongoengine.connection import get_db, get_connection, ConnectionError
@@ -70,11 +73,26 @@ class ConnectionTest(unittest.TestCase):
""" """
connect('mongoenginetest', alias='t1', tz_aware=True) connect('mongoenginetest', alias='t1', tz_aware=True)
conn = get_connection('t1') conn = get_connection('t1')
self.assertTrue(conn.tz_aware) self.assertTrue(conn.tz_aware)
connect('mongoenginetest2', alias='t2') connect('mongoenginetest2', alias='t2')
conn = get_connection('t2') conn = get_connection('t2')
self.assertFalse(conn.tz_aware) self.assertFalse(conn.tz_aware)
def test_datetime(self):
connect('mongoenginetest', tz_aware=True)
d = datetime.datetime(2010, 5, 5, tzinfo=utc)
class DateDoc(Document):
the_date = DateTimeField(required=True)
DateDoc.drop_collection()
DateDoc(the_date=d).save()
date_doc = DateDoc.objects.first()
self.assertEqual(d, date_doc.the_date)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@@ -1,3 +1,4 @@
import os
import pickle import pickle
import pymongo import pymongo
import bson import bson
@@ -6,13 +7,15 @@ import warnings
from datetime import datetime from datetime import datetime
from fixtures import Base, Mixin, PickleEmbedded, PickleTest from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest
from mongoengine import * from mongoengine import *
from mongoengine.base import NotRegistered, InvalidDocumentError from mongoengine.base import NotRegistered, InvalidDocumentError
from mongoengine.queryset import InvalidQueryError from mongoengine.queryset import InvalidQueryError
from mongoengine.connection import get_db from mongoengine.connection import get_db
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
class DocumentTest(unittest.TestCase): class DocumentTest(unittest.TestCase):
@@ -661,6 +664,26 @@ class DocumentTest(unittest.TestCase):
BlogPost.drop_collection() BlogPost.drop_collection()
def test_db_field_load(self):
"""Ensure we load data correctly
"""
class Person(Document):
name = StringField(required=True)
_rank = StringField(required=False, db_field="rank")
@property
def rank(self):
return self._rank or "Private"
Person.drop_collection()
Person(name="Jack", _rank="Corporal").save()
Person(name="Fred").save()
self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal")
self.assertEquals(Person.objects.get(name="Fred").rank, "Private")
def test_explicit_geo2d_index(self): def test_explicit_geo2d_index(self):
"""Ensure that geo2d indexes work when created via meta[indexes] """Ensure that geo2d indexes work when created via meta[indexes]
""" """
@@ -1259,6 +1282,22 @@ class DocumentTest(unittest.TestCase):
comment.date = datetime.now() comment.date = datetime.now()
comment.validate() comment.validate()
def test_embedded_db_field_validate(self):
class SubDoc(EmbeddedDocument):
val = IntField()
class Doc(Document):
e = EmbeddedDocumentField(SubDoc, db_field='eb')
Doc.drop_collection()
Doc(e=SubDoc(val=15)).save()
doc = Doc.objects.first()
doc.validate()
self.assertEquals([None, 'e'], doc._data.keys())
def test_save(self): def test_save(self):
"""Ensure that a document may be saved in the database. """Ensure that a document may be saved in the database.
""" """
@@ -1328,6 +1367,30 @@ class DocumentTest(unittest.TestCase):
p0.name = 'wpjunior' p0.name = 'wpjunior'
p0.save() p0.save()
def test_save_max_recursion_not_hit_with_file_field(self):
class Foo(Document):
name = StringField()
picture = FileField()
bar = ReferenceField('self')
Foo.drop_collection()
a = Foo(name='hello')
a.save()
a.bar = a
a.picture = open(TEST_IMAGE_PATH, 'rb')
a.save()
# Confirm can save and it resets the changed fields without hitting
# max recursion error
b = Foo.objects.with_id(a.id)
b.name='world'
b.save()
self.assertEquals(b.picture, b.bar.picture, b.bar.bar.picture)
def test_save_cascades(self): def test_save_cascades(self):
class Person(Document): class Person(Document):
@@ -1591,6 +1654,77 @@ class DocumentTest(unittest.TestCase):
site = Site.objects.first() site = Site.objects.first()
self.assertEqual(site.page.log_message, "Error: Dummy message") self.assertEqual(site.page.log_message, "Error: Dummy message")
def test_circular_reference_deltas(self):
class Person(Document):
name = StringField()
owns = ListField(ReferenceField('Organization'))
class Organization(Document):
name = StringField()
owner = ReferenceField('Person')
Person.drop_collection()
Organization.drop_collection()
person = Person(name="owner")
person.save()
organization = Organization(name="company")
organization.save()
person.owns.append(organization)
organization.owner = person
person.save()
organization.save()
p = Person.objects[0].select_related()
o = Organization.objects.first()
self.assertEquals(p.owns[0], o)
self.assertEquals(o.owner, p)
def test_circular_reference_deltas_2(self):
class Person( Document ):
name = StringField()
owns = ListField( ReferenceField( 'Organization' ) )
employer = ReferenceField( 'Organization' )
class Organization( Document ):
name = StringField()
owner = ReferenceField( 'Person' )
employees = ListField( ReferenceField( 'Person' ) )
Person.drop_collection()
Organization.drop_collection()
person = Person( name="owner" )
person.save()
employee = Person( name="employee" )
employee.save()
organization = Organization( name="company" )
organization.save()
person.owns.append( organization )
organization.owner = person
organization.employees.append( employee )
employee.employer = organization
person.save()
organization.save()
employee.save()
p = Person.objects.get(name="owner")
e = Person.objects.get(name="employee")
o = Organization.objects.first()
self.assertEquals(p.owns[0], o)
self.assertEquals(o.owner, p)
self.assertEquals(e.employer, o)
def test_delta(self): def test_delta(self):
class Doc(Document): class Doc(Document):

View File

@@ -82,7 +82,6 @@ class FieldTest(unittest.TestCase):
# Retrive data from db and verify it. # Retrive data from db and verify it.
ret = HandleNoneFields.objects.all()[0] ret = HandleNoneFields.objects.all()[0]
self.assertEqual(ret.str_fld, None) self.assertEqual(ret.str_fld, None)
self.assertEqual(ret.int_fld, None) self.assertEqual(ret.int_fld, None)
self.assertEqual(ret.flt_fld, None) self.assertEqual(ret.flt_fld, None)
@@ -913,6 +912,48 @@ class FieldTest(unittest.TestCase):
Extensible.drop_collection() Extensible.drop_collection()
def test_embedded_mapfield_db_field(self):
class Embedded(EmbeddedDocument):
number = IntField(default=0, db_field='i')
class Test(Document):
my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field='x')
Test.drop_collection()
test = Test()
test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
test.save()
Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)
test = Test.objects.get()
self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
doc = self.db.test.find_one()
self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
def test_embedded_db_field(self):
class Embedded(EmbeddedDocument):
number = IntField(default=0, db_field='i')
class Test(Document):
embedded = EmbeddedDocumentField(Embedded, db_field='x')
Test.drop_collection()
test = Test()
test.embedded = Embedded(number=1)
test.save()
Test.objects.update_one(inc__embedded__number=1)
test = Test.objects.get()
self.assertEqual(test.embedded.number, 2)
doc = self.db.test.find_one()
self.assertEqual(doc['x']['i'], 2)
def test_embedded_document_validation(self): def test_embedded_document_validation(self):
"""Ensure that invalid embedded documents cannot be assigned to """Ensure that invalid embedded documents cannot be assigned to
embedded document fields. embedded document fields.
@@ -1620,38 +1661,6 @@ class FieldTest(unittest.TestCase):
file = FileField() file = FileField()
DemoFile.objects.create() DemoFile.objects.create()
def test_file_delete_cleanup(self):
"""Ensure that the gridfs file is deleted when a document
with a GridFSProxied Field is deleted"""
class TestFile(Document):
file = FileField()
class TestImage(Document):
image = ImageField()
TestFile.drop_collection()
testfile = TestFile()
testfile.file.put('Hello, World!')
testfile.save()
testfile_grid_id = testfile.file.grid_id
testfile_fs = testfile.file.fs
testfile.delete()
self.assertFalse(testfile_fs.exists(testfile_grid_id))
TestImage.drop_collection()
testimage = TestImage()
testimage.image.put(open(TEST_IMAGE_PATH, 'r'))
testimage.save()
testimage_grid_id = testimage.image.grid_id
testimage_fs = testimage.image.fs
testimage.delete()
self.assertFalse(testimage_fs.exists(testimage_grid_id))
def test_file_field_no_default(self): def test_file_field_no_default(self):

View File

@@ -636,17 +636,38 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(people1, people2) self.assertEqual(people1, people2)
self.assertEqual(people1, people3) self.assertEqual(people1, people3)
def test_repr_iteration(self): def test_repr(self):
"""Ensure that QuerySet __repr__ can handle loops """Test repr behavior isnt destructive"""
"""
self.Person(name='Person 1').save()
self.Person(name='Person 2').save()
queryset = self.Person.objects class Doc(Document):
self.assertEquals('[<Person: Person object>, <Person: Person object>]', repr(queryset)) number = IntField()
for person in queryset:
self.assertEquals('.. queryset mid-iteration ..', repr(queryset))
def __repr__(self):
return "<Doc: %s>" % self.number
Doc.drop_collection()
for i in xrange(1000):
Doc(number=i).save()
docs = Doc.objects.order_by('number')
self.assertEquals(docs.count(), 1000)
self.assertEquals(len(docs), 1000)
docs_string = "%s" % docs
self.assertTrue("Doc: 0" in docs_string)
self.assertEquals(docs.count(), 1000)
self.assertEquals(len(docs), 1000)
# Limit and skip
self.assertEquals('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4])
self.assertEquals(docs.count(), 3)
self.assertEquals(len(docs), 3)
for doc in docs:
self.assertEqual('.. queryset mid-iteration ..', repr(docs))
def test_regex_query_shortcuts(self): def test_regex_query_shortcuts(self):
"""Ensure that contains, startswith, endswith, etc work. """Ensure that contains, startswith, endswith, etc work.
@@ -1344,6 +1365,37 @@ class QuerySetTest(unittest.TestCase):
self.Person.objects(name='Test User').delete() self.Person.objects(name='Test User').delete()
self.assertEqual(1, BlogPost.objects.count()) self.assertEqual(1, BlogPost.objects.count())
def test_reverse_delete_rule_cascade_self_referencing(self):
"""Ensure self-referencing CASCADE deletes do not result in infinite loop
"""
class Category(Document):
name = StringField()
parent = ReferenceField('self', reverse_delete_rule=CASCADE)
num_children = 3
base = Category(name='Root')
base.save()
# Create a simple parent-child tree
for i in range(num_children):
child_name = 'Child-%i' % i
child = Category(name=child_name, parent=base)
child.save()
for i in range(num_children):
child_child_name = 'Child-Child-%i' % i
child_child = Category(name=child_child_name, parent=child)
child_child.save()
tree_size = 1 + num_children + (num_children * num_children)
self.assertEquals(tree_size, Category.objects.count())
self.assertEquals(num_children, Category.objects(parent=base).count())
# The delete should effectively wipe out the Category collection
# without resulting in infinite parent-child cascade recursion
base.delete()
self.assertEquals(0, Category.objects.count())
def test_reverse_delete_rule_nullify(self): def test_reverse_delete_rule_nullify(self):
"""Ensure nullification of references to deleted documents. """Ensure nullification of references to deleted documents.
""" """
@@ -1388,6 +1440,36 @@ class QuerySetTest(unittest.TestCase):
self.assertRaises(OperationError, self.Person.objects.delete) self.assertRaises(OperationError, self.Person.objects.delete)
def test_reverse_delete_rule_pull(self):
"""Ensure pulling of references to deleted documents.
"""
class BlogPost(Document):
content = StringField()
authors = ListField(ReferenceField(self.Person,
reverse_delete_rule=PULL))
BlogPost.drop_collection()
self.Person.drop_collection()
me = self.Person(name='Test User')
me.save()
someoneelse = self.Person(name='Some-one Else')
someoneelse.save()
post = BlogPost(content='Watching TV', authors=[me, someoneelse])
post.save()
another = BlogPost(content='Chilling Out', authors=[someoneelse])
another.save()
someoneelse.delete()
post.reload()
another.reload()
self.assertEqual(post.authors, [me])
self.assertEqual(another.authors, [])
def test_update(self): def test_update(self):
"""Ensure that atomic updates work properly. """Ensure that atomic updates work properly.
""" """
@@ -2945,6 +3027,19 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(plist[1], (20, False)) self.assertEqual(plist[1], (20, False))
self.assertEqual(plist[2], (30, True)) self.assertEqual(plist[2], (30, True))
def test_scalar_primary_key(self):
class SettingValue(Document):
key = StringField(primary_key=True)
value = StringField()
SettingValue.drop_collection()
s = SettingValue(key="test", value="test value")
s.save()
val = SettingValue.objects.scalar('key', 'value')
self.assertEqual(list(val), [('test', 'test value')])
def test_scalar_cursor_behaviour(self): def test_scalar_cursor_behaviour(self):
"""Ensure that a query returns a valid set of results. """Ensure that a query returns a valid set of results.
""" """