Merge branch 'master' into 0.7
commit d582394a42
.travis.yml | 11
@@ -6,10 +6,19 @@ python:
 - 2.7
 - 3.1
 - 3.2
 env:
 - PYMONGO=dev
+- PYMONGO=2.3
 - PYMONGO=2.2
 install:
+- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi
+- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi
+- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
 - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
 - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
 - python setup.py install
 script:
 - python setup.py test
 notifications:
   irc: "irc.freenode.org#mongoengine"
AUTHORS | 1
@@ -122,3 +122,4 @@ that much better:
 * Sergey Nikitin
 * psychogenic
 * Stefan Wójcik
+* dimonb
docs/changelog.rst
@@ -4,6 +4,10 @@ Changelog
 
 Changes in 0.7.X
 =================
+- Updated queryset.delete so it can be used with skip / limit (MongoEngine/mongoengine#107)
+- Updated index creation so kwargs are passed through to pymongo (MongoEngine/mongoengine#104)
+- Fixed Q object merge edge case (MongoEngine/mongoengine#109)
+- Fixed reloading on sharded documents (hmarr/mongoengine#569)
 - Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62)
 - Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92)
 - Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88)
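The NotUniqueError entry above can be illustrated with a short sketch. This is not part of the diff; the import location, the connect call, and the User model are assumptions for 0.7-era MongoEngine:

    # Saving a second document with a duplicate unique key should raise
    # NotUniqueError rather than a generic OperationError.
    from mongoengine import Document, StringField, connect
    from mongoengine.queryset import NotUniqueError  # assumed 0.7 location

    connect('example_db')  # hypothetical database name

    class User(Document):
        email = StringField(unique=True)

    User.drop_collection()
    User(email='a@example.com').save()
    try:
        User(email='a@example.com').save()  # violates the unique index
    except NotUniqueError:
        print 'duplicate key rejected'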
mongoengine/base.py
@@ -11,7 +11,7 @@ from queryset import DoesNotExist, MultipleObjectsReturned
 from queryset import DO_NOTHING
 
 from mongoengine import signals
-from mongoengine.python_support import (PY3, PY25, txt_type,
+from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type,
                                         to_str_keys_recursive)
 
 import pymongo
@@ -949,8 +949,10 @@ class BaseDocument(object):
             self._data[name] = value
             if hasattr(self, '_changed_fields'):
                 self._mark_as_changed(name)
 
         if (self._is_document and not self._created and
-           name in self._meta.get('shard_key', tuple())):
+           name in self._meta.get('shard_key', tuple()) and
+           self._data.get(name) != value):
             OperationError = _import_class('OperationError')
             msg = "Shard Keys are immutable. Tried to update %s" % name
             raise OperationError(msg)
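The extra "self._data.get(name) != value" clause narrows the shard-key guard: re-assigning an unchanged value, which reload() does internally, no longer raises; only genuine mutations do. A minimal sketch, assuming a connection and a 0.7-era import path for OperationError:

    from mongoengine import Document, StringField, connect
    from mongoengine.queryset import OperationError  # assumed 0.7 location

    connect('example_db')  # hypothetical database name

    class Animal(Document):
        superphylum = StringField()
        meta = {'shard_key': ('superphylum',)}

    a = Animal(superphylum='Deuterostomia')
    a.save()
    a.superphylum = 'Deuterostomia'    # same value: now allowed
    try:
        a.superphylum = 'Protostomia'  # changed value: still rejected
    except OperationError:
        print 'shard keys are immutable'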
@@ -1052,9 +1054,9 @@ class BaseDocument(object):
         # class if unavailable
         class_name = son.get('_cls', cls._class_name)
         data = dict(("%s" % key, value) for key, value in son.items())
-        if PY25:
-            # PY25 cannot handle unicode keys passed to class constructor
-            # example: cls(**data)
+        if not UNICODE_KWARGS:
+            # python 2.6.4 and lower cannot handle unicode keys
+            # passed to class constructor example: cls(**data)
             to_str_keys_recursive(data)
 
         if '_types' in data:
mongoengine/document.py
@@ -295,6 +295,16 @@ class Document(BaseDocument):
             ref.save(**kwargs)
             ref._changed_fields = []
 
+    @property
+    def _object_key(self):
+        """Dict to identify object in collection
+        """
+        select_dict = {'pk': self.pk}
+        shard_key = self.__class__._meta.get('shard_key', tuple())
+        for k in shard_key:
+            select_dict[k] = getattr(self, k)
+        return select_dict
+
     def update(self, **kwargs):
         """Performs an update on the :class:`~mongoengine.Document`
         A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
@@ -306,11 +316,7 @@ class Document(BaseDocument):
             raise OperationError('attempt to update a document not yet saved')
 
         # Need to add shard key to query, or you get an error
-        select_dict = {'pk': self.pk}
-        shard_key = self.__class__._meta.get('shard_key', tuple())
-        for k in shard_key:
-            select_dict[k] = getattr(self, k)
-        return self.__class__.objects(**select_dict).update_one(**kwargs)
+        return self.__class__.objects(**self._object_key).update_one(**kwargs)
 
     def delete(self, safe=False):
         """Delete the :class:`~mongoengine.Document` from the database. This
@@ -321,7 +327,7 @@ class Document(BaseDocument):
         signals.pre_delete.send(self.__class__, document=self)
 
         try:
-            self.__class__.objects(pk=self.pk).delete(safe=safe)
+            self.__class__.objects(**self._object_key).delete(safe=safe)
         except pymongo.errors.OperationFailure, err:
             message = u'Could not delete document (%s)' % err.message
             raise OperationError(message)
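The new _object_key property centralises the "pk plus shard key" selector that update() and delete() now share. A sketch of the effect; the model and values are illustrative, not from this commit:

    from mongoengine import Document, StringField, connect

    connect('example_db')  # hypothetical database name

    class Animal(Document):
        superphylum = StringField()
        name = StringField()
        meta = {'shard_key': ('superphylum',)}

    a = Animal(superphylum='Deuterostomia', name='tuna')
    a.save()
    # a._object_key == {'pk': a.pk, 'superphylum': 'Deuterostomia'},
    # so both calls below target the exact document on a sharded cluster:
    a.update(set__name='bluefin tuna')
    a.delete()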
mongoengine/python_support.py
@@ -4,6 +4,7 @@ import sys
 
 PY3 = sys.version_info[0] == 3
 PY25 = sys.version_info[:2] == (2, 5)
+UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264
 
 if PY3:
     import codecs
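UNICODE_KWARGS joins the version digits (for example (2, 6, 4) becomes 264) and is true for interpreters newer than 2.6.4, the last CPython release that rejects unicode keys in **kwargs. A sketch of the failure being guarded against; it only reproduces on those old interpreters:

    # On CPython <= 2.6.4 this raises TypeError: keywords must be strings;
    # hence from_son() runs to_str_keys_recursive(data) first.
    def build(**kwargs):
        return kwargs

    data = {u'name': u'Ada'}  # unicode keys, as decoded from BSON
    build(**data)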
mongoengine/queryset.py
@@ -218,7 +218,7 @@ class QNode(object):
     def _combine(self, other, operation):
         """Combine this node with another node into a QCombination object.
         """
-        if other.empty:
+        if getattr(other, 'empty', True):
             return self
 
         if self.empty:
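With the getattr guard, combining with an empty operand simply returns the non-empty side; the test_q_merge_queries_edge_case test below exercises the same path through filter(). A tiny sketch, assuming Q().empty is true for an argument-less Q:

    from mongoengine.queryset import Q

    q = Q(email='example@example.com') | Q(name='John Doe')
    assert (q & Q()) is q  # other is empty, so _combine returns self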
@@ -398,12 +398,12 @@ class QuerySet(object):
            or a **-** to determine the index ordering
         """
         index_spec = QuerySet._build_index_spec(self._document, key_or_list)
-        self._collection.ensure_index(
-            index_spec['fields'],
-            drop_dups=drop_dups,
-            background=background,
-            sparse=index_spec.get('sparse', False),
-            unique=index_spec.get('unique', False))
+        fields = index_spec.pop('fields')
+        index_spec['drop_dups'] = drop_dups
+        index_spec['background'] = background
+        index_spec.update(kwargs)
+
+        self._collection.ensure_index(fields, **index_spec)
         return self
 
     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
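Because index_spec.update(kwargs) forwards extra keyword arguments to pymongo, callers can request index options MongoEngine does not model itself. A hedged sketch: the option name is pymongo's, and the exact ensure_index signature is assumed from the hunk above:

    from datetime import datetime
    from mongoengine import Document, DateTimeField, connect

    connect('example_db')  # hypothetical database name

    class Log(Document):
        created = DateTimeField(default=datetime.now)

    # Forwarded straight through to pymongo, creating a TTL index.
    Log.objects.ensure_index('created', expireAfterSeconds=3600)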
@@ -473,11 +473,11 @@ class QuerySet(object):
         # Ensure document-defined indexes are created
         if self._document._meta['index_specs']:
             for spec in self._document._meta['index_specs']:
-                types_indexed = types_indexed or includes_types(spec['fields'])
+                fields = spec.pop('fields')
+                types_indexed = types_indexed or includes_types(fields)
                 opts = index_opts.copy()
-                opts['unique'] = spec.get('unique', False)
-                opts['sparse'] = spec.get('sparse', False)
-                self._collection.ensure_index(spec['fields'],
+                opts.update(spec)
+                self._collection.ensure_index(fields,
                                               background=background, **opts)
 
         # If _types is being used (for polymorphism), it needs an index,
@@ -1343,6 +1343,12 @@ class QuerySet(object):
         """
         doc = self._document
 
+        # Handle deletes where skips or limits have been applied
+        if self._skip or self._limit:
+            for doc in self:
+                doc.delete()
+            return
+
         delete_rules = doc._meta.get('delete_rules') or {}
         # Check for DENY rules before actually deleting/nullifying any other
         # references
tests/test_document.py
@@ -16,7 +16,7 @@ from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest
 from mongoengine import *
 from mongoengine.base import NotRegistered, InvalidDocumentError
 from mongoengine.queryset import InvalidQueryError
-from mongoengine.connection import get_db
+from mongoengine.connection import get_db, get_connection
 
 TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
 
@@ -1102,6 +1102,32 @@ class DocumentTest(unittest.TestCase):
 
         BlogPost.drop_collection()
 
+    def test_ttl_indexes(self):
+
+        class Log(Document):
+            created = DateTimeField(default=datetime.now)
+            meta = {
+                'indexes': [
+                    {'fields': ['created'], 'expireAfterSeconds': 3600}
+                ]
+            }
+
+        Log.drop_collection()
+
+        if pymongo.version_tuple < (2, 3):
+            raise SkipTest('pymongo needs to be 2.3 or higher for this test')
+
+        connection = get_connection()
+        version_array = connection.server_info()['versionArray']
+        if version_array < [2, 2]:
+            raise SkipTest('MongoDB needs to be 2.2 or higher for this test')
+
+        # Indexes are lazy so use list() to perform query
+        list(Log.objects)
+        info = Log.objects._collection.index_information()
+        self.assertEqual(3600,
+                         info['_types_1_created_1']['expireAfterSeconds'])
+
     def test_unique_and_indexes(self):
         """Ensure that 'unique' constraints aren't overridden by
         meta.indexes.
@@ -1258,6 +1284,17 @@ class DocumentTest(unittest.TestCase):
         self.assertEqual(person.name, "Mr Test User")
         self.assertEqual(person.age, 21)
 
+    def test_reload_sharded(self):
+        class Animal(Document):
+            superphylum = StringField()
+            meta = {'shard_key': ('superphylum',)}
+
+        Animal.drop_collection()
+        doc = Animal(superphylum='Deuterostomia')
+        doc.save()
+        doc.reload()
+        Animal.drop_collection()
+
     def test_reload_referencing(self):
         """Ensures reloading updates weakrefs correctly
         """
tests/test_queryset.py
@@ -1347,6 +1347,21 @@ class QuerySetTest(unittest.TestCase):
         query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
         self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})
 
+    def test_q_merge_queries_edge_case(self):
+
+        class User(Document):
+            email = EmailField(required=False)
+            name = StringField()
+
+        User.drop_collection()
+        pk = ObjectId()
+        User(email='example@example.com', pk=pk).save()
+
+        self.assertEqual(1, User.objects.filter(
+            Q(email='example@example.com') |
+            Q(name='John Doe')
+        ).limit(2).filter(pk=pk).count())
+
     def test_exec_js_query(self):
         """Ensure that queries are properly formed for use in exec_js.
         """
@@ -1486,7 +1501,8 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(1, BlogPost.objects.count())
 
     def test_reverse_delete_rule_cascade_self_referencing(self):
-        """Ensure self-referencing CASCADE deletes do not result in infinite loop
+        """Ensure self-referencing CASCADE deletes do not result in infinite
+        loop
         """
         class Category(Document):
             name = StringField()
@@ -1592,6 +1608,40 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(post.authors, [me])
         self.assertEqual(another.authors, [])
 
+    def test_delete_with_limits(self):
+
+        class Log(Document):
+            pass
+
+        Log.drop_collection()
+
+        for i in xrange(10):
+            Log().save()
+
+        Log.objects()[3:5].delete()
+        self.assertEqual(8, Log.objects.count())
+
+    def test_delete_with_limit_handles_delete_rules(self):
+        """Ensure cascading delete rules fire when deleting with a limit.
+        """
+        class BlogPost(Document):
+            content = StringField()
+            author = ReferenceField(self.Person, reverse_delete_rule=CASCADE)
+        BlogPost.drop_collection()
+
+        me = self.Person(name='Test User')
+        me.save()
+        someoneelse = self.Person(name='Some-one Else')
+        someoneelse.save()
+
+        BlogPost(content='Watching TV', author=me).save()
+        BlogPost(content='Chilling out', author=me).save()
+        BlogPost(content='Pro Testing', author=someoneelse).save()
+
+        self.assertEqual(3, BlogPost.objects.count())
+        self.Person.objects()[:1].delete()
+        self.assertEqual(1, BlogPost.objects.count())
+
     def test_update(self):
         """Ensure that atomic updates work properly.
         """
@@ -2519,30 +2569,30 @@ class QuerySetTest(unittest.TestCase):
         """Ensure that index_types will, when disabled, prevent _types
         being added to all indices.
         """
-        class BlogPost(Document):
+        class BloggPost(Document):
             date = DateTimeField()
             meta = {'index_types': False,
                     'indexes': ['-date']}
 
         # Indexes are lazy so use list() to perform query
-        list(BlogPost.objects)
-        info = BlogPost.objects._collection.index_information()
+        list(BloggPost.objects)
+        info = BloggPost.objects._collection.index_information()
         info = [value['key'] for key, value in info.iteritems()]
         self.assertTrue([('_types', 1)] not in info)
         self.assertTrue([('date', -1)] in info)
 
-        BlogPost.drop_collection()
+        BloggPost.drop_collection()
 
-        class BlogPost(Document):
+        class BloggPost(Document):
             title = StringField()
             meta = {'allow_inheritance': False}
 
         # _types is not used on objects where allow_inheritance is False
-        list(BlogPost.objects)
-        info = BlogPost.objects._collection.index_information()
+        list(BloggPost.objects)
+        info = BloggPost.objects._collection.index_information()
         self.assertFalse([('_types', 1)] in info.values())
 
-        BlogPost.drop_collection()
+        BloggPost.drop_collection()
 
     def test_types_index_with_pk(self):