Compare commits

...

15 Commits

Author SHA1 Message Date
Stefan Wojcik
a8889b6dfb cleaner Document._save_update 2016-12-29 22:57:24 -05:00
Stefan Wojcik
d05301b3a1 minor tweaks 2016-12-29 22:27:15 -05:00
Stefan Wojcik
a120eae5ae slightly cleaner Document.save 2016-12-29 22:16:14 -05:00
Stefan Wójcik
3d75573889 Validate db_field (#1448) 2016-12-29 12:39:05 -05:00
Stefan Wójcik
c6240ca415 Test connection's write concern (#1456) 2016-12-29 12:37:38 -05:00
Stefan Wójcik
2ee8984b44 add a $rename operator (#1454) 2016-12-28 23:25:38 -05:00
Stefan Wojcik
b7ec587e5b better docstring for BaseDocument.to_json 2016-12-28 22:15:46 -05:00
Stefan Wojcik
47c58bce2b fix "connect" example in the docs 2016-12-28 21:08:18 -05:00
Stefan Wojcik
96e95ac533 minor readme tweaks 2016-12-28 17:18:55 -05:00
Stefan Wojcik
b013a065f7 remove readme mention of the irc channel 2016-12-28 11:50:28 -05:00
Stefan Wojcik
74b37d11cf only validate db_field if it's a string type 2016-12-28 11:46:18 -05:00
Stefan Wójcik
c6cc013617 fix BaseQuerySet.fields when mixing exclusion/inclusion with complex values like $slice (#1452) 2016-12-28 11:40:57 -05:00
Stefan Wójcik
f4e1d80a87 support a negative dec operator (#1450) 2016-12-28 02:04:49 -05:00
Stefan Wójcik
91dad4060f raise an error when trying to save an abstract document (#1449) 2016-12-28 00:51:47 -05:00
Stefan Wojcik
e07cb82c15 validate db_field 2016-12-27 17:38:26 -05:00
14 changed files with 204 additions and 86 deletions

View File

@@ -14,7 +14,7 @@ Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
issue or feature request. That way you avoid working on something
that might not be of interest or that has already been addressed. If in doubt
that might not be of interest or that has already been addressed. If in doubt
post to the `user group <http://groups.google.com/group/mongoengine-users>`
Supported Interpreters

View File

@@ -57,7 +57,7 @@ Some simple examples of what MongoEngine code looks like:
class BlogPost(Document):
title = StringField(required=True, max_length=200)
posted = DateTimeField(default=datetime.datetime.now)
posted = DateTimeField(default=datetime.datetime.utcnow)
tags = ListField(StringField(max_length=50))
meta = {'allow_inheritance': True}
@@ -87,17 +87,18 @@ Some simple examples of what MongoEngine code looks like:
... print
...
>>> len(BlogPost.objects)
# Count all blog posts and their subtypes
>>> BlogPost.objects.count()
2
>>> len(TextPost.objects)
>>> TextPost.objects.count()
1
>>> len(LinkPost.objects)
>>> LinkPost.objects.count()
1
# Find tagged posts
>>> len(BlogPost.objects(tags='mongoengine'))
# Count tagged posts
>>> BlogPost.objects(tags='mongoengine').count()
2
>>> len(BlogPost.objects(tags='mongodb'))
>>> BlogPost.objects(tags='mongodb').count()
1
Tests
@@ -130,8 +131,7 @@ Community
<http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list
<http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_
Contributing
============
We welcome contributions! see the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
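A note on the len() → count() change above: len(queryset) typically forces the results to be fetched and cached client-side, while count() asks the server for the number of matching documents. A minimal sketch, reusing the BlogPost model from the README example above (assumes `from mongoengine import *` and an active connect()):

# count() issues a server-side count instead of iterating the results
BlogPost.objects.count()                      # all posts, including subtypes
BlogPost.objects(tags='mongoengine').count()  # only posts matching the filter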

View File

@@ -33,7 +33,7 @@ the :attr:`host` to
corresponding parameters in :func:`~mongoengine.connect`: ::
connect(
name='test',
db='test',
username='user',
password='12345',
host='mongodb://admin:qwerty@localhost/production'

View File

@@ -5,7 +5,7 @@ __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert', 'min', 'max'])
'set_on_insert', 'min', 'max', 'rename'])
_document_registry = {}
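With 'rename' added to UPDATE_OPERATORS, updates gain a rename__ keyword prefix that maps to MongoDB's $rename. A minimal sketch mirroring the test_update_rename_operator test added further down (Person is that test's existing model):

doc = Person(name='John').save()
doc.update(rename__name='first_name')   # translated to {'$rename': {'name': 'first_name'}}
# the raw document now has a 'first_name' key instead of 'name'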

View File

@@ -402,9 +402,11 @@ class BaseDocument(object):
raise ValidationError(message, errors=errors)
def to_json(self, *args, **kwargs):
"""Converts a document to JSON.
:param use_db_field: Set to True by default but enables the output of the json structure with the field names
and not the mongodb store db_names in case of set to False
"""Convert this document to JSON.
:param use_db_field: Serialize field names as they appear in
MongoDB (as opposed to attribute names on this document).
Defaults to True.
"""
use_db_field = kwargs.pop('use_db_field', True)
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
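To make the clarified use_db_field semantics concrete, a small sketch with a hypothetical document that uses a custom db_field (not part of this diff):

class Person(Document):
    name = StringField(db_field='n')

p = Person(name='Ada')
p.to_json()                    # MongoDB field names, e.g. '{"n": "Ada"}' (default)
p.to_json(use_db_field=False)  # attribute names, e.g. '{"name": "Ada"}'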

View File

@@ -41,7 +41,7 @@ class BaseField(object):
"""
:param db_field: The database field to store this field in
(defaults to the name of the field)
:param name: Deprecated - use db_field
:param name: Deprecated - use db_field
:param required: If the field is required. Whether it has to have a
value or not. Defaults to False.
:param default: (optional) The default value for this field if no value
@@ -81,6 +81,17 @@ class BaseField(object):
self.sparse = sparse
self._owner_document = None
# Validate the db_field
if isinstance(self.db_field, six.string_types) and (
'.' in self.db_field or
'\0' in self.db_field or
self.db_field.startswith('$')
):
raise ValueError(
'field names cannot contain dots (".") or null characters '
'("\\0"), and they must not start with a dollar sign ("$").'
)
# Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs)
if conflicts:
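The practical effect of the new check in BaseField.__init__, as exercised by test_db_field_validation further down; since the validation runs in the constructor, it fires as soon as the field is built:

StringField(db_field='user.name')   # ValueError: dots are not allowed
StringField(db_field='$name')       # ValueError: must not start with '$'
StringField(db_field='name\0')      # ValueError: null characters are not allowed
StringField(db_field='user_name')   # fine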

View File

@@ -313,6 +313,9 @@ class Document(BaseDocument):
.. versionchanged:: 0.10.7
Add signal_kwargs argument
"""
if self._meta.get('abstract'):
raise InvalidDocumentError('Cannot save an abstract document.')
signal_kwargs = signal_kwargs or {}
signals.pre_save.send(self.__class__, document=self, **signal_kwargs)
@@ -329,68 +332,20 @@ class Document(BaseDocument):
signals.pre_save_post_validation.send(self.__class__, document=self,
created=created, **signal_kwargs)
if self._meta.get('auto_create_index', True):
self.ensure_indexes()
try:
collection = self._get_collection()
if self._meta.get('auto_create_index', True):
self.ensure_indexes()
# Save a new document or update an existing one
if created:
if force_insert:
object_id = collection.insert(doc, **write_concern)
else:
object_id = collection.save(doc, **write_concern)
# In PyMongo 3.0, the save() call calls internally the _update() call
# but they forget to return the _id value passed back, therefore getting it back here
# Correct behaviour in 2.X and in 3.0.1+ versions
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
object_id = self._save_create(doc, force_insert, write_concern)
else:
object_id = doc['_id']
updates, removals = self._delta()
# Need to add shard key to query, or you get an error
if save_condition is not None:
select_dict = transform.query(self.__class__,
**save_condition)
else:
select_dict = {}
select_dict['_id'] = object_id
shard_key = self._meta.get('shard_key', tuple())
for k in shard_key:
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
def is_new_object(last_error):
if last_error is not None:
updated = last_error.get('updatedExisting')
if updated is not None:
return not updated
return created
update_query = {}
if updates:
update_query['$set'] = updates
if removals:
update_query['$unset'] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error['n'] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
created = is_new_object(last_error)
object_id, created = self._save_update(doc, save_condition,
write_concern)
if cascade is None:
cascade = self._meta.get(
'cascade', False) or cascade_kwargs is not None
cascade = (self._meta.get('cascade', False) or
cascade_kwargs is not None)
if cascade:
kwargs = {
@@ -403,6 +358,7 @@ class Document(BaseDocument):
kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs
self.cascade_save(**kwargs)
except pymongo.errors.DuplicateKeyError as err:
message = u'Tried to save duplicate unique keys (%s)'
raise NotUniqueError(message % six.text_type(err))
@@ -415,16 +371,91 @@ class Document(BaseDocument):
raise NotUniqueError(message % six.text_type(err))
raise OperationError(message % six.text_type(err))
# Make sure we store the PK on this document now that it's saved
id_field = self._meta['id_field']
if created or id_field not in self._meta.get('shard_key', []):
self[id_field] = self._fields[id_field].to_python(object_id)
signals.post_save.send(self.__class__, document=self,
created=created, **signal_kwargs)
self._clear_changed_fields()
self._created = False
return self
def _save_create(self, doc, force_insert, write_concern):
"""Save a new document.
Helper method, should only be used inside save().
"""
collection = self._get_collection()
if force_insert:
return collection.insert(doc, **write_concern)
object_id = collection.save(doc, **write_concern)
# In PyMongo 3.0, save() internally calls _update() but does not return
# the _id of the saved document, so we have to fetch it back here.
# Behaviour is correct in PyMongo 2.x and in 3.0.1+.
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
return object_id
def _save_update(self, doc, save_condition, write_concern):
"""Update an existing document.
Helper method, should only be used inside save().
"""
collection = self._get_collection()
object_id = doc['_id']
created = False
select_dict = {}
if save_condition is not None:
select_dict = transform.query(self.__class__, **save_condition)
select_dict['_id'] = object_id
# Need to add shard key to query, or you get an error
shard_key = self._meta.get('shard_key', tuple())
for k in shard_key:
path = self._lookup_field(k.split('.'))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict['.'.join(actual_key)] = val
updates, removals = self._delta()
update_query = {}
if updates:
update_query['$set'] = updates
if removals:
update_query['$unset'] = removals
if updates or removals:
upsert = save_condition is None
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
if not upsert and last_error['n'] == 0:
raise SaveConditionError('Race condition preventing'
' document update detected')
if last_error is not None:
updated_existing = last_error.get('updatedExisting')
if updated_existing is False:
created = True
# !!! This is bad, means we accidentally created a new,
# potentially corrupted document. See
# https://github.com/MongoEngine/mongoengine/issues/564
return object_id, created
def cascade_save(self, **kwargs):
"""Recursively save any references and generic references on the
document.
@@ -828,7 +859,6 @@ class Document(BaseDocument):
""" Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes.
"""
if cls._meta.get('abstract'):
return []
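Beyond the abstract-document guard and the _save_create/_save_update split, the save_condition behaviour is unchanged: when the condition doesn't match the stored document, _save_update raises SaveConditionError instead of upserting. A rough sketch with a hypothetical Page document carrying an IntField named version (names are illustrative, not from this diff):

page = Page(version=1).save()
page.version = 2
page.save(save_condition={'version': 1})   # stored version is 1, update applied

page.version = 3
page.save(save_condition={'version': 1})   # stored version is now 2 -> SaveConditionError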

View File

@@ -933,7 +933,20 @@ class BaseQuerySet(object):
key = '.'.join(parts)
cleaned_fields.append((key, value))
fields = sorted(cleaned_fields, key=operator.itemgetter(1))
# Sort fields by their values, explicitly excluded fields first, then
# explicitly included, and then more complicated operators such as
# $slice.
def _sort_key(field_tuple):
key, value = field_tuple
if isinstance(value, int):
return value # 0 for exclusion, 1 for inclusion
else:
return 2 # so that complex values appear last
fields = sorted(cleaned_fields, key=_sort_key)
# Clone the queryset, group all fields by their value, convert
# each of them to db_fields, and set the queryset's _loaded_fields
queryset = self.clone()
for value, group in itertools.groupby(fields, lambda x: x[1]):
fields = [field for field, value in group]
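The new _sort_key in effect, using the test_mix_slice_with_other_fields case added below: values are grouped as exclusions (0), then inclusions (1), then complex operators such as $slice, so mixing them no longer breaks the sort:

qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
qs._loaded_fields.as_dict()   # {'c': {'$slice': 2}, 'a': 1}, as asserted in the test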

View File

@@ -233,8 +233,7 @@ def update(_doc_cls=None, **update):
# Support decrement by flipping a positive value's sign
# and using 'inc'
op = 'inc'
if value > 0:
value = -value
value = -value
elif op == 'add_to_set':
op = 'addToSet'
elif op == 'set_on_insert':
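Since dec is implemented by flipping the sign and delegating to $inc, dropping the positivity check makes a negative dec equivalent to a positive inc (covered by the queryset test further down). A quick sketch with the BlogPost model from that test:

BlogPost.objects.update_one(dec__hits=1)    # sends {'$inc': {'hits': -1}}
BlogPost.objects.update_one(dec__hits=-1)   # sends {'$inc': {'hits': 1}}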

View File

@@ -435,6 +435,15 @@ class InstanceTest(unittest.TestCase):
person.to_dbref()
def test_save_abstract_document(self):
"""Saving an abstract document should fail."""
class Doc(Document):
name = StringField()
meta = {'abstract': True}
with self.assertRaises(InvalidDocumentError):
Doc(name='aaa').save()
def test_reload(self):
"""Ensure that attributes may be reloaded.
"""
@@ -1223,6 +1232,19 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(person.name, None)
self.assertEqual(person.age, None)
def test_update_rename_operator(self):
"""Test the $rename operator."""
coll = self.Person._get_collection()
doc = self.Person(name='John').save()
raw_doc = coll.find_one({'_id': doc.pk})
self.assertEqual(set(raw_doc.keys()), set(['_id', '_cls', 'name']))
doc.update(rename__name='first_name')
raw_doc = coll.find_one({'_id': doc.pk})
self.assertEqual(set(raw_doc.keys()),
set(['_id', '_cls', 'first_name']))
self.assertEqual(raw_doc['first_name'], 'John')
def test_inserts_if_you_set_the_pk(self):
p1 = self.Person(name='p1', id=bson.ObjectId()).save()
p2 = self.Person(name='p2')

View File

@@ -306,6 +306,24 @@ class FieldTest(unittest.TestCase):
person.id = '497ce96f395f2f052a494fd4'
person.validate()
def test_db_field_validation(self):
"""Ensure that db_field doesn't accept invalid values."""
# dot in the name
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='user.name')
# name starting with $
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='$name')
# name containing a null character
with self.assertRaises(ValueError):
class User(Document):
name = StringField(db_field='name\0')
def test_string_validation(self):
"""Ensure that invalid values cannot be assigned to string fields.
"""
@@ -3973,30 +3991,25 @@ class FieldTest(unittest.TestCase):
"""Tests if a `FieldDoesNotExist` exception is raised when trying to
instantiate a document with a field that's not defined.
"""
class Doc(Document):
foo = StringField(db_field='f')
foo = StringField()
def test():
with self.assertRaises(FieldDoesNotExist):
Doc(bar='test')
self.assertRaises(FieldDoesNotExist, test)
def test_undefined_field_exception_with_strict(self):
"""Tests if a `FieldDoesNotExist` exception is raised when trying to
instantiate a document with a field that's not defined,
even when strict is set to False.
"""
class Doc(Document):
foo = StringField(db_field='f')
foo = StringField()
meta = {'strict': False}
def test():
with self.assertRaises(FieldDoesNotExist):
Doc(bar='test')
self.assertRaises(FieldDoesNotExist, test)
def test_long_field_is_considered_as_int64(self):
"""
Tests that long fields are stored as long in mongo, even if long value

View File

@@ -141,6 +141,16 @@ class OnlyExcludeAllTest(unittest.TestCase):
self.assertEqual(qs._loaded_fields.as_dict(),
{'b': {'$slice': 5}})
def test_mix_slice_with_other_fields(self):
class MyDoc(Document):
a = ListField()
b = ListField()
c = ListField()
qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
self.assertEqual(qs._loaded_fields.as_dict(),
{'c': {'$slice': 2}, 'a': 1})
def test_only(self):
"""Ensure that QuerySet.only only returns the requested fields.
"""

View File

@@ -1826,6 +1826,11 @@ class QuerySetTest(unittest.TestCase):
post.reload()
self.assertEqual(post.hits, 10)
# Negative dec operator is equal to a positive inc operator
BlogPost.objects.update_one(dec__hits=-1)
post.reload()
self.assertEqual(post.hits, 11)
BlogPost.objects.update(push__tags='mongo')
post.reload()
self.assertTrue('mongo' in post.tags)

View File

@@ -296,6 +296,19 @@ class ConnectionTest(unittest.TestCase):
conn = get_connection('t2')
self.assertFalse(get_tz_awareness(conn))
def test_write_concern(self):
"""Ensure write concern can be specified in connect() via
a kwarg or as part of the connection URI.
"""
conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true')
conn2 = connect('testing', alias='conn2', w=1, j=True)
if IS_PYMONGO_3:
self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True})
self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True})
else:
self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True})
self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True})
def test_datetime(self):
connect('mongoenginetest', tz_aware=True)
d = datetime.datetime(2010, 5, 5, tzinfo=utc)
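For completeness, the same write-concern options outside the test suite; both forms below should yield a connection whose write concern is {'w': 1, 'j': True} (the alias and database names here are illustrative):

connect('mydb', alias='via_kwargs', w=1, j=True)
connect(alias='via_uri', host='mongodb://localhost/mydb?w=1&j=true')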