Compare commits

..

3 Commits

Author          SHA1        Message                          Date
Stefan Wojcik   a8889b6dfb  cleaner Document._save_update    2016-12-29 22:57:24 -05:00
Stefan Wojcik   d05301b3a1  minor tweaks                     2016-12-29 22:27:15 -05:00
Stefan Wojcik   a120eae5ae  slightly cleaner Document.save   2016-12-29 22:16:14 -05:00
6 changed files with 23 additions and 59 deletions

View File

@@ -13,7 +13,6 @@ Changes in 0.11.0
 - BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
 - BREAKING CHANGE: Dropped Python 2.6 support. #1428
 - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428
-- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
 - Fixed absent rounding for DecimalField when `force_string` is set. #1103

 Changes in 0.10.8
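For reference, the two import-related entries above amount to the following change in user code (a minimal sketch; the pre-0.11.0 imports are shown as comments and only the new names come from the changelog entries):

    # Before 0.11.0 these imports worked:
    #   from mongoengine import ConnectionError
    #   from mongoengine.base import ValidationError
    # From 0.11.0 on, ConnectionError is renamed (it clashes with Python 3's
    # built-in exception) and errors are imported from mongoengine.errors:
    from mongoengine import MongoEngineConnectionError
    from mongoengine.errors import ValidationError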

View File

@@ -429,7 +429,7 @@ class StrictDict(object):
     def __eq__(self, other):
         return self.items() == other.items()

-    def __ne__(self, other):
+    def __neq__(self, other):
         return self.items() != other.items()

     @classmethod
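The one-character difference above matters because Python's != operator only ever invokes __ne__; a method spelled __neq__ is just an ordinary method that nothing calls, so != silently falls back to the default behaviour. A minimal standalone illustration (the Pair class is made up for this example and is not part of mongoengine):

    class Pair(object):
        def __init__(self, items):
            self.items = items

        def __eq__(self, other):
            return self.items == other.items

        def __neq__(self, other):  # never called by the != operator
            return self.items != other.items

    a, b = Pair([1]), Pair([1])
    print(a == b)  # True: __eq__ is used
    print(a != b)  # False on Python 3 (default __ne__ negates __eq__),
                   # True on Python 2 (falls back to identity comparison)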

View File

@@ -50,8 +50,8 @@ class FieldDoesNotExist(Exception):
     or an :class:`~mongoengine.EmbeddedDocument`.

     To avoid this behavior on data loading,
-    you should set the :attr:`strict` to ``False``
-    in the :attr:`meta` dictionary.
+    you should the :attr:`strict` to ``False``
+    in the :attr:`meta` dictionnary.
     """

View File

@@ -296,25 +296,22 @@ class BaseQuerySet(object):
             result = None
         return result

-    def insert(self, doc_or_docs, load_bulk=True, write_concern=None,
-               signal_kwargs=None, continue_on_error=None):
+    def insert(self, doc_or_docs, load_bulk=True,
+               write_concern=None, signal_kwargs=None):
         """bulk insert documents

         :param doc_or_docs: a document or list of documents to be inserted
         :param load_bulk (optional): If True returns the list of document
             instances
-        :param write_concern: Optional keyword argument passed down to
-            :meth:`~pymongo.collection.Collection.insert`, representing
-            the write concern. For example,
-            ``insert(..., write_concert={w: 2, fsync: True})`` will
-            wait until at least two servers have recorded the write
-            and will force an fsync on each server being written to.
+        :param write_concern: Extra keyword arguments are passed down to
+            :meth:`~pymongo.collection.Collection.insert`
+            which will be used as options for the resultant
+            ``getLastError`` command. For example,
+            ``insert(..., {w: 2, fsync: True})`` will wait until at least
+            two servers have recorded the write and will force an fsync on
+            each server being written to.
         :parm signal_kwargs: (optional) kwargs dictionary to be passed to
             the signal calls.
-        :param continue_on_error: Optional keyword argument passed down to
-            :meth:`~pymongo.collection.Collection.insert`. Defines what
-            to do when a document cannot be inserted (e.g. due to
-            duplicate IDs). Read PyMongo's docs for more info.

         By default returns document instances, set ``load_bulk`` to False to
         return just ``ObjectIds``
@@ -325,10 +322,12 @@ class BaseQuerySet(object):
         """
         Document = _import_class('Document')

-        # Determine if we're inserting one doc or more
+        if write_concern is None:
+            write_concern = {}
+
         docs = doc_or_docs
         return_one = False
-        if isinstance(docs, Document):
+        if isinstance(docs, Document) or issubclass(docs.__class__, Document):
             return_one = True
             docs = [docs]

@@ -345,16 +344,9 @@ class BaseQuerySet(object):
         signals.pre_bulk_insert.send(self._document,
                                      documents=docs, **signal_kwargs)

-        # Resolve optional insert kwargs
-        insert_kwargs = {}
-        if write_concern is not None:
-            insert_kwargs.update(write_concern)
-        if continue_on_error is not None:
-            insert_kwargs['continue_on_error'] = continue_on_error
-
         raw = [doc.to_mongo() for doc in docs]
         try:
-            ids = self._collection.insert(raw, **insert_kwargs)
+            ids = self._collection.insert(raw, **write_concern)
         except pymongo.errors.DuplicateKeyError as err:
             message = 'Could not save document (%s)'
             raise NotUniqueError(message % six.text_type(err))
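For context, QuerySet.insert is typically called like this (a rough sketch; the Post model is hypothetical, and whether a continue_on_error keyword is accepted depends on which side of this diff you are running):

    from mongoengine import Document, StringField, connect

    connect('testdb')

    class Post(Document):
        title = StringField()

    # By default the saved Document instances are returned...
    saved = Post.objects.insert([Post(title='a'), Post(title='b')])

    # ...or just their ObjectIds when load_bulk=False.
    ids = Post.objects.insert([Post(title='c')], load_bulk=False)

    # write_concern is forwarded to pymongo's Collection.insert, e.g. to wait
    # for two servers to acknowledge the write and force an fsync:
    Post.objects.insert([Post(title='d')], write_concern={'w': 2, 'fsync': True})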

View File

@@ -1985,7 +1985,7 @@ class FieldTest(unittest.TestCase):
         self.assertEqual(content, User.objects.first().groups[0].content)

     def test_reference_miss(self):
-        """Ensure an exception is raised when dereferencing unknown document
+        """Ensure an exception is raised when dereferencing unknow document
         """

         class Foo(Document):

View File

@@ -766,7 +766,8 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(record.embed.field, 2)

     def test_bulk_insert(self):
-        """Ensure that bulk insert works."""
+        """Ensure that bulk insert works
+        """

         class Comment(EmbeddedDocument):
             name = StringField()
@@ -884,37 +885,9 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(Blog.objects.count(), 2)

-    def test_bulk_insert_continue_on_error(self):
-        """Ensure that bulk insert works with the continue_on_error option."""
-
-        class Person(Document):
-            email = EmailField(unique=True)
-
-        Person.drop_collection()
-
-        Person.objects.insert([
-            Person(email='alice@example.com'),
-            Person(email='bob@example.com')
-        ])
-        self.assertEqual(Person.objects.count(), 2)
-
-        new_docs = [
-            Person(email='alice@example.com'),  # dupe
-            Person(email='bob@example.com'),  # dupe
-            Person(email='steve@example.com')  # new one
-        ]
-
-        # By default inserting dupe docs should fail and no new docs should
-        # be inserted.
-        with self.assertRaises(NotUniqueError):
-            Person.objects.insert(new_docs)
-        self.assertEqual(Person.objects.count(), 2)
-
-        # With continue_on_error, new doc should be inserted, even though we
-        # still get a NotUniqueError caused by the other 2 dupes.
-        with self.assertRaises(NotUniqueError):
-            Person.objects.insert(new_docs, continue_on_error=True)
-        self.assertEqual(Person.objects.count(), 3)
+        Blog.objects.insert([blog2, blog3],
+                            write_concern={"w": 0, 'continue_on_error': True})
+        self.assertEqual(Blog.objects.count(), 3)

     def test_get_changed_fields_query_count(self):