NotUniqueError gracefully replacing ambiguous OperationError when appropriate

Ross Lawley 2012-08-24 10:38:00 +01:00
parent eedf908770
commit 1c5e6a3425
4 changed files with 23 additions and 9 deletions
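In short: a duplicate-key failure during a save or insert now raises the more specific NotUniqueError, and since NotUniqueError subclasses OperationError, existing handlers keep working. A minimal sketch of the caller-facing behaviour (not part of the commit; the BlogPost model and the database name are illustrative stand-ins modelled on the tests below):

    from mongoengine import Document, StringField, connect
    from mongoengine.queryset import NotUniqueError, OperationError

    connect('example_db')  # hypothetical database name

    class BlogPost(Document):
        slug = StringField(unique=True)

    BlogPost.drop_collection()
    BlogPost(slug='test').save()

    try:
        BlogPost(slug='test').save()  # violates the unique index on slug
    except NotUniqueError, err:
        print 'duplicate key:', err

    # Backwards compatibility: NotUniqueError extends OperationError, so
    # pre-existing `except OperationError` blocks still catch this case.
    assert issubclass(NotUniqueError, OperationError)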


@@ -1,18 +1,19 @@
 import warnings

 import pymongo
+import re

 from bson.dbref import DBRef

 from mongoengine import signals, queryset
 from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
                   BaseDict, BaseList)
-from queryset import OperationError
+from queryset import OperationError, NotUniqueError
 from connection import get_db, DEFAULT_CONNECTION_NAME

 __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
            'DynamicEmbeddedDocument', 'OperationError',
-           'InvalidCollectionError']
+           'InvalidCollectionError', 'NotUniqueError']


 class InvalidCollectionError(Exception):
@@ -250,8 +251,11 @@ class Document(BaseDocument):
         except pymongo.errors.OperationFailure, err:
             message = 'Could not save document (%s)'
-            if u'duplicate key' in unicode(err):
+            if re.match('^E1100[01] duplicate key', unicode(err)):
+                # E11000 - duplicate key error index
+                # E11001 - duplicate key on update
                 message = u'Tried to save duplicate unique keys (%s)'
+                raise NotUniqueError(message % unicode(err))
             raise OperationError(message % unicode(err))

         id_field = self._meta['id_field']
         if id_field not in self._meta.get('shard_key', []):
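The anchored regex replaces the old substring test, so only MongoDB's two duplicate-key error codes are treated as uniqueness violations. A quick sanity check of the pattern (the sample messages are illustrative, not taken from the commit):

    import re

    pattern = '^E1100[01] duplicate key'

    samples = [
        u'E11000 duplicate key error index: db.blog_post.$slug_1  dup key: { : "test" }',
        u'E11001 duplicate key on update index: db.blog_post.$slug_1  dup key: { : "test" }',
        u'assertion: duplicate key in capped collection',  # contains the phrase, wrong code
    ]

    for message in samples:
        print bool(re.match(pattern, message)), '-', message

    # Prints True, True, False: the third message would have matched the old
    # `u'duplicate key' in unicode(err)` check and been misreported as a
    # uniqueness failure.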


@@ -45,6 +45,10 @@ class OperationError(Exception):
     pass


+class NotUniqueError(OperationError):
+    pass
+
+
 RE_TYPE = type(re.compile(''))
@@ -924,8 +928,11 @@ class QuerySet(object):
             ids = self._collection.insert(raw, **write_options)
         except pymongo.errors.OperationFailure, err:
             message = 'Could not save document (%s)'
-            if u'duplicate key' in unicode(err):
+            if re.match('^E1100[01] duplicate key', unicode(err)):
+                # E11000 - duplicate key error index
+                # E11001 - duplicate key on update
                 message = u'Tried to save duplicate unique keys (%s)'
+                raise NotUniqueError(message % unicode(err))
             raise OperationError(message % unicode(err))

         if not load_bulk:
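QuerySet.insert applies the same mapping, so safe bulk inserts also surface duplicate keys distinctly; a sketch reusing the illustrative BlogPost model from above:

    posts = [BlogPost(slug='a'), BlogPost(slug='a')]
    try:
        BlogPost.objects.insert(posts, safe=True)
    except NotUniqueError, err:
        print 'bulk insert hit a duplicate key:', err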


@@ -1013,6 +1013,9 @@ class DocumentTest(unittest.TestCase):
         # Two posts with the same slug is not allowed
         post2 = BlogPost(title='test2', slug='test')
+        self.assertRaises(NotUniqueError, post2.save)
+
+        # Ensure backwards compatibility for errors
         self.assertRaises(OperationError, post2.save)

     def test_unique_with(self):
@@ -1063,7 +1066,7 @@ class DocumentTest(unittest.TestCase):
         # Now there will be two docs with the same sub.slug
         post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test'))
-        self.assertRaises(OperationError, post3.save)
+        self.assertRaises(NotUniqueError, post3.save)

         BlogPost.drop_collection()
@@ -1090,11 +1093,11 @@ class DocumentTest(unittest.TestCase):
         # Now there will be two docs with the same sub.slug
         post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test'))
-        self.assertRaises(OperationError, post3.save)
+        self.assertRaises(NotUniqueError, post3.save)

         # Now there will be two docs with the same title and year
         post3 = BlogPost(title='test1', sub=SubDocument(year=2009, slug='test-1'))
-        self.assertRaises(OperationError, post3.save)
+        self.assertRaises(NotUniqueError, post3.save)

         BlogPost.drop_collection()
@@ -1117,7 +1120,7 @@ class DocumentTest(unittest.TestCase):
         try:
             cust_dupe.save()
             raise AssertionError, "We saved a dupe!"
-        except OperationError:
+        except NotUniqueError:
             pass

         Customer.drop_collection()


@@ -578,7 +578,7 @@ class QuerySetTest(unittest.TestCase):
         def throw_operation_error_not_unique():
             Blog.objects.insert([blog2, blog3], safe=True)

-        self.assertRaises(OperationError, throw_operation_error_not_unique)
+        self.assertRaises(NotUniqueError, throw_operation_error_not_unique)
         self.assertEqual(Blog.objects.count(), 2)

         Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})